/*************************GO-LICENSE-START********************************* * Copyright 2014 ThoughtWorks, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. *************************GO-LICENSE-END***********************************/ package com.thoughtworks.go.config; import java.io.File; import java.util.Date; import com.thoughtworks.go.config.materials.MaterialConfigs; import com.thoughtworks.go.config.materials.Materials; import com.thoughtworks.go.config.materials.dependency.DependencyMaterial; import com.thoughtworks.go.domain.MaterialRevision; import com.thoughtworks.go.domain.MaterialRevisions; import com.thoughtworks.go.domain.NullStage; import com.thoughtworks.go.domain.Pipeline; import com.thoughtworks.go.domain.Stage; import com.thoughtworks.go.domain.TaskProperty; import com.thoughtworks.go.domain.buildcause.BuildCause; import com.thoughtworks.go.domain.materials.dependency.DependencyMaterialRevision; import com.thoughtworks.go.helper.*; import com.thoughtworks.go.helper.GoConfigMother; import com.thoughtworks.go.server.service.UpstreamPipelineResolver; import com.thoughtworks.go.util.ReflectionUtil; import org.hamcrest.core.Is; import org.junit.After; import org.junit.Before; import org.junit.Test; import static com.thoughtworks.go.util.DataStructureUtils.m; import static org.hamcrest.Matchers.is; import static org.hamcrest.core.IsNull.nullValue; import static org.junit.Assert.assertThat; import static org.junit.matchers.JUnitMatchers.hasItems; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verifyNoMoreInteractions; public class FetchTaskTest { private static final String LABEL = "1.01"; private PipelineConfig downstream; private PipelineConfig upstream; private PipelineConfig uppestStream; private PipelineConfig randomPipeline; private PipelineConfig uppestLookalike; private CruiseConfig config; private UpstreamPipelineResolver resolver; @Before public void setUp() { config = GoConfigMother.configWithPipelines("random_pipeline", "uppest_lookalike", "uppest_stream", "upstreams_peer", "upstream", "downstream", "dummy"); randomPipeline = config.pipelineConfigByName(new CaseInsensitiveString("random_pipeline")); randomPipeline.add(StageConfigMother.stageConfig("random-stage1", new JobConfigs(new JobConfig("random-job1")))); uppestLookalike = config.pipelineConfigByName(new CaseInsensitiveString("uppest_lookalike")); uppestLookalike.add(StageConfigMother.stageConfig("uppest-stage1", new JobConfigs(new JobConfig("uppest-job1")))); uppestStream = config.pipelineConfigByName(new CaseInsensitiveString("uppest_stream")); uppestStream.add(StageConfigMother.stageConfig("uppest-stage1", new JobConfigs(new JobConfig("uppest-job1")))); uppestStream.add(StageConfigMother.stageConfig("uppest-stage2", new JobConfigs(new JobConfig("uppest-job2")))); uppestStream.add(StageConfigMother.stageConfig("uppest-stage3", new JobConfigs(new JobConfig("uppest-job3")))); upstream = config.pipelineConfigByName(new CaseInsensitiveString("upstream")); upstream.setMaterialConfigs(new 
MaterialConfigs(MaterialConfigsMother.dependencyMaterialConfig("uppest_stream", "uppest-stage2"))); upstream.add(StageConfigMother.stageConfig("up-stage1", new JobConfigs(new JobConfig("up-job1")))); upstream.add(StageConfigMother.stageConfig("up-stage2", new JobConfigs(new JobConfig("up-job2")))); downstream = config.pipelineConfigByName(new CaseInsensitiveString("downstream")); downstream.setMaterialConfigs(new MaterialConfigs(MaterialConfigsMother.dependencyMaterialConfig("upstream", "up-stage1"))); downstream.get(0).getJobs().get(0).addTask(new FetchTask(new CaseInsensitiveString("foo"), new CaseInsensitiveString("bar"), new CaseInsensitiveString("baz"), "abcd", "efg")); resolver = mock(UpstreamPipelineResolver.class); } @After public void tearDown() { verifyNoMoreInteractions(resolver); } @Test public void validate_withinTemplates_shouldPopulateErrorOnSrcFileOrSrcDirOrDestIfIsNotAValidFilePathPattern() { FetchTask task = new FetchTask(new CaseInsensitiveString("upstream"), new CaseInsensitiveString("stage"), new CaseInsensitiveString("job"), "..", ".."); ValidationContext context = ValidationContext.forChain(config, new TemplatesConfig(), downstream.getStage(new CaseInsensitiveString("stage"))); task.validate(context); assertThat(task.errors().isEmpty(), is(false)); assertThat(task.errors().on(FetchTask.SRC), is("Invalid src name '..'. It should be a valid relative path.")); assertThat(task.errors().on(FetchTask.DEST), is("Invalid dest name '..'. It should be a valid relative path.")); task.setSrcfile(null); task.setSrcdir(".."); task.validate(context); assertThat(task.errors().on(FetchTask.SRC), is("Invalid src name '..'. It should be a valid relative path.")); } @Test public void validate_shouldPopulateErrorOnSrcFileOrSrcDirOrDestIfIsNotAValidFilePathPattern() { FetchTask task = new FetchTask(new CaseInsensitiveString(""), new CaseInsensitiveString(""), new CaseInsensitiveString(""), "", ""); task.validate(ValidationContext.forChain(config, new PipelineConfigs(), downstream, downstream.getStage(new CaseInsensitiveString("stage")), downstream.getStage( new CaseInsensitiveString("stage")).getJobs().get(0))); assertThat(task.errors().isEmpty(), is(false)); assertThat(task.errors().on(FetchTask.STAGE), is("Stage is a required field.")); assertThat(task.errors().on(FetchTask.SRC), is("Should provide either srcdir or srcfile")); } @Test public void validate_shouldNotTryAndValidateWhenWithinTemplate() throws Exception { FetchTask task = new FetchTask(new CaseInsensitiveString("dummy"), new CaseInsensitiveString("stage"), new CaseInsensitiveString("job"), "src", "dest"); task.validate(ValidationContext.forChain(config, new TemplatesConfig(), downstream.getStage(new CaseInsensitiveString("stage")))); assertThat(task.errors().isEmpty(), Is.is(true)); } @Test public void validate_shouldValidateBlankStageAndJobWhenWithinTemplate() throws Exception { FetchTask task = new FetchTask(new CaseInsensitiveString("dummy"), new CaseInsensitiveString(""), new CaseInsensitiveString(""), "src", "dest"); task.validate(ValidationContext.forChain(config, new TemplatesConfig(), downstream.getStage(new CaseInsensitiveString("stage")))); assertThat(task.errors().isEmpty(), Is.is(false)); assertThat(task.errors().on(FetchTask.STAGE), is("Stage is a required field.")); assertThat(task.errors().on(FetchTask.JOB), is("Job is a required field.")); } @Test public void shouldPopulateErrorsIfFetchArtifactFromPipelineThatIsNotDependency() { FetchTask task = new FetchTask(new CaseInsensitiveString("dummy"), new 
CaseInsensitiveString("stage"), new CaseInsensitiveString("job"), "src", "dest"); task.validate(ValidationContext.forChain(config, new PipelineConfigs(), downstream, downstream.getStage(new CaseInsensitiveString("stage")))); assertThat(task.errors().isEmpty(), is(false)); assertThat(task.errors().on(FetchTask.PIPELINE_NAME), is("Pipeline \"downstream\" tries to fetch artifact from pipeline " + "\"dummy\" which is not an upstream pipeline")); } @Test public void shouldPopulateErrorsIfFetchArtifactDoesNotHaveStageAndOrJobDefined() { FetchTask task = new FetchTask(new CaseInsensitiveString(""), new CaseInsensitiveString(""), new CaseInsensitiveString(""), "src", "dest"); task.validate(ValidationContext.forChain(config, new PipelineConfigs(), downstream, new StageConfig(), new JobConfig())); assertThat(task.errors().isEmpty(), is(false)); assertThat(task.errors().on(FetchTask.STAGE), is("Stage is a required field.")); assertThat(task.errors().on(FetchTask.JOB), is("Job is a required field.")); } @Test public void shouldBeValidWhenFetchArtifactIsFromAnyAncestorStage_onTheUpstreamPipeline() { FetchTask task = new FetchTask(new CaseInsensitiveString("uppest_stream/upstream"), new CaseInsensitiveString("uppest-stage2"), new CaseInsensitiveString("uppest-job2"), "src", "dest"); task.validate(ValidationContext.forChain(config, new PipelineConfigs(), downstream, downstream.getStage(new CaseInsensitiveString("stage")))); assertThat(task.errors().isEmpty(), is(true)); } @Test public void shouldBeValidWhenFetchArtifactIsFromAnyAncestorStage_s_predecessorStage__onTheUpstreamPipeline() { FetchTask task = new FetchTask(new CaseInsensitiveString("uppest_stream/upstream"), new CaseInsensitiveString("uppest-stage1"), new CaseInsensitiveString("uppest-job1"), "src", "dest"); task.validate(ValidationContext.forChain(config, new PipelineConfigs(), downstream, downstream.getStage(new CaseInsensitiveString("stage")))); assertThat(task.errors().isEmpty(), is(true)); } @Test public void should_NOT_BeValidWhenFetchArtifactIsFromAnyAncestorStage_s_successorStage_onTheUpstreamPipeline() { FetchTask task = new FetchTask(new CaseInsensitiveString("uppest_stream/upstream"), new CaseInsensitiveString("uppest-stage3"), new CaseInsensitiveString("uppest-job3"), "src", "dest"); task.validate(ValidationContext.forChain(config, new PipelineConfigs(), downstream, downstream.getStage(new CaseInsensitiveString("stage")))); assertThat(task.errors().isEmpty(), is(false)); assertThat(task.errors().on(FetchTask.STAGE), is("Pipeline \"downstream\" tries to fetch artifact from stage \"uppest_stream :: uppest-stage3\" which does not complete before \"downstream\" pipeline's dependencies.")); } @Test public void should_NOT_BeValidWhen_pathFromAncestor_isInvalid_becauseRefferedPipelineIsNotAnAncestor() { FetchTask task = new FetchTask(new CaseInsensitiveString("random_pipeline/upstream"), new CaseInsensitiveString("random-stage1"), new CaseInsensitiveString("random-job1"), "src", "dest"); task.validate(ValidationContext.forChain(config, new PipelineConfigs(), downstream, downstream.getStage(new CaseInsensitiveString("stage")))); assertThat(task.errors().isEmpty(), is(false)); assertThat(task.errors().on(FetchTask.PIPELINE_NAME), is("Pipeline named 'random_pipeline' exists, but is not an ancestor of 'downstream' as declared in 'random_pipeline/upstream'.")); } @Test public void should_NOT_BeValidWhen_NO_pathFromAncestorIsGiven_butAncestorPipelineIsBeingFetchedFrom() { FetchTask task = new FetchTask(null, new 
CaseInsensitiveString("uppest-stage3"), new CaseInsensitiveString("uppest-job3"), "src", "dest"); StageConfig stage = downstream.getStage(new CaseInsensitiveString("stage")); task.validate(ValidationContext.forChain(config, new PipelineConfigs(), downstream, stage, stage.getJobs().get(0))); assertThat(task.errors().isEmpty(), is(false)); assertThat(task.errors().on(FetchTask.STAGE), is("Pipeline \"downstream\" tries to fetch artifact from stage \"downstream :: uppest-stage3\" which does not exist. It is used in stage \"stage\" inside job \"job\".")); } @Test public void should_BeValidWhen_hasAnAlternatePathToAncestor() { PipelineConfig upstreamsPeer = config.pipelineConfigByName(new CaseInsensitiveString("upstreams_peer")); upstreamsPeer.setMaterialConfigs(new MaterialConfigs(MaterialConfigsMother.dependencyMaterialConfig("uppest_stream", "uppest-stage1"))); upstreamsPeer.add(StageConfigMother.stageConfig("peer-stage", new JobConfigs(new JobConfig("peer-job")))); downstream = config.pipelineConfigByName(new CaseInsensitiveString("downstream")); downstream.setMaterialConfigs(new MaterialConfigs(MaterialConfigsMother.dependencyMaterialConfig("upstream", "up-stage1"), MaterialConfigsMother.dependencyMaterialConfig("upstreams_peer", "peer-stage"))); FetchTask task = new FetchTask(new CaseInsensitiveString("uppest_stream/upstream"), new CaseInsensitiveString("uppest-stage1"), new CaseInsensitiveString("uppest-job1"), "src", "dest"); task.validate(ValidationContext.forChain(config, new PipelineConfigs(), downstream, downstream.getStage(new CaseInsensitiveString("stage")))); assertThat(task.errors().isEmpty(), is(true)); task = new FetchTask(new CaseInsensitiveString("uppest_stream/upstreams_peer"), new CaseInsensitiveString("uppest-stage1"), new CaseInsensitiveString("uppest-job1"), "src", "dest"); task.validate(ValidationContext.forChain(config, new PipelineConfigs(), downstream, downstream.getStage(new CaseInsensitiveString("stage")))); assertThat(task.errors().isEmpty(), is(true)); } @Test public void should_NOT_BeValidWhen_ImmediateParentDeclaredInPathFromAncestor_isNotAParentPipeline() { PipelineConfig upstreamsPeer = config.pipelineConfigByName(new CaseInsensitiveString("upstreams_peer")); upstreamsPeer.setMaterialConfigs(new MaterialConfigs(MaterialConfigsMother.dependencyMaterialConfig("uppest_stream", "uppest-stage1"))); upstreamsPeer.add(StageConfigMother.stageConfig("peer-stage", new JobConfigs(new JobConfig("peer-job")))); downstream = config.pipelineConfigByName(new CaseInsensitiveString("downstream")); downstream.setMaterialConfigs(new MaterialConfigs(MaterialConfigsMother.dependencyMaterialConfig("upstream", "up-stage1"), MaterialConfigsMother.dependencyMaterialConfig("upstreams_peer", "peer-stage"))); FetchTask task = new FetchTask(new CaseInsensitiveString("upstream/uppest_stream"), new CaseInsensitiveString("up-stage1"), new CaseInsensitiveString("up-job1"), "src", "dest"); StageConfig stage = downstream.getStage(new CaseInsensitiveString("stage")); task.validate(ValidationContext.forChain(config, new PipelineConfigs(), downstream, stage, stage.getJobs().get(0))); assertThat(task.errors().isEmpty(), is(false)); assertThat(task.errors().on(FetchTask.PIPELINE_NAME), is("Pipeline named 'uppest_stream' exists, but is not an ancestor of 'downstream' as declared in 'upstream/uppest_stream'.")); } @Test public void should_NOT_BeValidWhen_stageMayNotHaveRunViaTheGivenPath_evenThoughItMayHaveActuallyRunAccordingToAnAlternatePath() {//TODO: Please fix this if someone cares about this 
corner case working -jj PipelineConfig upstreamsPeer = config.pipelineConfigByName(new CaseInsensitiveString("upstreams_peer")); upstreamsPeer.setMaterialConfigs(new MaterialConfigs(MaterialConfigsMother.dependencyMaterialConfig("uppest_stream", "uppest-stage1"))); upstreamsPeer.add(StageConfigMother.stageConfig("peer-stage", new JobConfigs(new JobConfig("peer-job")))); downstream = config.pipelineConfigByName(new CaseInsensitiveString("downstream")); downstream.setMaterialConfigs(new MaterialConfigs(MaterialConfigsMother.dependencyMaterialConfig("upstream", "up-stage1"), MaterialConfigsMother.dependencyMaterialConfig("upstreams_peer", "peer-stage"))); FetchTask task = new FetchTask(new CaseInsensitiveString("uppest_stream/upstreams_peer"), new CaseInsensitiveString("uppest-stage1"), new CaseInsensitiveString("uppest-job1"), "src", "dest"); task.validate(ValidationContext.forChain(config, new PipelineConfigs(), downstream, downstream.getStage(new CaseInsensitiveString("stage")))); assertThat(task.errors().isEmpty(), is(true)); task = new FetchTask(new CaseInsensitiveString("uppest_stream/upstreams_peer"), new CaseInsensitiveString("uppest-stage2"), new CaseInsensitiveString("uppest-job2"), "src", "dest"); task.validate(ValidationContext.forChain(config, new PipelineConfigs(), downstream, downstream.getStage(new CaseInsensitiveString("stage")))); assertThat(task.errors().isEmpty(), is(false)); assertThat(task.errors().on(FetchTask.STAGE), is("Pipeline \"downstream\" tries to fetch artifact from stage \"uppest_stream :: uppest-stage2\" which does not complete before \"downstream\" pipeline's dependencies.")); } @Test public void shouldFailWhenFetchArtifactIsFromAnyStage_AFTER_theDependencyStageOnTheUpstreamPipeline() { FetchTask task = new FetchTask(new CaseInsensitiveString("upstream"), new CaseInsensitiveString("up-stage2"), new CaseInsensitiveString("up-job2"), "src", "dest"); task.validate(ValidationContext.forChain(config, new PipelineConfigs(), downstream, downstream.getStage(new CaseInsensitiveString("stage")))); assertThat(task.errors().isEmpty(), is(false)); assertThat(task.errors().on(FetchTask.STAGE), is("Pipeline \"downstream\" tries to fetch artifact from stage \"upstream :: up-stage2\" which does not complete before \"downstream\" pipeline's dependencies.")); } @Test public void shouldPopulateErrorIfFetchArtifactFromDependentPipelineButStageDoesNotExist() { FetchTask task = new FetchTask(new CaseInsensitiveString("upstream"), new CaseInsensitiveString("stage-does-not-exist"), new CaseInsensitiveString("job"), "src", "dest"); task.validate(ValidationContext.forChain(config, new PipelineConfigs(), downstream, downstream.getStage(new CaseInsensitiveString("stage")), downstream.getStage( new CaseInsensitiveString("stage")), downstream.getStage(new CaseInsensitiveString("stage")).getJobs().get(0))); assertThat(task.errors().isEmpty(), is(false)); assertThat(task.errors().on(FetchTask.STAGE), is("Pipeline \"downstream\" tries to fetch artifact from stage " + "\"upstream :: stage-does-not-exist\" which does not exist. 
It is used in stage \"stage\" inside job \"job\".")); } @Test public void shouldPopulateErrorIfFetchArtifactFromDependentPipelineButJobNotExist() { FetchTask task = new FetchTask(new CaseInsensitiveString("upstream"), new CaseInsensitiveString("stage"), new CaseInsensitiveString("job-does-not-exist"), "src", "dest"); task.validate(ValidationContext.forChain(config, new PipelineConfigs(), downstream, downstream.getStage(new CaseInsensitiveString("stage")))); assertThat(task.errors().isEmpty(), is(false)); assertThat(task.errors().on(FetchTask.JOB), is("Pipeline \"downstream\" tries to fetch artifact from job " + "\"upstream :: stage :: job-does-not-exist\" which does not exist.")); } @Test public void shouldBeValidIfFetchArtifactUsingADependantPipeline() { FetchTask task = new FetchTask(new CaseInsensitiveString("upstream"), new CaseInsensitiveString("up-stage1"), new CaseInsensitiveString("up-job1"), "src", "dest"); task.validate(ValidationContext.forChain(config, downstream, downstream.getStage(new CaseInsensitiveString("stage")))); assertThat(task.errors().isEmpty(), is(true)); } @Test public void shouldBeValidIfFetchArtifactUsingAStageBeforeCurrentInTheSamePipeline() { FetchTask task = new FetchTask(new CaseInsensitiveString("upstream"), new CaseInsensitiveString("stage"), new CaseInsensitiveString("job"), "src", "dest"); task.validate(ValidationContext.forChain(config, upstream, upstream.getStage(new CaseInsensitiveString("up-stage1")))); assertThat(task.errors().isEmpty(), is(true)); } @Test public void shouldBeValidIfFetchArtifactDoesNotSpecifyPipeline() { FetchTask task = new FetchTask(new CaseInsensitiveString("stage"), new CaseInsensitiveString("job"), "src", "dest"); task.validate(ValidationContext.forChain(config, upstream, upstream.getStage(new CaseInsensitiveString("up-stage1")))); assertThat(task.errors().isEmpty(), is(true)); } // The Fetch Task is now case insensitive to job names . This behavior has changed from before migration of validations // to inside Config Objects for clicky admin. It was earlier Case SENSITIVE. 
This was done to address #4970 // -Jake @Test public void shouldPopulateErrorsIfFetchArtifactUsingJobNameWithDifferentCase() { FetchTask task = new FetchTask(new CaseInsensitiveString("upstream"), new CaseInsensitiveString("stage"), new CaseInsensitiveString("JOB"), "src", "dest"); task.validate(ValidationContext.forChain(config, new PipelineConfigs(), downstream, downstream.getStage(new CaseInsensitiveString("stage")))); assertThat(task.errors().isEmpty(), is(true)); } @Test public void shouldPopulateErrorIfSrcFileAndSrcDirBothAreDefined() { FetchTask task = new FetchTask(new CaseInsensitiveString("upstream"), new CaseInsensitiveString("stage"), new CaseInsensitiveString("job"), "src_file", "dest"); task.setSrcdir("src_dir"); task.validate(ValidationContext.forChain(config, upstream, upstream.getStage(new CaseInsensitiveString("stage")))); assertThat(task.errors().isEmpty(), is(false)); assertThat(task.errors().on(FetchTask.SRC), is("Only one of srcfile or srcdir is allowed at a time")); } @Test public void shouldPopulateErrorIfBothSrcFileAndSrcDirAreNotDefined() { FetchTask task = new FetchTask(new CaseInsensitiveString("upstream"), new CaseInsensitiveString("stage"), new CaseInsensitiveString("job"), "src_file", "dest"); task.setSrcfile(null); task.validate(ValidationContext.forChain(config, upstream, upstream.getStage(new CaseInsensitiveString("stage")))); assertThat(task.errors().isEmpty(), is(false)); assertThat(task.errors().on(FetchTask.SRC), is("Should provide either srcdir or srcfile")); } @Test public void shouldPopulateErrorOnSrcFileOrSrcDirOrDestIfIsNotAValidFilePathPattern() { FetchTask task = new FetchTask(new CaseInsensitiveString("upstream"), new CaseInsensitiveString("stage"), new CaseInsensitiveString("job"), "..", ".."); ValidationContext context = ValidationContext.forChain(config, upstream, upstream.getStage(new CaseInsensitiveString("stage"))); task.validate(context); assertThat(task.errors().isEmpty(), is(false)); assertThat(task.errors().on(FetchTask.SRC), is("Invalid src name '..'. It should be a valid relative path.")); assertThat(task.errors().on(FetchTask.DEST), is("Invalid dest name '..'. It should be a valid relative path.")); task.setSrcfile(null); task.setSrcdir(".."); task.validate(context); assertThat(task.errors().on(FetchTask.SRC), is("Invalid src name '..'. 
It should be a valid relative path.")); } @Test public void shouldIndicateSourceIsAFileBasedOnValuePopulated() { FetchTask fetchTask = new FetchTask(); fetchTask.setSrcfile("a.txt"); assertThat(fetchTask.isSourceAFile(), is(true)); FetchTask fetchTaskWithDir = new FetchTask(); fetchTaskWithDir.setSrcdir("/a/b"); assertThat(fetchTaskWithDir.isSourceAFile(), is(false)); } @Test public void shouldReturnSrcFileWhenSrcFileIsNotEmpty() throws Exception { FetchTask fetchTask = new FetchTask(); fetchTask.setSrcfile("a.jar"); assertThat(fetchTask.getSrc(), is("a.jar")); } @Test public void shouldReturnSrcDirWhenSrcDirIsNotEmpty() throws Exception { FetchTask fetchTask = new FetchTask(); fetchTask.setSrcdir("folder"); assertThat(fetchTask.getSrc(), is("folder")); } @Test public void shouldNormalizeDest() throws Exception { FetchTask fetchTask = new FetchTask(new CaseInsensitiveString("mingle"), new CaseInsensitiveString("dev"), new CaseInsensitiveString("windows-3"), "cruise-output/console.log", "dest\\subfolder"); assertThat(fetchTask.getDest(), is("dest/subfolder")); } @Test public void shouldNormalizeSrcFile() throws Exception { FetchTask fetchTask = new FetchTask(new CaseInsensitiveString("mingle"), new CaseInsensitiveString("dev"), new CaseInsensitiveString("windows-3"), "cruise-output\\console.log", "dest\\subfolder"); assertThat(fetchTask.getSrc(), is("cruise-output/console.log")); } @Test public void shouldNormalizeSrcDir() throws Exception { FetchTask fetchTask = new FetchTask(new CaseInsensitiveString("mingle"), new CaseInsensitiveString("dev"), new CaseInsensitiveString("windows-3"), "", "dest\\subfolder"); fetchTask.setSrcdir("testfolder\\subfolder"); assertThat(fetchTask.getSrc(), is("testfolder/subfolder")); } @Test public void describeTestForSrcFile() throws Exception { FetchTask fetchTask = new FetchTask(new CaseInsensitiveString("mingle"), new CaseInsensitiveString("dev"), new CaseInsensitiveString("windows-3"), "cruise.zip", "dest\\subfolder"); assertThat(fetchTask.describe(), is("<fetchartifact pipeline=\"mingle\" stage=\"dev\" job=\"windows-3\" " + "srcfile=\"cruise.zip\" dest=\"dest\\subfolder\" />")); } @Test public void describeTestForSrcDir() throws Exception { FetchTask fetchTask = new FetchTask(new CaseInsensitiveString("mingle"), new CaseInsensitiveString("dev"), new CaseInsensitiveString("windows-3"), "", "dest\\subfolder"); fetchTask.setSrcdir("cruise-output"); assertThat(fetchTask.describe(), is("<fetchartifact pipeline=\"mingle\" stage=\"dev\" job=\"windows-3\" " + "srcdir=\"cruise-output\" dest=\"dest\\subfolder\" />")); } @Test public void describeTestForSrcDirAndSrcFile() throws Exception { FetchTask fetchTask = new FetchTask(new CaseInsensitiveString("mingle"), new CaseInsensitiveString("dev"), new CaseInsensitiveString("windows-3"), "cruise.zip", "dest\\subfolder"); fetchTask.setSrcdir("cruise-output"); assertThat(fetchTask.describe(), is("<fetchartifact pipeline=\"mingle\" stage=\"dev\" job=\"windows-3\" srcfile=\"cruise.zip\" " + "srcdir=\"cruise-output\" dest=\"dest\\subfolder\" />")); } @Test public void shouldUpdateItsAttributesFromAttributeMap() throws Exception { FetchTask fetchTask = new FetchTask(); fetchTask.setConfigAttributes( m(FetchTask.PIPELINE_NAME, "pipeline_foo", FetchTask.STAGE, "stage_bar", FetchTask.JOB, "job_baz", FetchTask.SRC, "src_file", FetchTask.DEST, "dest_dir", FetchTask.IS_SOURCE_A_FILE, "1")); assertThat(fetchTask.getTargetPipelineName(), is(new CaseInsensitiveString("pipeline_foo"))); assertThat(fetchTask.getStage(), is(new 
CaseInsensitiveString("stage_bar"))); assertThat(fetchTask.getJob().toString(), is("job_baz")); assertThat(fetchTask.getSrcfile(), is("src_file")); assertThat(fetchTask.getSrcdir(), is(nullValue())); assertThat(fetchTask.getDest(), is("dest_dir")); fetchTask.setConfigAttributes(m(FetchTask.PIPELINE_NAME, "", FetchTask.STAGE, "", FetchTask.JOB, "", FetchTask.SRC, "", FetchTask.IS_SOURCE_A_FILE, "1", FetchTask.DEST, "")); assertThat(fetchTask.getTargetPipelineName(), is(new CaseInsensitiveString(""))); assertThat(fetchTask.getStage(), is(new CaseInsensitiveString(""))); assertThat(fetchTask.getJob().toString(), is("")); assertThat(fetchTask.getSrcfile(), is(nullValue())); assertThat(fetchTask.getSrcdir(), is(nullValue())); assertThat(fetchTask.getDest(), is("")); } @Test public void shouldSetSrcFileToNullIfSrcDirIsDefined() throws Exception { FetchTask fetchTask = new FetchTask(); fetchTask.setConfigAttributes( m(FetchTask.PIPELINE_NAME, "pipeline_foo", FetchTask.STAGE, "stage_bar", FetchTask.JOB, "job_baz", FetchTask.IS_SOURCE_A_FILE, "0", FetchTask.SRC, "src_dir", FetchTask.DEST, "dest_dir")); assertThat(fetchTask.getSrcfile(), is(nullValue())); assertThat(fetchTask.getSrcdir(), is("src_dir")); } @Test public void shouldSetSrcFileToNullWhenSrcDirIsUpdated() { FetchTask fetchTask = new FetchTask(new CaseInsensitiveString("pname"), new CaseInsensitiveString("sname"), new CaseInsensitiveString("jname"), "sfile", "dest"); fetchTask.setConfigAttributes( m(FetchTask.PIPELINE_NAME, "pipeline_foo", FetchTask.STAGE, "stage_bar", FetchTask.JOB, "job_baz", FetchTask.IS_SOURCE_A_FILE, "0", FetchTask.SRC, "src_dir", FetchTask.DEST, "dest_dir")); assertThat(fetchTask.getSrcfile(), is(nullValue())); assertThat(fetchTask.getSrcdir(), is("src_dir")); } @Test public void shouldNotUpdateItsAttributesFromAttributeMapWhenKeysNotPresent() throws Exception { FetchTask fetchTask = new FetchTask(); fetchTask.setConfigAttributes( m(FetchTask.PIPELINE_NAME, "pipeline_foo", FetchTask.STAGE, "stage_bar", FetchTask.JOB, "job_baz", FetchTask.SRC, "src_file", FetchTask.IS_SOURCE_A_FILE, "1", FetchTask.SRC, "src_file", FetchTask.DEST, "dest_dir")); fetchTask.setConfigAttributes(m()); assertThat(fetchTask.getTargetPipelineName(), is(new CaseInsensitiveString("pipeline_foo"))); assertThat(fetchTask.getStage(), is(new CaseInsensitiveString("stage_bar"))); assertThat(fetchTask.getJob().toString(), is("job_baz")); assertThat(fetchTask.getSrcfile(), is("src_file")); assertThat(fetchTask.getSrcdir(), is(nullValue())); assertThat(fetchTask.getDest(), is("dest_dir")); } @Test public void shouldUpdateDestToNullIfDestIsEmptyInAttributeMap_SoThatItDoesNotGetSerializedInXml() { FetchTask fetchTask = new FetchTask(new CaseInsensitiveString("mingle"), new CaseInsensitiveString("dev"), new CaseInsensitiveString("one"), "", "dest"); fetchTask.setConfigAttributes(m(FetchTask.DEST, "")); assertThat(fetchTask, is(new FetchTask(new CaseInsensitiveString("mingle"), new CaseInsensitiveString("dev"), new CaseInsensitiveString("one"), "", null))); } @Test public void shouldPopulateAllFieldsInReturnedPropertiesForDisplay() { FetchTask fetchTask = new FetchTask(new CaseInsensitiveString("foo-pipeline"), new CaseInsensitiveString("bar-stage"), new CaseInsensitiveString("baz-job"), "quux.c", "bang-file"); assertThat(fetchTask.getPropertiesForDisplay(), hasItems(new TaskProperty("PIPELINE_NAME", "foo-pipeline", "pipeline_name"), new TaskProperty("STAGE_NAME", "bar-stage", "stage_name"), new TaskProperty("JOB_NAME", "baz-job", "job_name"), new 
TaskProperty("SRC_FILE", "quux.c", "src_file"), new TaskProperty("DEST_FILE", "bang-file", "dest_file"))); assertThat(fetchTask.getPropertiesForDisplay().size(), is(5)); fetchTask = new FetchTask(new CaseInsensitiveString("foo-pipeline"), new CaseInsensitiveString("bar-stage"), new CaseInsensitiveString("baz-job"), null, "bang-file"); fetchTask.setSrcdir("foo/src"); assertThat(fetchTask.getPropertiesForDisplay(), hasItems(new TaskProperty("PIPELINE_NAME", "foo-pipeline", "pipeline_name"), new TaskProperty("STAGE_NAME", "bar-stage", "stage_name"), new TaskProperty("JOB_NAME", "baz-job", "job_name"), new TaskProperty("SRC_DIR", "foo/src", "src_dir"), new TaskProperty("DEST_FILE", "bang-file", "dest_file"))); assertThat(fetchTask.getPropertiesForDisplay().size(), is(5)); fetchTask = new FetchTask(new CaseInsensitiveString(null), new CaseInsensitiveString("bar-stage"), new CaseInsensitiveString("baz-job"), null, null); assertThat(fetchTask.getPropertiesForDisplay(), hasItems(new TaskProperty("STAGE_NAME", "bar-stage", "stage_name"), new TaskProperty("JOB_NAME", "baz-job", "job_name"))); assertThat(fetchTask.getPropertiesForDisplay().size(), is(2)); } @Test public void shouldCreateChecksumPath() { FetchTask fetchTask = new FetchTask(new CaseInsensitiveString("up-pipeline"), new CaseInsensitiveString("bar-stage"), new CaseInsensitiveString("baz-job"), "quux.c", "bang-file"); String checksumPath = fetchTask.checksumPath(); assertThat(checksumPath, is("up-pipeline_bar-stage_baz-job_md5.checksum")); } @Test public void shouldCreateArtifactDest() { FetchTask fetchTask = new FetchTask(new CaseInsensitiveString("up-pipeline"), new CaseInsensitiveString("bar-stage"), new CaseInsensitiveString("baz-job"), "quux.c", "dest-dir"); File artifactDest = fetchTask.artifactDest("foo-pipeline", "file-name-in-dest"); assertThat(artifactDest, is(new File("pipelines/foo-pipeline/dest-dir/file-name-in-dest"))); } @Test public void shouldNotPopulatePropertyForPipelineWhenPipelineIsNull() { FetchTask fetchTask = new FetchTask(null, new CaseInsensitiveString("bar-stage"), new CaseInsensitiveString("baz-job"), "quux.c", "bang-file"); // is null when no pipeline name is specified in config xml (manual entry) ReflectionUtil.setField(fetchTask, "pipelineName", null); assertThat(fetchTask.getPropertiesForDisplay(), hasItems(new TaskProperty("STAGE_NAME", "bar-stage", "stage_name"), new TaskProperty("JOB_NAME", "baz-job", "job_name"), new TaskProperty("SRC_FILE", "quux.c", "src_file"), new TaskProperty("DEST_FILE", "bang-file", "dest_file"))); assertThat(fetchTask.getPropertiesForDisplay().size(), is(4)); } private Pipeline pipeline(String label) { Pipeline pipeline = createPipeline("cruise", new NullStage("Stage")); pipeline.setLabel(label); return pipeline; } private Pipeline createPipeline(String pipelineName, Stage stage) { Materials materials = MaterialsMother.defaultMaterials(); return new Pipeline(pipelineName, BuildCause.createWithModifications(ModificationsMother.modifyOneFile(materials, ModificationsMother.nextRevision()), ""), stage); } private String getSrc() { return ""; } private Pipeline pipelineWithStage(String pipelineName, int pipelineCounter, String label, String stagename, int stageCounter) { Stage stage = StageMother.custom(stagename); stage.setCounter(stageCounter); Pipeline pipeline = createPipeline(pipelineName, stage); pipeline.setCounter(pipelineCounter); pipeline.setLabel(label); return pipeline; } private Pipeline pipelineWithDepencencyMaterial(String currentPipeline, String upstreamPipelineName, 
int upstreamPipelineCounter, String upstreamPipelineLabel, String upstreamStageName, int upstreamStageCounter) { Pipeline pipeline = createPipeline(currentPipeline, new NullStage("Stage")); pipeline.setBuildCause( buildCauseWithDependencyMaterial(upstreamPipelineName, upstreamPipelineCounter, upstreamPipelineLabel, upstreamStageName, upstreamStageCounter)); return pipeline; } private BuildCause buildCauseWithDependencyMaterial(String upstreamPipelineName, int upstreamPipelineCounter, String upstreamPipelineLabel, String upstreamStageName, int upstreamStageCounter) { BuildCause buildCause = BuildCause.createWithEmptyModifications(); MaterialRevisions materialRevisions = new MaterialRevisions(); DependencyMaterialRevision materialRevision = DependencyMaterialRevision.create(upstreamPipelineName, upstreamPipelineCounter, upstreamPipelineLabel, upstreamStageName, upstreamStageCounter); MaterialRevision withRevision = materialRevision.convert(new DependencyMaterial( new CaseInsensitiveString(upstreamPipelineName), new CaseInsensitiveString(upstreamStageName)), new Date()); materialRevisions.addRevision(withRevision); buildCause.setMaterialRevisions(materialRevisions); return buildCause; } }
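A minimal usage sketch of the validation pattern exercised throughout the test class above, mirroring calls that already appear in the tests; the pipeline, stage, and job names are illustrative and assume the fixture built in setUp():

// Sketch only: assumes 'config' and 'downstream' are configured as in setUp() above.
// The first argument uses the "ancestor/parent" path form to fetch from a grandparent pipeline.
FetchTask task = new FetchTask(
        new CaseInsensitiveString("uppest_stream/upstream"),
        new CaseInsensitiveString("uppest-stage2"),
        new CaseInsensitiveString("uppest-job2"),
        "src", "dest");
task.validate(ValidationContext.forChain(config, new PipelineConfigs(), downstream,
        downstream.getStage(new CaseInsensitiveString("stage"))));
// Valid because uppest-stage2 completes before downstream's dependency chain.
assertThat(task.errors().isEmpty(), is(true));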
/** * Package: MAG - VistA Imaging WARNING: Per VHA Directive 2004-038, this routine should not be modified. Date Created: September 26, 2006 Site Name: Washington OI Field Office, Silver Spring, MD Developer: VHAISWPETERB Description: ;; +--------------------------------------------------------------------+ ;; Property of the US Government. ;; No permission to copy or redistribute this software is given. ;; Use of unreleased versions of this software requires the user ;; to execute a written test agreement with the VistA Imaging ;; Development Office of the Department of Veterans Affairs, ;; telephone (301) 734-0100. ;; ;; The Food and Drug Administration classifies this software as ;; a Class II medical device. As such, it may not be changed ;; in any way. Modifications to this software may result in an ;; adulterated medical device under 21CFR820, the use of which ;; is considered to be a violation of US Federal Statutes. ;; +--------------------------------------------------------------------+ */ package gov.va.med.imaging.dicom.dcftoolkit.utilities.reconstitution; import gov.va.med.imaging.SizedInputStream; import gov.va.med.imaging.dicom.common.interfaces.IDicomDataSet; import gov.va.med.imaging.dicom.dcftoolkit.common.impl.DicomDataSetImpl; import gov.va.med.imaging.exceptions.TextFileException; import gov.va.med.imaging.exceptions.TextFileExtractionException; import gov.va.med.imaging.exceptions.TextFileNotFoundException; import java.io.BufferedReader; import java.io.FileNotFoundException; import java.io.FileReader; import java.io.IOException; import java.io.InputStreamReader; import java.nio.ByteBuffer; import java.nio.ShortBuffer; import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; import java.util.regex.Pattern; import org.apache.log4j.Logger; import com.lbs.DCS.AttributeTag; import com.lbs.DCS.DCM; import com.lbs.DCS.DCSException; import com.lbs.DCS.DicomCSElement; import com.lbs.DCS.DicomDataDictionary; import com.lbs.DCS.DicomDataSet; import com.lbs.DCS.DicomElement; import com.lbs.DCS.DicomElementFactory; import com.lbs.DCS.DicomFDElement; import com.lbs.DCS.DicomFLElement; import com.lbs.DCS.DicomOBElement; import com.lbs.DCS.DicomOWElement; import com.lbs.DCS.DicomSLElement; import com.lbs.DCS.DicomSQElement; import com.lbs.DCS.DicomSSElement; import com.lbs.DCS.DicomULElement; import com.lbs.DCS.DicomUSElement; /** * * Text File Parser class. This parser is specific to the Vista Imaging Legacy * environment. The class will parse the Text file passed from Archiving * and build a new DicomDataSet from the Text file information. Then the class shall * wrap the DicomDataSet object into a generic DicomDataSet object. The wrapping allows * the DicomDataSet object to be passed to the Dicom Generic Layer. * * @author William Peterson * extended by Csaba Titton * for ViX streaming */ //NOTE 0008,1032/0008,0103 and // 0040,0260/0008,0103 and // 0040,0275/0040,0008/0008,0103 has no value in output14.dcm. Investigated the issue. // This is acceptable according to the DICOM Standard. public class LegacyTextFileParser { /* * Create a DicomDataSet object. This object is DCF Toolkit specific. 
*/ private DicomDataSet dicomDataSet = null; private IDicomDataSet toolkitDDS = null; private OriginalPixelDataInfo originalPixelData = null; private static Logger logger = Logger.getLogger (LegacyTextFileParser.class); private static Logger testLogger = Logger.getLogger("JUNIT_TEST"); /** * Constructor */ public LegacyTextFileParser() { super(); } /** * Invoke method to create a DicomDataSet based on a Text file. * * @param textFilename represents the name (and path) of the Text file. * @return represents the encapsulated DicomDataSet that is safe for DicomGeneric Layer. * @throws TextFileNotFoundException * @throws TextFileException * @throws TextFileExtractionException */ public IDicomDataSet createDicomDataSet(String textFilename, OriginalPixelDataInfo pixelData) throws TextFileNotFoundException, TextFileException, TextFileExtractionException{ logger.info(this.getClass().getName()+": Dicom Toolkit Layer: " + "...parsing Text file into DicomDataSet."); logger.debug("Text File: "+ textFilename); this.originalPixelData = pixelData; BufferedReader buffer = null; try{ //Get Text file. //JUNIT Create test to verify how this fails if not correct permissions. buffer = new BufferedReader(new FileReader(textFilename)); //Invoke parser. this.parseTextFile(buffer, true); //REMINDER Find out why I have the following line. It does not make sense, but // I don't want to change it now. Unsure of effects if omitted. pixelData = this.originalPixelData; //encapsulate DicomDataSet object. return (this.encapsulateDicomDataSet()); } catch(FileNotFoundException noFile){ logger.error(noFile.getMessage()); logger.error(this.getClass().getName()+": Dicom Toolkit layer: " + "Exception thrown while attempting to open "+textFilename+"."); throw new TextFileNotFoundException("Could not find or open "+textFilename+".", noFile); } finally{ if(buffer != null){ try{ buffer.close(); } catch(Throwable T){ logger.error(this.getClass().getName()+": Dicom Toolkit layer: "+ "Exception thrown while closing Text File "+textFilename+"."); } System.gc(); } } } /** * Invoke method to create a DicomDataSet based on a Text data stream. * * @param textStream represents the stream of VistA Imaging TXT data. * @return represents the encapsulated DicomDataSet that is safe for DicomGeneric Layer. * @throws TextFileNotFoundException * @throws TextFileException * @throws TextFileExtractionException */ public IDicomDataSet createDicomDataSet(BufferedReader buffer, OriginalPixelDataInfo pixelData) throws TextFileException, TextFileExtractionException { logger.info("... Dicom Toolkit Layer: parsing Text data into DicomDataSet ..."); logger.debug("... Start Text Data Stream parsing... "); this.originalPixelData = pixelData; //Invoke parser. this.parseTextFile(buffer, false); pixelData = this.originalPixelData; //encapsulate DicomDataSet object. return (this.encapsulateDicomDataSet()); } /** * Encapsulates the DCF Toolkit specific DicomDataSet object. * * @return represents the Generic DicomDataSet object. */ private IDicomDataSet encapsulateDicomDataSet(){ testLogger.info("... 
encapsulating DDS ..."); try{ //toolkitDDS = (IDicomDataSet)SpringContext.getContext().getBean("DicomDataSet"); //toolkitDDS.setDicomDataSet(dicomDataSet); toolkitDDS = new DicomDataSetImpl(dicomDataSet); } catch(Exception e){ logger.error("Error: " + e.getMessage()); logger.error(this.getClass().getName()+": Dicom Toolkit layer: " + "Exception thrown while encapsulating Dicom Dataset."); e.printStackTrace(); } return toolkitDDS; } /** * Invoke method to extract HIS updates from an open Text data stream. * * @param buffer represents the stream of VistA Imaging TXT data. * @return HashMap of DICOm tag-value pairs to be updated in DICOM DataSet. * @throws TextFileExtractionException */ public HashMap<String, String> getHisUpdates(BufferedReader buffer) throws TextFileException, TextFileExtractionException { logger.info("... Parsing text data HIS update section ..."); logger.debug("... Continue Text Data parsing for VistA updates... "); HashMap<String, String> hisChanges=null; hisChanges = this.parseHisUpdates(buffer); try { buffer.close(); } catch(IOException io){ logger.error("Cannot close Text Stream Buffer."); throw new TextFileExtractionException(); } return (hisChanges); } /** * Invoke method to extract HIS updates from an open Text data stream. * * @param buffer represents the stream of VistA Imaging TXT data. * @return HashMap of DICOm tag-value pairs to be updated in DICOM DataSet. * @throws TextFileExtractionException */ public HashMap<String, String> extractHisUpdatesfromTextStream(SizedInputStream sizedTextStream) throws TextFileException, TextFileExtractionException { logger.info("... Parsing text data HIS update section ..."); logger.debug("... Start parsing VistA updates... "); HashMap<String, String> hisChanges=null; BufferedReader buffer = new BufferedReader(new InputStreamReader(sizedTextStream.getInStream())); hisChanges = this.parseHisUpdates(buffer); try { buffer.close(); } catch(IOException io){ logger.error("Cannot close Text Stream Buffer."); throw new TextFileExtractionException(); } return (hisChanges); } /** * Parse the Text file. The Text file is made up of two sections, "Data1" and * "DICOM Data". Both sections are read and decoded. * * @param buffer represents the Text file now in the form of a BufferReader object. * @throws TextFileException * @throws TextFileExtractionException */ private void parseTextFile(BufferedReader buffer, boolean doClose) throws TextFileException, TextFileExtractionException{ String textLine = ""; testLogger.info("... Parsing text data top section ..."); try{ //Loop thru the lines until $$BEGIN DATA1. //Ignore each line until $$BEGIN DATA1 do{ textLine = this.getNextTextLine(buffer); } while(!(textLine.equals("$$BEGIN DATA1"))); do{ textLine = this.getNextTextLine(buffer); //Extract only the PATIENTS_XXX fields //Replace commas with carats in Patient's Name field //100507-WFP-Removing all IF statements except DCM_TO_TGA_PARAMETERS. // Reason is the application does not use this information. It serves // no purpose. But I'm leaving the code in case I'm wrong. 
/* if(textLine.startsWith("PATIENTS_NAME")){ String patientsName = textLine.substring(textLine.indexOf("=")+1); } if(textLine.startsWith("PATIENTS_ID")){ String patientsID = textLine.substring(textLine.indexOf("=")+1); } if(textLine.startsWith("PATIENTS_BIRTH_DATE")){ String patientsBirth = textLine.substring(textLine.indexOf("=")+1); } if(textLine.startsWith("PATIENTS_AGE")){ String patientsAge = textLine.substring(textLine.indexOf("=")+1); } if(textLine.startsWith("PATIENTS_SEX")){ String patientsSex = textLine.substring(textLine.indexOf("=")+1); } if(textLine.startsWith("ACCESSION_NUMBER")){ String accessionNumber = textLine.substring(textLine.indexOf("=")+1); } */ if(textLine.startsWith("DCM_TO_TGA_PARAMETERS")){ String dcmtotgaParameters = textLine.substring(textLine.indexOf("=")+1); this.originalPixelData.setDcmtotgaParameters(dcmtotgaParameters); } //Place these PATIENTS_XXX fields into a temp DicomDataSet object. }while(!(textLine.equals("$$END DATA1"))); //Invoke parseDicomDataSection to parse rest of file. this.parseDicomDataSection(buffer); if (doClose) buffer.close(); } catch(IOException io){ logger.error("Cannot extract from Text File."); logger.error("Working on Line: " + textLine); throw new TextFileExtractionException(); } } /** * Parse the "DICOM Data" section of the Text file. This is the grunt of the work * that needs to be done. * * @param buffer represents the Text file now in the form of a BufferedReader object. * @throws TextFileExtractionException */ private void parseDicomDataSection(BufferedReader buffer) throws TextFileExtractionException{ this.dicomDataSet = new DicomDataSet(); String textDicomLine = ""; boolean sequenceFlag = false; DicomSQElement sequenceElement = null; testLogger.info("... Parsing text data Dicom DataSet section ..."); try{ //Mark Buffer. buffer.mark(255); //Declare lineArray object. ArrayList<String> lineArray = new ArrayList<String>(); //Loop thru the lines until $BEGIN DICOM DATA. //Ignore each line until $$BEGIN DICOM DATA. do{ textDicomLine = this.getNextTextLine(buffer); } while(!(textDicomLine.equals("$$BEGIN DICOM DATA"))); textDicomLine = this.getNextTextLine(buffer); // testLogger.debug("Current Line: " + textDicomLine); //Loop thru each line until $$END DICOM DATA. while(!(textDicomLine.equals("$$END DICOM DATA"))){ //Check for Odd Group. String checkGroup = textDicomLine.substring(0,4); //String checkElement = textDicomLine.substring(5,9); int i = Integer.parseInt(checkGroup, 16); if(this.isGroupToBeAdded(i)){ //If no Odd group or Group 88, //Check if 9th character in line is a "|". if(!(textDicomLine.substring(9,10).equals("|"))){ //If no, //Add string to lineArray object. lineArray.add(textDicomLine); //Set Sequence flag. sequenceFlag = true; } else{ //If yes, //Check if Sequence flag is set. if(sequenceFlag){ //If yes, //Reset Mark Buffer. This allows to pick up the element after the // sequence again. buffer.reset(); //Invoke extractSequenceData method and pass lineArray. sequenceElement = this.extractSequenceData(lineArray); //Add DicomSQElement object to dds. this.dicomDataSet.insert(sequenceElement); //Clean lineArray object. lineArray.clear(); //Unset the Sequence flag. sequenceFlag = false; } else{ //If no, //Invoke extractDicomElement method and pass the line. this.extractDicomElement(textDicomLine, this.dicomDataSet); } //End If for "|" delimiter. } //End If for Odd Group. } //Mark Buffer. 
buffer.mark(255); textDicomLine = this.getNextTextLine(buffer); // testLogger.debug("Current Line: " + textDicomLine); //End Loop due to $$END DICOM DATA or EOF. } } catch(IOException io){ logger.error(io.getMessage()); logger.error(this.getClass().getName()+": " + "Exception thrown while reading Text file's Dicom Data Section."); throw new TextFileExtractionException("Failure to read DicomData Section.", io); } catch(NumberFormatException number){ logger.error(number.getMessage()); logger.error(this.getClass().getName()+": Working on Dicom Line: " + textDicomLine); throw new TextFileExtractionException("Failure on Number Format.", number); } this.customTGAElementCleanup(); } /** * Recursive method to handle the Sequences and nested Sequences inside of the "DICOM * Data" section. * * @param lines represents the Array of Sequence lines in the Text file. * @return represents the Sequence lines converted into a single Dicom Sequence Element. * @throws TextFileExtractionException */ private DicomSQElement extractSequenceData(ArrayList<String> lines) throws TextFileExtractionException{ DicomDataSet seqDDS = new DicomDataSet(); ArrayList<DicomDataSet> ddsArrayList = new ArrayList<DicomDataSet>(); //Create DicomSQElement object. DicomSQElement sequence = null; String element = new String(""); //Initialize seqDDS index to 0. int ddsIndex = 0; //Create previouseSeqItem. String previousSeqItem = ""; //Set Sequence flag to false. boolean sequenceFlag = false; //Declare seqArray object. ArrayList<String> seqArray = new ArrayList<String>(); try{ //Loop thru each line of lines array until null. Grab line in Loop. for(int x=0; x<lines.size(); x++){ //Split line into two substrings using first period. String seqLine = (String)lines.get(x); String subLines[] = seqLine.split("\\.",2); //Assign first substring to Tag. element = subLines[0]; //Split second substring into two sub-strings using first carat. String subSubLines[] = subLines[1].split("\\^",2); //Assign first sub-substring to seqItem. String seqItem = subSubLines[0]; //Assign second sub-substring to seqArray object. String elementData = subSubLines[1]; if(previousSeqItem.equals("")){ //If yes, //Initialize previousSeqItem with seqItem. previousSeqItem = seqItem; } //Check if first sub-substring matches seqItem. if(!(previousSeqItem.equals(seqItem))){ //If no, ddsArrayList.add(seqDDS); seqDDS = new DicomDataSet(); previousSeqItem = seqItem; //End If } String checkGroup = elementData.substring(0,4); int g = Integer.parseInt(checkGroup, 16); if(this.isGroupToBeAdded(g)){ //Check if 9th character is a "|". This means another Sequence. //This if/else determines if the line is another sequence. if(!(elementData.substring(9,10).equals("|"))){ //If no, //Add string to seqArray object. seqArray.add(elementData); //Set Sequence flag. sequenceFlag = true; //If yes, } else{ //Check if Sequence flag is set. if(sequenceFlag){ //If yes, //Decrement lines array index. x--; //Re-invoke extractSequenceData method and pass lineArray. DicomSQElement subSequence = this.extractSequenceData(seqArray); //add DicomSQElement object to dds. seqDDS.insert(subSequence); //Unset the Sequence flag. sequenceFlag = false; } else{ //If no, //Invoke extractDicomElement method and pass the second substring and // and temp DicomDataSet object. this.extractDicomElement(elementData, seqDDS); //End If for Sequence flag. } //End If for "|" delimiter. } //End If for Group Check } //Increment lines array index. //Loop thru lines array is complete. } //Check if Sequence flag is set. 
if(sequenceFlag){ //If yes, //Re-invoke extractSequenceData method and pass lineArray. DicomSQElement subSequence = this.extractSequenceData(seqArray); //add DicomSQElement object to dds. seqDDS.insert(subSequence); //Unset the Sequence flag. sequenceFlag = false; } ddsArrayList.add(seqDDS); ddsIndex = ddsArrayList.size(); DicomDataSet ddsSeqItems[] = new DicomDataSet[ddsIndex]; for(int y=0; y<ddsIndex; y++){ ddsSeqItems[y] = ddsArrayList.get(y); } AttributeTag tag = new AttributeTag(element); sequence = new DicomSQElement(tag, ddsSeqItems); } catch(DCSException dcs){ logger.error(dcs.getMessage()); logger.error(this.getClass().getName()+": " + "Exception thrown while extracting Sequence Data."); throw new TextFileExtractionException("Failure to extract Sequence.", dcs); } return sequence; } /** * Extracts a single parsed line, independent of any sequences, and converts it to a DICOM * Element and stores the DICOM Elment into the desired DCF Toolkit specific * DicomDataSet. * * @param line represents the parsed line from the Text file. * @param dds represents the DCF Toolkit specific DicomDataSet object. * @throws TextFileExtractionException */ private void extractDicomElement(String line, DicomDataSet dds) throws TextFileExtractionException{ //Setup the ^ parser. String splitCaratPattern = "\\^"; Pattern pInfo = Pattern.compile(splitCaratPattern); //Setup the | parser. String splitPipePattern = "\\|"; Pattern pFields = Pattern.compile(splitPipePattern); //Setup the , parser. String splitCommaPattern = ","; Pattern pTag = Pattern.compile(splitCommaPattern); String fields[] = new String[4]; fields = pFields.split(line); //Parse the basic data. //Set the Tag variable. String tag = fields[0]; String elementInfo = fields[1]; short elementVR = ' '; try{ AttributeTag aTag = new AttributeTag(tag); //Get the VR. if(elementInfo.charAt(elementInfo.length()-3) == '^'){ String subElementInfo[] = new String[2]; subElementInfo = pInfo.split(line); elementVR = DicomDataDictionary.getVR(subElementInfo[1]); originalPixelData.setValueRepresentationInTextFile(true); } else{ elementVR = DicomDataDictionary.getElementVR(aTag); originalPixelData.setValueRepresentationInTextFile(false); } String multiplicity; //This IF makes sure the VM,ML values exist. if(fields.length > 2){ multiplicity = fields[2]; //This IF makes sure there is a comma delimiter. if(multiplicity.indexOf(",") >= 0){ String value; if(fields.length < 4){ value = ""; } else{ value = fields[3]; if(value.equals("<unknown>")){ value = ""; } } String multiple[] = multiplicity.split(","); //Assign the VM to a temp field. |VM,ML| int vm = Integer.parseInt(multiple[0]); //Assign the ML to a temp field. |VM,ML| int ml = Integer.parseInt(multiple[1]); //100407-WFP-Discovered DCF does not handle VR=OF. Adding IF // statement to ignore any text lines with this VR value. if(elementVR == DicomDataDictionary.getVR("OF")){ return; } //If VM is greater than 1. if(vm > 1){ if(elementVR == DicomDataDictionary.getVR("OB")){ DicomOBElement element; byte[] dataArray = null; //If element already exist, then pull the data and add new value. if(dds.containsElement(aTag)){ //Extract element from dds. element = (DicomOBElement)dds.findElement(aTag); //Extract the existing data from the element ByteBuffer dataBuffer = element.getBuffer(); dataBuffer.get(dataArray); } //Assign value to nuValue. It uses Long primitive to make sure there is no // truncation of data. Long nuValue = new Long(value); //Add nuValue to existing data. 
byte[] nuArray = this.addElementToByteArray(dataArray, nuValue); //You cannot just add the nuArray to the existing element in the dds. // Must create a new element and insert it into the dds. This automatically // overwrites the original element. DicomOBElement updatedElement = new DicomOBElement(aTag, nuArray); dds.insert(updatedElement); } else if(elementVR == DicomDataDictionary.getVR("US")){ DicomUSElement element; int[] dataArray = null; //If element already exist, then pull the data and add new value. if(dds.containsElement(aTag)){ //Extract element from dds. element = (DicomUSElement)dds.findElement(aTag); //Extract the existing data from the element dataArray = element.getUSData(); } //Assign value to nuValue. It uses Long primitive to make sure there is no // truncation of data. Long nuValue = new Long(value); //Add nuValue to existing data. int[] nuArray = this.addElementToIntArray(dataArray, nuValue); //You cannot just add the nuArray to the existing element in the dds. // Must create a new element and insert it into the dds. This automatically // overwrites the original element. DicomUSElement updatedElement = new DicomUSElement(aTag, nuArray); dds.insert(updatedElement); } else if(elementVR == DicomDataDictionary.getVR("SL")){ DicomSLElement element; int[] dataArray = null; //If element already exist, then pull the data and add new value. if(dds.containsElement(aTag)){ //Extract element from dds. element = (DicomSLElement)dds.findElement(aTag); //Extract the existing data from the element dataArray = element.getSLData(); } //Assign value to nuValue. It uses Long primitive to make sure there is no // truncation of data. Long nuValue = new Long(value); //Add nuValue to existing data. int[] nuArray = this.addElementToIntArray(dataArray, nuValue); //You cannot just add the nuArray to the existing element in the dds. // Must create a new element and insert it into the dds. This automatically // overwrites the original element. DicomSLElement updatedElement = new DicomSLElement(aTag, nuArray); dds.insert(updatedElement); } else if(elementVR == DicomDataDictionary.getVR("OW")){ DicomOWElement element; short[] dataArray = null; //If element already exist, then pull the data and add new value. if(dds.containsElement(aTag)){ //Extract element from dds. element = (DicomOWElement)dds.findElement(aTag); //Extract the existing data from the element ShortBuffer dataBuffer = (ShortBuffer)element.getValue(); // 3.2.2c getShortBuffer(); dataBuffer.get(dataArray); } //Assign value to nuValue. It uses Long primitive to make sure there is no // truncation of data. Long nuValue = new Long(value); //Add nuValue to existing data. short[] nuArray = this.addElementToShortArray(dataArray, nuValue); //You cannot just add the nuArray to the existing element in the dds. // Must create a new element and insert it into the dds. This automatically // overwrites the original element. DicomOWElement updatedElement = new DicomOWElement(aTag, nuArray); dds.insert(updatedElement); } else if(elementVR == DicomDataDictionary.getVR("SS")){ DicomSSElement element; short[] dataArray = null; //If element already exist, then pull the data and add new value. if(dds.containsElement(aTag)){ //Extract element from dds. element = (DicomSSElement)dds.findElement(aTag); //Extract the existing data from the element dataArray = element.getSSData(); } //Assign value to nuValue. It uses Long primitive to make sure there is no // truncation of data. Long nuValue = new Long(value); //Add nuValue to existing data. 
short[] nuArray = this.addElementToShortArray(dataArray, nuValue); //You cannot just add the nuArray to the existing element in the dds. // Must create a new element and insert it into the dds. This automatically // overwrites the original element. DicomSSElement updatedElement = new DicomSSElement(aTag, nuArray); dds.insert(updatedElement); } else if(elementVR == DicomDataDictionary.getVR("UL")){ DicomULElement element; int[] dataArray = null; //If element already exist, then pull the data and add new value. if(dds.containsElement(aTag)){ //Extract element from dds. element = (DicomULElement)dds.findElement(aTag); //Extract the existing data from the element dataArray = element.getULData(); } //Assign value to nuValue. It uses Long primitive to make sure there is no // truncation of data. Long nuValue = new Long(value); //Add nuValue to existing data. int[] nuArray = this.addElementToIntArray(dataArray, nuValue); //You cannot just add the nuArray to the existing element in the dds. // Must create a new element and insert it into the dds. This automatically // overwrites the original element. DicomULElement updatedElement = new DicomULElement(aTag, nuArray); dds.insert(updatedElement); } else if(elementVR == DicomDataDictionary.getVR("FL")){ DicomFLElement element; float[] dataArray = null; //If element already exist, then pull the data and add new value. if(dds.containsElement(aTag)){ //Extract element from dds. element = (DicomFLElement)dds.findElement(aTag); //Extract the existing data from the element dataArray = element.getFLData(); } //Assign value to nuValue. It uses Long primitive to make sure there is no // truncation of data. float nuValue = new Float(value); //Add nuValue to existing data. float[] nuArray = this.addElementToFloatArray(dataArray, nuValue); //You cannot just add the nuArray to the existing element in the dds. // Must create a new element and insert it into the dds. This automatically // overwrites the original element. DicomFLElement updatedElement = new DicomFLElement(aTag, nuArray); dds.insert(updatedElement); } else if(elementVR == DicomDataDictionary.getVR("FD")){ DicomFDElement element; double[] dataArray = null; //If element already exist, then pull the data and add new value. if(dds.containsElement(aTag)){ //Extract element from dds. element = (DicomFDElement)dds.findElement(aTag); //Extract the existing data from the element dataArray = element.getFDData(); } //Assign value to nuValue. It uses Long primitive to make sure there is no // truncation of data. double nuValue = new Double(value); //Add nuValue to existing data. double[] nuArray = this.addElementToDoubleArray(dataArray, nuValue); //You cannot just add the nuArray to the existing element in the dds. // Must create a new element and insert it into the dds. This automatically // overwrites the original element. DicomFDElement updatedElement = new DicomFDElement(aTag, nuArray); dds.insert(updatedElement); } else{ // at this point it is assumed value VR is of char string, not binary! String currentValue = ""; if(dds.containsElement(aTag)){ // Extract element from dds -- Must retrieve each individual value this way // as the VM is greater than 1. -- Then concatenate together again. DicomElement element = dds.findElement(aTag); for(int i=0; i<element.vm(); i++){ currentValue = currentValue.concat(element.getStringValue(i)); if(i < element.vm()-1){ currentValue = currentValue.concat("\\"); } } } else{ currentValue = null; } //then Append a "\\" and the Value variable to this Tag. 
DicomElement updatedElement = DicomElementFactory.instance().createElement(aTag, (currentValue+"\\"+value)); dds.insert(updatedElement); } } //If ML is greater than 1. else if(ml > 1){ //then extract this Tag from the dds. String currentValue; if(dds.containsElement(aTag)){ currentValue = dds.getElementStringValue(aTag); } else{ currentValue = null; } //then Append the Value variable to this Tag. if(aTag.group() != 0x7FE0){ DicomElement updatedElement = DicomElementFactory.instance().createElement(aTag, (currentValue+value)); dds.insert(updatedElement); } if(aTag.group() == 0x7FE0){ if(ml == 2){ String parsedValues[] = value.split("="); String lengthValue = parsedValues[1]; String parsedLength[] = lengthValue.split(" "); String pixelDataLength = parsedLength[0].trim(); Long lengthLong = new Long(pixelDataLength); this.originalPixelData.setOriginalLength(lengthLong.longValue()); } if(ml == 3){ String parsedValues[] = value.split("="); String lengthValue = parsedValues[1]; String parsedOffset[] = lengthValue.split(" "); String pixelDataOffset = parsedOffset[0].trim(); Integer offsetInt = new Integer(pixelDataOffset); this.originalPixelData.setOriginalOffset(offsetInt.intValue()); } } } else{ //FUTURE The if sequence works. But I like to find a more efficient way. //AttributeTag aTag = new AttributeTag(tag); if((aTag.element() == 0) || (aTag.element() == 1)){ if(aTag.group() <= 2){ DicomElement nuElement = DicomElementFactory.instance().createElement(aTag, value); dds.insert(nuElement); } } else{ if(aTag.group() != 0x7FE0){ DicomElement nuElement = DicomElementFactory.instance().createElement(aTag, value); dds.insert(nuElement); } if((aTag.group() == 0x7FE0) && (aTag.element() == 0x0010)) { char isCarat = fields[1].charAt((fields[1].length())-3); if(isCarat == '^'){ String desc_vrField = fields[1]; String desc_vr[] = desc_vrField.split("\\^"); String textfileVR = desc_vr[1]; originalPixelData.setOriginalVR(DicomDataDictionary.getVR(textfileVR)); } else{ short bitsAllocated = (short)dds.getElementIntValue(DCM.E_BITS_ALLOCATED); this.originalPixelData.setBitsAllocated(bitsAllocated); } } } } //End If for VM/VL. } } else if (fields.length == 2){ short vr = DicomDataDictionary.getElementVR(aTag); if(DicomDataDictionary.getVRString(vr).equals("SQ")){ DicomSQElement nuElement = new DicomSQElement( aTag, (DicomDataSet[])null ); dds.insert(nuElement); } } } catch(DCSException dcs){ logger.error(dcs.getMessage()); logger.error(this.getClass().getName()+": " + "Exception thrown while extracting Dicom Element."); throw new TextFileExtractionException("Failure to extract Dicom Element.", dcs); } } /** * Parse the Text file HIS Update section. The Text file is made up of three sections, * "Data1", "DICOM Data" and optionally "HIS UPDATE". Here only the HIS UPDATE section * is read and decoded. * * @param buffer represents the Text file now in the form of a BufferReader object. * @throws TextFileExtractionException */ private HashMap<String, String> parseHisUpdates(BufferedReader buffer) throws TextFileExtractionException { String textLine = ""; HashMap<String, String> hisUpdates=null; // testLogger.info("... Parsing text data update section ..."); try { // parse line for initial "gggg,eeee" or "gggg,eeee gggg,eeee" tags do { textLine = this.getNextTextLine(buffer); } while(!(textLine.startsWith("$$BEGIN HIS UPDATE"))); } catch(TextFileExtractionException tfee) { // catch all logger.info("Warning: NO HIS Update section in text data !!!"); logger.debug("Warning: obsolete TXT format NO VistA updates !!! 
"); return hisUpdates; } textLine = this.getNextTextLine(buffer); // skip to first HIS update line hisUpdates=new HashMap<String, String>(); while(!(textLine.startsWith("$$END HIS UPDATE"))) { try { // parse line for initial "gggg,eeee" or "gggg,eeee gggg,eeee" tags // and ending text value String splitPipePattern = "\\|"; Pattern pFields = Pattern.compile(splitPipePattern); String fields[] = new String[4]; fields = pFields.split(textLine); if ( ((fields[0].length()==9) || (fields[0].length()==18)) && (fields[3].length()>0)) { // place tag(s)-value pairs to HashMap hisUpdates.put(fields[0], fields[3]); } } catch(Throwable t) { // catch all } textLine = this.getNextTextLine(buffer); } // end wile if (hisUpdates.isEmpty()) return null; else return hisUpdates; } /** * Reads next line from the Text file, which is now in the form of BufferedReader * object. This method checks for EOF, nulls, and blank lines. This is primarily in * case the Text file was not properly formatted or corrupted. If the line is valid, * the line is returned. * * @param in represents the BufferedReader object. * @return represents the next line read from the BufferedReader object. * @throws TextFileExtractionException */ private String getNextTextLine(BufferedReader in) throws TextFileExtractionException{ String line = null; try{ do{ if((line = in.readLine()) == null){ throw new TextFileExtractionException(); } }while(line.equals("")); } catch(IOException io){ logger.error(io.getMessage()); logger.error(this.getClass().getName()+": " + "Exception thrown while getting next line from Text Line."); throw new TextFileExtractionException("Failure to get next line.", io); } return line; } private boolean isGroupToBeAdded(int Group){ boolean addGroup = true; //Do not allow Odd Groups if((Group % 2) != 0){ addGroup = false; } //Do not allow Icon Image Sequences if(Group == 0x0088){ addGroup = false; } //Need to remove Group 0002 elements. Found bug of sending Group 0002 elements // to CSTore SCP device. This is not valid. I strongly believe to add it here. I do // not think any code between here and the sending uses this group. But I could be // wrong. 
if(Group == 0x0002){ addGroup = false; } return addGroup; } private void customTGAElementCleanup(){ try{ if(this.dicomDataSet.containsElement(new AttributeTag("0008,0008"))){ DicomCSElement objectType = (DicomCSElement)this.dicomDataSet.findElement(new AttributeTag("0008,0008")); String[] values = objectType.getStringData(); ArrayList<String> nuValueArray = new ArrayList<String>(values.length); for(int i=0; i<values.length; i++){ nuValueArray.add(values[i]); } while(nuValueArray.contains("")){ nuValueArray.remove(""); } nuValueArray.trimToSize(); String[] nuValues = new String[nuValueArray.size()]; Iterator<String> iter = nuValueArray.iterator(); int j = 0; while(iter.hasNext()){ nuValues[j] = iter.next(); j++; } DicomCSElement nuObjectType = new DicomCSElement(new AttributeTag("0008,0008"), nuValues); this.dicomDataSet.insert(nuObjectType); } } catch(DCSException dcsX){ //do nothing } } private byte[] addElementToByteArray(byte[] oldArray, Long nuValue){ if(oldArray == null){ oldArray = new byte[0]; } int length = oldArray.length; int index = length+1; byte nuArray[] = new byte[index]; System.arraycopy(oldArray, 0, nuArray, 0, length); nuArray[index-1] = nuValue.byteValue(); return nuArray; } private short[] addElementToShortArray(short[] oldArray, Long nuValue){ int length = oldArray.length; int index = length+1; short nuArray[] = new short[index]; System.arraycopy(oldArray, 0, nuArray, 0, length); nuArray[index-1] = nuValue.shortValue(); return nuArray; } private int[] addElementToIntArray(int[] oldArray, Long nuValue){ int length = oldArray.length; int index = length+1; int nuArray[] = new int[index]; System.arraycopy(oldArray, 0, nuArray, 0, length); nuArray[index-1] = nuValue.intValue(); return nuArray; } private float[] addElementToFloatArray(float[] oldArray, Float nuValue){ int length = oldArray.length; int index = length+1; float nuArray[] = new float[index]; System.arraycopy(oldArray, 0, nuArray, 0, length); nuArray[index-1] = nuValue.floatValue(); return nuArray; } private double[] addElementToDoubleArray(double[] oldArray, Double nuValue){ int length = oldArray.length; int index = length+1; double nuArray[] = new double[index]; System.arraycopy(oldArray, 0, nuArray, 0, length); nuArray[index-1] = nuValue.doubleValue(); return nuArray; } }
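// The addElementToByteArray/ShortArray/IntArray/FloatArray/DoubleArray helpers above all use
// the same grow-by-one idiom: allocate a primitive array one slot longer, copy the old contents
// with System.arraycopy, and write the new value into the last slot. The standalone sketch below
// illustrates that idiom with the null guard that only the byte[] variant applies; the class and
// method names here are illustrative and not part of the original file.
final class ArrayAppendSketch {
    // Returns a copy of oldArray with nuValue appended; a null oldArray is treated as empty.
    static int[] appendInt(int[] oldArray, int nuValue) {
        if (oldArray == null) {
            oldArray = new int[0];
        }
        int[] nuArray = new int[oldArray.length + 1];
        System.arraycopy(oldArray, 0, nuArray, 0, oldArray.length);
        nuArray[nuArray.length - 1] = nuValue;
        return nuArray;
    }

    public static void main(String[] args) {
        int[] values = null;
        values = appendInt(values, 10);  // {10}
        values = appendInt(values, 20);  // {10, 20}
        System.out.println(java.util.Arrays.toString(values));
    }
}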
/* * JetS3t : Java S3 Toolkit * Project hosted at http://bitbucket.org/jmurty/jets3t/ * * Copyright 2008 James Murty * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jets3t.tests; import java.io.BufferedInputStream; import java.io.BufferedOutputStream; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.net.Socket; import java.util.Date; import java.util.HashMap; import java.util.Map; import java.util.Properties; import javax.net.SocketFactory; import javax.net.ssl.SSLSocketFactory; import org.jets3t.service.security.AWSCredentials; import org.jets3t.service.utils.RestUtils; import org.jets3t.service.utils.ServiceUtils; /** * Very basic client implementation able to PUT files into S3 using the Java * socket implementation directly, with no HTTP library or transport of any kind. * <p> * To use this class: * <ul> * <li>provide a <tt>test.properties</tt> file in the classpath * that contains the settings <tt>aws.accesskey</tt> and <tt>aws.secretkey<tt></li> * <li>modify the <tt>filename</tt> and <tt>bucketName</tt> variables in main() to * provide the file you wish to upload, and the <b>already existing</b> buckety you * want to upload it to in S3</li> * </ul> * * @author James Murty */ public class PutViaSocket { private static String TEST_PROPERTIES_FILENAME = "test.properties"; private static Properties loadTestProperties() throws IOException { InputStream propertiesIS = ClassLoader.getSystemResourceAsStream(TEST_PROPERTIES_FILENAME); if (propertiesIS == null) { throw new IOException("Unable to load test properties file from classpath: " + TEST_PROPERTIES_FILENAME); } Properties testProperties = new Properties(); testProperties.load(propertiesIS); return testProperties; } private static AWSCredentials loadAWSCredentials(Properties testProperties) throws Exception { return new AWSCredentials( testProperties.getProperty("aws.accesskey"), testProperties.getProperty("aws.secretkey")); } private static String generateAuthorizationString(AWSCredentials awsCredentials, String url, Map headersMap) throws Exception { String canonicalString = RestUtils.makeServiceCanonicalString( "PUT", url, headersMap, null, null, null); // Sign the canonical string. 
String signedCanonical = ServiceUtils.signWithHmacSha1( awsCredentials.getSecretKey(), canonicalString); return "AWS " + awsCredentials.getAccessKey() + ":" + signedCanonical; } public static void main(String[] args) throws Exception { Properties testProperties = loadTestProperties(); AWSCredentials awsCredentials = loadAWSCredentials(testProperties); String filename = testProperties.getProperty("filename"); String bucketName = testProperties.getProperty("bucketName"); String contentType = testProperties.getProperty("contentType", "application/octet-stream"); String serverHostname = testProperties.getProperty("serverHostname", "s3.amazonaws.com"); String bufferSizeStr = testProperties.getProperty("bufferSize", "2048"); int byteBufferSize = Integer.parseInt(bufferSizeStr); int port = 80; boolean isSslEnabled; String enableSslStr = testProperties.getProperty("enableSSL", "false"); if ("true".equalsIgnoreCase(enableSslStr)) { isSslEnabled = true; port = 443; } else if ("false".equalsIgnoreCase(enableSslStr)) { isSslEnabled = false; } else { throw new IllegalArgumentException("Boolean value '" + enableSslStr + "' for property 'enableSSL' must be 'true' or 'false' (case-insensitive)"); } // Over-ride default server ports (80, 443) if a special port is configured. String serverPortStr = testProperties.getProperty("serverPort", null); if (serverPortStr != null) { port = Integer.parseInt(serverPortStr); } boolean isS3AuthEnabled; String disableS3FeaturesStr = testProperties.getProperty("disableS3Features", "false"); if ("true".equalsIgnoreCase(disableS3FeaturesStr)) { isS3AuthEnabled = false; } else if ("false".equalsIgnoreCase(disableS3FeaturesStr)) { isS3AuthEnabled = true; } else { throw new IllegalArgumentException("Boolean value '" + disableS3FeaturesStr + "' for property 'disableS3Features' must be 'true' or 'false' (case-insensitive)"); } boolean isBugBehaviourEnabled; String enableBugBehaviourStr = testProperties.getProperty("enableBugBehaviour", "false"); if ("true".equalsIgnoreCase(enableBugBehaviourStr)) { isBugBehaviourEnabled = true; } else if ("false".equalsIgnoreCase(enableBugBehaviourStr)) { isBugBehaviourEnabled = false; } else { throw new IllegalArgumentException("Boolean value '" + enableBugBehaviourStr + "' for property 'enableBugBehaviour' must be 'true' or 'false' (case-insensitive)"); } System.out.println("AWS Access Key: " + awsCredentials.getAccessKey()); System.out.println("filename: " + filename); System.out.println("bucketName: " + bucketName); System.out.println("contentType: " + contentType); System.out.println("serverHostname: " + serverHostname); System.out.println("serverPort: " + port); System.out.println("bufferSize: " + byteBufferSize); System.out.println("isSslEnabled? " + isSslEnabled); System.out.println("isS3AuthEnabled? " + isS3AuthEnabled); System.out.println("isBugBehaviourEnabled? 
" + isBugBehaviourEnabled); File file = new File(filename); String url = "/" + bucketName + "/" + file.getName(); System.out.println("\nComputing MD5 hash of file: " + file.getName()); long fileSize = file.length(); byte[] md5Hash = ServiceUtils.computeMD5Hash( new BufferedInputStream(new FileInputStream(file))); System.out.println("MD5 hash of file B64=" + ServiceUtils.toBase64(md5Hash) + " Hex=" + ServiceUtils.toHex(md5Hash)); SocketFactory socketFactory = null; if (isSslEnabled) { socketFactory = SSLSocketFactory.getDefault(); } else { socketFactory = SocketFactory.getDefault(); } System.out.println("\nConnecting to " + serverHostname + ":" + port); Socket socket = socketFactory.createSocket(serverHostname, port); socket.setKeepAlive(true); socket.setSoTimeout(60000); socket.setTcpNoDelay(true); System.out.println("Connected to " + socket.getInetAddress().toString() + ":" + socket.getPort()); OutputStream out = new BufferedOutputStream(socket.getOutputStream(), byteBufferSize); InputStream in = socket.getInputStream(); Map headersMap = new HashMap(); headersMap.put("Content-MD5", ServiceUtils.toBase64(md5Hash)); headersMap.put("Content-Type", contentType); headersMap.put("Date", ServiceUtils.formatRfc822Date(new Date())); headersMap.put("S3Authorization", generateAuthorizationString(awsCredentials, url,headersMap)); String headers = ""; if (isBugBehaviourEnabled) { // Original Headers that exhibit the Bad Digest bug. headers = "PUT " + url + " HTTP/1.1\r\n" + "Content-Length: " + fileSize + "\r\n" + "Content-MD5: " + headersMap.get("Content-MD5") + "\r\n" + "Content-Type: " + headersMap.get("Content-Type") + "\r\n" + "Date: " + headersMap.get("Date") + "\r\n" + (isS3AuthEnabled ? "Authorization: " + headersMap.get("S3Authorization") + "\r\n" : "") + "Host: " + serverHostname + "\r\n" + "\r\n"; } else { // Complete Header set re-ordered following s3curl example, has succeeded at least once. headers = "PUT " + url + " HTTP/1.1\r\n" + "User-Agent: PutViaSocket/1.0\r\n" + "Host: " + serverHostname + "\r\n" + "Accept: */*\r\n" + "Date: " + headersMap.get("Date") + "\r\n" + (isS3AuthEnabled ? "Authorization: " + headersMap.get("S3Authorization") + "\r\n" : "") + "Content-Length: " + fileSize + "\r\n" + "Content-MD5: " + headersMap.get("Content-MD5") + "\r\n" + "Content-Type: " + headersMap.get("Content-Type") + "\r\n" + "Expect: 100-continue\r\n" + "\r\n"; } // Output PUT Headers System.out.println("\nREQUEST:"); System.out.print(headers); System.out.println(); byte[] data = new byte[byteBufferSize]; int dataRead = 0; long megabytesSent = 0; out.write(headers.getBytes()); out.flush(); if (!isBugBehaviourEnabled) { // Handle Expect: 100-Continue Thread.sleep(500); boolean isContinueOK = false; if (in.available() > 0) { System.out.println("\nResponse to Expect: 100-Continue..."); while ((dataRead = in.read(data)) != -1) { String line = new String(data, 0, dataRead); System.out.print(line); if (line.indexOf("HTTP/1.1 100 Continue") >= 0) { isContinueOK = true; break; } } if (!isContinueOK) { // Uh oh, something must have gone wrong. Write the server's response and quit. 
System.out.println("\n\nQuitting without performing upload"); in.close(); out.close(); return; } } } FileInputStream fis = new FileInputStream(file); long fileBytesTransferred = 0; int failureCount = 0; int MAX_FAILURE_RETRIES = 10; // PUT Data System.out.println("Uploading " + fileSize + " bytes"); while ((dataRead = fis.read(data)) != -1) { try { out.write(data, 0, dataRead); fileBytesTransferred += dataRead; if (fileBytesTransferred / (1024 * 1024) > megabytesSent) { System.out.println("Uploaded " + (int)(fileBytesTransferred / (double)(1024 * 1024)) + "MB of " + (fileSize / (double)(1024 * 1024)) + "MB"); megabytesSent = fileBytesTransferred / (1024 * 1024); } // Check for any data available in the socket input/error streams if (in.available() > 0) { // Uh oh, this shouldn't happen. We'd better stop the upload and print out the error. System.out.println("\nERROR: Unexpected data in server input stream mid-transfer, halting upload"); break; } } catch (Exception e) { // Try to recover from the failure (it's unlikely this will ever work) failureCount++; if (failureCount <= MAX_FAILURE_RETRIES) { System.out.println("SocketException " + failureCount + ", will retry: " + e); Thread.sleep(500); } else { break; } } } out.flush(); fis.close(); if (fileBytesTransferred < fileSize) { System.out.println("Upload did not complete, only " + fileBytesTransferred + " of " + fileSize + " bytes sent"); } else { System.out.println("Upload completed"); } // Read response System.out.println("\nRESPONSE:"); while ((dataRead = in.read(data)) != -1) { String line = new String(data, 0, dataRead); System.out.print(line); if (line.endsWith("\r\n\r\n")) { break; } } in.close(); out.close(); socket.close(); } }
/* * Copyright 2015 OpenCB * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.opencb.cellbase.mongodb.db.core; import com.mongodb.BasicDBList; import com.mongodb.BasicDBObject; import com.mongodb.DBObject; import com.mongodb.QueryBuilder; import org.opencb.biodata.models.core.Region; import org.opencb.cellbase.core.db.api.core.GeneDBAdaptor; import org.opencb.cellbase.core.db.api.variation.ClinicalDBAdaptor; import org.opencb.cellbase.core.variant.annotation.VariantAnnotationUtils; import org.opencb.cellbase.mongodb.MongoDBCollectionConfiguration; import org.opencb.cellbase.mongodb.db.MongoDBAdaptor; import org.opencb.datastore.core.QueryOptions; import org.opencb.datastore.core.QueryResult; import org.opencb.datastore.mongodb.MongoDataStore; import java.util.*; public class GeneMongoDBAdaptor extends MongoDBAdaptor implements GeneDBAdaptor { private int geneChunkSize = MongoDBCollectionConfiguration.GENE_CHUNK_SIZE; private ClinicalDBAdaptor clinicalDBAdaptor; // public GeneMongoDBAdaptor(DB db) { // super(db); // } // @Deprecated // public GeneMongoDBAdaptor(DB db, String species, String assembly) { // super(db, species, assembly); // mongoDBCollection = db.getCollection("gene"); // // logger.info("GeneMongoDBAdaptor: in 'constructor'"); // } public GeneMongoDBAdaptor(String species, String assembly, MongoDataStore mongoDataStore) { super(species, assembly, mongoDataStore); mongoDBCollection = mongoDataStore.getCollection("gene"); logger.debug("GeneMongoDBAdaptor: in 'constructor'"); } public ClinicalDBAdaptor getClinicalDBAdaptor() { return clinicalDBAdaptor; } public void setClinicalDBAdaptor(ClinicalDBAdaptor clinicalDBAdaptor) { this.clinicalDBAdaptor = clinicalDBAdaptor; } @Override public QueryResult first() { return mongoDBCollection.find(new BasicDBObject(), new QueryOptions("limit", 1)); } @Override public QueryResult count() { return mongoDBCollection.count(); } @Override public QueryResult stats() { return null; } @Override public QueryResult getAll(QueryOptions options) { QueryBuilder builder = new QueryBuilder(); List<String> biotypes = options.getAsStringList("biotype"); if (biotypes != null && biotypes.size() > 0) { BasicDBList biotypeIds = new BasicDBList(); biotypeIds.addAll(biotypes); builder = builder.and("biotype").in(biotypeIds); } return executeQuery("result", builder.get(), options); } public QueryResult next(String id, QueryOptions options) { QueryOptions options1 = new QueryOptions(); options1.put("include", Arrays.asList("chromosome", "start", "strand")); QueryResult queryResult = getAllById(id, options1); if (queryResult != null && queryResult.getResult() != null) { DBObject gene = (DBObject) queryResult.getResult().get(0); String chromosome = gene.get("chromosome").toString(); // options.put("strand", gene.get("strand").toString()); int start = Integer.parseInt(gene.get("start").toString()); return next(chromosome, start, options); } return null; } @Override public QueryResult next(String chromosome, int position, QueryOptions options) { return next(chromosome, 
position + 1, options, mongoDBCollection); } @Override public QueryResult getAllById(String id, QueryOptions options) { return getAllByIdList(Arrays.asList(id), options).get(0); } @Override public List<QueryResult> getAllByIdList(List<String> idList, QueryOptions options) { List<DBObject> queries = new ArrayList<>(idList.size()); for (String id : idList) { QueryBuilder builder = QueryBuilder.start("transcripts.xrefs.id").is(id); queries.add(builder.get()); } // options = addExcludeReturnFields("transcripts", options); // return executeQueryList(idList, queries, options); return executeQueryList2(idList, queries, options); } @Override public QueryResult getStatsById(String id, QueryOptions options) { Map<String, Object> stats = new HashMap<>(); QueryResult queryResult = new QueryResult(); queryResult.setId(id); QueryBuilder geneBuilder = QueryBuilder.start("transcripts.xrefs.id").is(id); long dbTimeStart = System.currentTimeMillis(); QueryResult geneQueryResult = executeQuery(id, geneBuilder.get(), new QueryOptions()); // TODO: clinical variant summary is only provided for ClinVar (hardcoded below) QueryOptions clinicalQueryOptions = new QueryOptions("source", "clinvar"); clinicalQueryOptions.put("include", "annot.consequenceTypes.soTerms,clinvarSet.referenceClinVarAssertion.clinicalSignificance.description"); QueryResult clinicalQueryResult = clinicalDBAdaptor.getByGeneId(id, clinicalQueryOptions); long dbTimeEnd = System.currentTimeMillis(); queryResult.setDbTime(Long.valueOf(dbTimeEnd - dbTimeStart).intValue()); if (geneQueryResult.getNumResults() > 0) { queryResult.setNumResults(1); stats = setCoreGeneStats(geneQueryResult, stats); stats = setVariantStats(clinicalQueryResult, stats); queryResult.setResult(Collections.singletonList(stats)); } return queryResult; // gene name // ensembl gene id // chr // start // end // sequence length // num transcripts // breakdown num transcripts by biotype // num exons // num drug interactions // Clinical Variants { // # // Breakdown by clinical significance // Breakdown by SO // } // options = addExcludeReturnFields("transcripts", options); // return executeQueryList(idList, queries, options); } private Map<String, Object> setVariantStats(QueryResult queryResult, Map<String, Object> stats) { if (queryResult != null && queryResult.getNumResults() > 0) { Map<String, Map> clinicalVariantStats = new HashMap<>(); Map<String, Integer> clinicalSignificanceSummary = new HashMap<>(); Map<String, Map> soSummary = new HashMap<>(); for (Object result : queryResult.getResult()) { clinicalSignificanceSummary = updateClinicalSignificanceSummary((BasicDBObject) result, clinicalSignificanceSummary); soSummary = updateSoSummary((BasicDBObject) result, soSummary); } clinicalVariantStats.put("clinicalSignificanceSummary", clinicalSignificanceSummary); clinicalVariantStats.put("soSummary", soSummary); stats.put("clinicalVariantStats", clinicalVariantStats); } return stats; } private Map<String, Map> updateSoSummary(BasicDBObject result, Map<String, Map> soSummary) { BasicDBObject basicDBObject = (BasicDBObject) result.get("annot"); if (basicDBObject != null) { BasicDBList basicDBList = (BasicDBList) basicDBObject.get("consequenceTypes"); if (basicDBList != null) { basicDBObject = getMostSevereSOTerm(basicDBList); if (basicDBObject != null) { String soAccesion = (String) basicDBObject.get("soAccession"); if (soSummary.containsKey(soAccesion)) { Integer currentCount = (Integer) soSummary.get(soAccesion).get("count"); soSummary.get(soAccesion).put("count", currentCount + 
1); } else { String soName = (String) basicDBObject.get("soName"); Map<String, Object> soSummaryMap = new HashMap<>(2); soSummaryMap.put("soName", soName); soSummaryMap.put("count", 1); soSummary.put(soAccesion, soSummaryMap); } } } } return soSummary; } private BasicDBObject getMostSevereSOTerm(BasicDBList consequenceTypeDBList) { BasicDBObject mostSevereSODBObject = null; Integer maxSeverity = 0; for (Object consequenceTypeObject : consequenceTypeDBList) { BasicDBList soDBList = (BasicDBList) ((BasicDBObject) consequenceTypeObject).get("soTerms"); if (soDBList != null) { for (Object soObject : soDBList) { BasicDBObject soDBObject = (BasicDBObject) soObject; String soName = (String) soDBObject.get("soName"); if (VariantAnnotationUtils.SO_SEVERITY.containsKey(soName)) { Integer severity = VariantAnnotationUtils.SO_SEVERITY.get(soName); if (severity > maxSeverity) { maxSeverity = severity; mostSevereSODBObject = soDBObject; } } } } } return mostSevereSODBObject; } private Map<String, Integer> updateClinicalSignificanceSummary(BasicDBObject result, Map<String, Integer> clinicalSignificanceSummary) { BasicDBObject basicDBObject = (BasicDBObject) result.get("clinvarSet"); if (basicDBObject != null) { basicDBObject = (BasicDBObject) basicDBObject.get("referenceClinVarAssertion"); if (basicDBObject != null) { basicDBObject = (BasicDBObject) basicDBObject.get("clinicalSignificance"); if (basicDBObject != null) { String clinicalSignificance = (String) basicDBObject.get("description"); if (clinicalSignificance != null) { if (clinicalSignificanceSummary.containsKey(clinicalSignificance)) { clinicalSignificanceSummary .put(clinicalSignificance, clinicalSignificanceSummary.get(clinicalSignificance) + 1); } else { clinicalSignificanceSummary.put(clinicalSignificance, 1); } } } } } return clinicalSignificanceSummary; } private Map<String, Object> setCoreGeneStats(QueryResult queryResult, Map<String, Object> stats) { BasicDBObject resultDBObject = (BasicDBObject) queryResult.getResult().get(0); stats.put("name", resultDBObject.get("name")); stats.put("id", resultDBObject.get("id")); stats.put("chromosome", resultDBObject.get("chromosome")); int start = (int) resultDBObject.get("start"); stats.put("start", start); int end = (int) resultDBObject.get("end"); stats.put("start", end); stats.put("length", end - start + 1); return stats; } @Override public QueryResult getAllByXref(String id, QueryOptions options) { return null; } @Override public List<QueryResult> getAllByXrefList(List<String> idList, QueryOptions options) { return null; } @Override public QueryResult getAllBiotypes(QueryOptions options) { DBObject query = null; if (options != null && options.get("chromosome") != null) { query = QueryBuilder.start("chromosome").is(options.get("chromosome")).get(); } return executeDistinct("distinct", "biotype", query); } @Override public QueryResult getAllTargetsByTf(String tfId, QueryOptions queryOptions) { return null; } @Override public List<QueryResult> getAllTargetsByTfList(List<String> tfIdList, QueryOptions queryOptions) { return null; } @Override public List<QueryResult> getAllByRegionList(List<Region> regions, QueryOptions options) { List<DBObject> queries = new ArrayList<>(); List<Object> biotypes = options.getList("biotype", null); BasicDBList biotypeIds = new BasicDBList(); if (biotypes != null && biotypes.size() > 0) { biotypeIds.addAll(biotypes); } List<String> ids = new ArrayList<>(regions.size()); for (Region region : regions) { QueryBuilder builder = null; // If regions is 1 position then query 
can be optimize using chunks if (region.getStart() == region.getEnd()) { builder = QueryBuilder.start("_chunkIds") .is(getChunkIdPrefix(region.getChromosome(), region.getStart(), geneChunkSize)).and("end") .greaterThanEquals(region.getStart()).and("start").lessThanEquals(region.getEnd()); } else { builder = QueryBuilder.start("chromosome").is(region.getChromosome()).and("end") .greaterThanEquals(region.getStart()).and("start").lessThanEquals(region.getEnd()); } if (biotypeIds.size() > 0) { builder = builder.and("biotype").in(biotypeIds); } queries.add(builder.get()); ids.add(region.toString()); } return executeQueryList2(ids, queries, options); // return executeQueryList(ids, queries, options); } @Override public QueryResult getIntervalFrequencies(Region region, QueryOptions queryOptions) { return super.getIntervalFrequencies(region, queryOptions); } @Override public List<QueryResult> getAllIntervalFrequencies(List<Region> regions, QueryOptions queryOptions) { return super.getAllIntervalFrequencies(regions, queryOptions); } }
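// A minimal usage sketch for the adaptor above, assuming an already-configured MongoDataStore
// pointing at a CellBase "gene" collection. The species/assembly strings and region coordinates
// are placeholders, and the Region(chromosome, start, end) constructor is assumed from common
// biodata usage; only the adaptor method signatures are taken from this file.
class GeneAdaptorUsageSketch {
    static void example(org.opencb.datastore.mongodb.MongoDataStore mongoDataStore) {
        GeneMongoDBAdaptor geneAdaptor =
                new GeneMongoDBAdaptor("hsapiens", "GRCh37", mongoDataStore);

        org.opencb.datastore.core.QueryOptions options = new org.opencb.datastore.core.QueryOptions();
        // Genes are filtered by biotype via the "biotype" option, as read in getAllByRegionList().
        options.put("biotype", java.util.Arrays.asList("protein_coding"));

        // Region-based lookup; a single-position region lets the adaptor use its _chunkIds index.
        java.util.List<org.opencb.biodata.models.core.Region> regions = java.util.Arrays.asList(
                new org.opencb.biodata.models.core.Region("1", 150000, 250000));
        java.util.List<org.opencb.datastore.core.QueryResult> results =
                geneAdaptor.getAllByRegionList(regions, options);
        System.out.println("Query results: " + results.size());
    }
}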
/* * Copyright 2014-present Open Networking Laboratory * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.onlab.packet; import java.io.UnsupportedEncodingException; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.ListIterator; import static com.google.common.base.Preconditions.checkArgument; import static org.onlab.packet.PacketUtils.checkInput; import static com.google.common.base.MoreObjects.toStringHelper; /** * Representation of an DHCP Packet. */ public class DHCP extends BasePacket { /** * Dynamic Host Configuration Protocol packet. * ------------------------------------------ |op (1) | htype(1) | hlen(1) | * hops(1) | ------------------------------------------ | xid (4) | * ------------------------------------------ | secs (2) | flags (2) | * ------------------------------------------ | ciaddr (4) | * ------------------------------------------ | yiaddr (4) | * ------------------------------------------ | siaddr (4) | * ------------------------------------------ | giaddr (4) | * ------------------------------------------ | chaddr (16) | * ------------------------------------------ | sname (64) | * ------------------------------------------ | file (128) | * ------------------------------------------ | options (312) | * ------------------------------------------ * */ // Header + magic without options public static final int MIN_HEADER_LENGTH = 240; public static final byte OPCODE_REQUEST = 0x1; public static final byte OPCODE_REPLY = 0x2; public static final byte HWTYPE_ETHERNET = 0x1; public enum DHCPOptionCode { OptionCode_SubnetMask((byte) 1), OptionCode_RouterAddress((byte) 3), OptionCode_DomainServer((byte) 6), OptionCode_HostName((byte) 12), OptionCode_DomainName((byte) 15), OptionCode_BroadcastAddress((byte) 28), OptionCode_RequestedIP((byte) 50), OptionCode_LeaseTime((byte) 51), OptionCode_MessageType((byte) 53), OptionCode_DHCPServerIp((byte) 54), OptionCode_RequestedParameters((byte) 55), OptionCode_RenewalTime((byte) 58), OPtionCode_RebindingTime((byte) 59), OptionCode_ClientID((byte) 61), OptionCode_END((byte) 255); protected byte value; private DHCPOptionCode(final byte value) { this.value = value; } public byte getValue() { return this.value; } } protected byte opCode; protected byte hardwareType; protected byte hardwareAddressLength; protected byte hops; protected int transactionId; protected short seconds; protected short flags; protected int clientIPAddress; protected int yourIPAddress; protected int serverIPAddress; protected int gatewayIPAddress; protected byte[] clientHardwareAddress; protected String serverName; protected String bootFileName; protected List<DHCPOption> options = new ArrayList<DHCPOption>(); /** * @return the opCode */ public byte getOpCode() { return this.opCode; } /** * @param opCode * the opCode to set * @return this */ public DHCP setOpCode(final byte opCode) { this.opCode = opCode; return this; } /** * @return the hardwareType */ public byte getHardwareType() { return 
this.hardwareType; } /** * @param hardwareType * the hardwareType to set * @return this */ public DHCP setHardwareType(final byte hardwareType) { this.hardwareType = hardwareType; return this; } /** * @return the hardwareAddressLength */ public byte getHardwareAddressLength() { return this.hardwareAddressLength; } /** * @param hardwareAddressLength * the hardwareAddressLength to set * @return this */ public DHCP setHardwareAddressLength(final byte hardwareAddressLength) { this.hardwareAddressLength = hardwareAddressLength; return this; } /** * @return the hops */ public byte getHops() { return this.hops; } /** * @param hops * the hops to set * @return this */ public DHCP setHops(final byte hops) { this.hops = hops; return this; } /** * @return the transactionId */ public int getTransactionId() { return this.transactionId; } /** * @param transactionId * the transactionId to set * @return this */ public DHCP setTransactionId(final int transactionId) { this.transactionId = transactionId; return this; } /** * @return the seconds */ public short getSeconds() { return this.seconds; } /** * @param seconds * the seconds to set * @return this */ public DHCP setSeconds(final short seconds) { this.seconds = seconds; return this; } /** * @return the flags */ public short getFlags() { return this.flags; } /** * @param flags * the flags to set * @return this */ public DHCP setFlags(final short flags) { this.flags = flags; return this; } /** * @return the clientIPAddress */ public int getClientIPAddress() { return this.clientIPAddress; } /** * @param clientIPAddress * the clientIPAddress to set * @return this */ public DHCP setClientIPAddress(final int clientIPAddress) { this.clientIPAddress = clientIPAddress; return this; } /** * @return the yourIPAddress */ public int getYourIPAddress() { return this.yourIPAddress; } /** * @param yourIPAddress * the yourIPAddress to set * @return this */ public DHCP setYourIPAddress(final int yourIPAddress) { this.yourIPAddress = yourIPAddress; return this; } /** * @return the serverIPAddress */ public int getServerIPAddress() { return this.serverIPAddress; } /** * @param serverIPAddress * the serverIPAddress to set * @return this */ public DHCP setServerIPAddress(final int serverIPAddress) { this.serverIPAddress = serverIPAddress; return this; } /** * @return the gatewayIPAddress */ public int getGatewayIPAddress() { return this.gatewayIPAddress; } /** * @param gatewayIPAddress * the gatewayIPAddress to set * @return this */ public DHCP setGatewayIPAddress(final int gatewayIPAddress) { this.gatewayIPAddress = gatewayIPAddress; return this; } /** * @return the clientHardwareAddress */ public byte[] getClientHardwareAddress() { return this.clientHardwareAddress; } /** * @param clientHardwareAddress * the clientHardwareAddress to set * @return this */ public DHCP setClientHardwareAddress(final byte[] clientHardwareAddress) { this.clientHardwareAddress = clientHardwareAddress; return this; } /** * Gets a specific DHCP option parameter. 
* * @param optionCode * The option code to get * @return The value of the option if it exists, null otherwise */ public DHCPOption getOption(final DHCPOptionCode optionCode) { for (final DHCPOption opt : this.options) { if (opt.code == optionCode.value) { return opt; } } return null; } /** * @return the options */ public List<DHCPOption> getOptions() { return this.options; } /** * @param options * the options to set * @return this */ public DHCP setOptions(final List<DHCPOption> options) { this.options = options; return this; } /** * @return the packetType base on option 53 */ public DHCPPacketType getPacketType() { final ListIterator<DHCPOption> lit = this.options.listIterator(); while (lit.hasNext()) { final DHCPOption option = lit.next(); // only care option 53 if (option.getCode() == 53) { return DHCPPacketType.getType(option.getData()[0]); } } return null; } /** * @return the serverName */ public String getServerName() { return this.serverName; } /** * @param server * the serverName to set * @return this */ public DHCP setServerName(final String server) { this.serverName = server; return this; } /** * @return the bootFileName */ public String getBootFileName() { return this.bootFileName; } /** * @param bootFile * the bootFileName to set * @return this */ public DHCP setBootFileName(final String bootFile) { this.bootFileName = bootFile; return this; } @Override public byte[] serialize() { // not guaranteed to retain length/exact format this.resetChecksum(); // minimum size 240 including magic cookie, options generally padded to // 300 int optionsLength = 0; for (final DHCPOption option : this.options) { if (option.getCode() == 0 || option.getCode() == ((byte) 255)) { optionsLength += 1; } else { optionsLength += 2 + (0xff & option.getLength()); } } int optionsPadLength = 0; if (optionsLength < 60) { optionsPadLength = 60 - optionsLength; } final byte[] data = new byte[240 + optionsLength + optionsPadLength]; final ByteBuffer bb = ByteBuffer.wrap(data); bb.put(this.opCode); bb.put(this.hardwareType); bb.put(this.hardwareAddressLength); bb.put(this.hops); bb.putInt(this.transactionId); bb.putShort(this.seconds); bb.putShort(this.flags); bb.putInt(this.clientIPAddress); bb.putInt(this.yourIPAddress); bb.putInt(this.serverIPAddress); bb.putInt(this.gatewayIPAddress); checkArgument(this.clientHardwareAddress.length <= 16, "Hardware address is too long (%s bytes)", this.clientHardwareAddress.length); bb.put(this.clientHardwareAddress); if (this.clientHardwareAddress.length < 16) { for (int i = 0; i < 16 - this.clientHardwareAddress.length; ++i) { bb.put((byte) 0x0); } } this.writeString(this.serverName, bb, 64); this.writeString(this.bootFileName, bb, 128); // magic cookie bb.put((byte) 0x63); bb.put((byte) 0x82); bb.put((byte) 0x53); bb.put((byte) 0x63); for (final DHCPOption option : this.options) { final int code = option.getCode() & 0xff; bb.put((byte) code); if (code != 0 && code != 255) { bb.put(option.getLength()); bb.put(option.getData()); } } // assume the rest is padded out with zeroes return data; } @Override public IPacket deserialize(final byte[] data, final int offset, final int length) { final ByteBuffer bb = ByteBuffer.wrap(data, offset, length); if (bb.remaining() < DHCP.MIN_HEADER_LENGTH) { return this; } this.opCode = bb.get(); this.hardwareType = bb.get(); this.hardwareAddressLength = bb.get(); this.hops = bb.get(); this.transactionId = bb.getInt(); this.seconds = bb.getShort(); this.flags = bb.getShort(); this.clientIPAddress = bb.getInt(); this.yourIPAddress = 
bb.getInt(); this.serverIPAddress = bb.getInt(); this.gatewayIPAddress = bb.getInt(); final int hardwareAddressLength = 0xff & this.hardwareAddressLength; this.clientHardwareAddress = new byte[hardwareAddressLength]; bb.get(this.clientHardwareAddress); for (int i = hardwareAddressLength; i < 16; ++i) { bb.get(); } this.serverName = this.readString(bb, 64); this.bootFileName = this.readString(bb, 128); // read the magic cookie // magic cookie bb.get(); bb.get(); bb.get(); bb.get(); // read options while (bb.hasRemaining()) { final DHCPOption option = new DHCPOption(); int code = 0xff & bb.get(); // convert signed byte to int in range // [0,255] option.setCode((byte) code); if (code == 0) { // skip these continue; } else if (code != 255) { if (bb.hasRemaining()) { final int l = 0xff & bb.get(); // convert signed byte to // int in range [0,255] option.setLength((byte) l); if (bb.remaining() >= l) { final byte[] optionData = new byte[l]; bb.get(optionData); option.setData(optionData); } else { // Skip the invalid option and set the END option code = 0xff; option.setCode((byte) code); option.setLength((byte) 0); } } else { // Skip the invalid option and set the END option code = 0xff; option.setCode((byte) code); option.setLength((byte) 0); } } this.options.add(option); if (code == 255) { // remaining bytes are supposed to be 0, but ignore them just in // case break; } } return this; } protected void writeString(final String string, final ByteBuffer bb, final int maxLength) { if (string == null) { for (int i = 0; i < maxLength; ++i) { bb.put((byte) 0x0); } } else { byte[] bytes = null; try { bytes = string.getBytes("ascii"); } catch (final UnsupportedEncodingException e) { throw new RuntimeException("Failure encoding server name", e); } int writeLength = bytes.length; if (writeLength > maxLength) { writeLength = maxLength; } bb.put(bytes, 0, writeLength); for (int i = writeLength; i < maxLength; ++i) { bb.put((byte) 0x0); } } } private static String readString(final ByteBuffer bb, final int maxLength) { final byte[] bytes = new byte[maxLength]; bb.get(bytes); String result = null; try { result = new String(bytes, "ascii").trim(); } catch (final UnsupportedEncodingException e) { throw new RuntimeException("Failure decoding string", e); } return result; } /** * Deserializer function for DHCP packets. 
* * @return deserializer function */ public static Deserializer<DHCP> deserializer() { return (data, offset, length) -> { checkInput(data, offset, length, MIN_HEADER_LENGTH); ByteBuffer bb = ByteBuffer.wrap(data, offset, length); DHCP dhcp = new DHCP(); dhcp.opCode = bb.get(); dhcp.hardwareType = bb.get(); dhcp.hardwareAddressLength = bb.get(); dhcp.hops = bb.get(); dhcp.transactionId = bb.getInt(); dhcp.seconds = bb.getShort(); dhcp.flags = bb.getShort(); dhcp.clientIPAddress = bb.getInt(); dhcp.yourIPAddress = bb.getInt(); dhcp.serverIPAddress = bb.getInt(); dhcp.gatewayIPAddress = bb.getInt(); final int hardwareAddressLength = 0xff & dhcp.hardwareAddressLength; dhcp.clientHardwareAddress = new byte[hardwareAddressLength]; bb.get(dhcp.clientHardwareAddress); for (int i = hardwareAddressLength; i < 16; ++i) { bb.get(); } dhcp.serverName = readString(bb, 64); dhcp.bootFileName = readString(bb, 128); // read the magic cookie // magic cookie bb.get(); bb.get(); bb.get(); bb.get(); // read options boolean foundEndOptionsMarker = false; while (bb.hasRemaining()) { final DHCPOption option = new DHCPOption(); int code = 0xff & bb.get(); // convert signed byte to int in range // [0,255] option.setCode((byte) code); if (code == 0) { // skip these continue; } else if (code != 255) { if (bb.hasRemaining()) { final int l = 0xff & bb.get(); // convert signed byte to // int in range [0,255] option.setLength((byte) l); if (bb.remaining() >= l) { final byte[] optionData = new byte[l]; bb.get(optionData); option.setData(optionData); dhcp.options.add(option); } else { throw new DeserializationException( "Buffer underflow while reading DHCP option"); } } } else if (code == 255) { DHCPOption end = new DHCPOption(); end.setCode((byte) 255); dhcp.options.add(end); // remaining bytes are supposed to be 0, but ignore them just in // case foundEndOptionsMarker = true; break; } } if (!foundEndOptionsMarker) { throw new DeserializationException("DHCP End options marker was missing"); } return dhcp; }; } @Override public String toString() { return toStringHelper(getClass()) .add("opCode", Byte.toString(opCode)) .add("hardwareType", Byte.toString(hardwareType)) .add("hardwareAddressLength", Byte.toString(hardwareAddressLength)) .add("hops", Byte.toString(hops)) .add("transactionId", Integer.toString(transactionId)) .add("seconds", Short.toString(seconds)) .add("flags", Short.toString(flags)) .add("clientIPAddress", Integer.toString(clientIPAddress)) .add("yourIPAddress", Integer.toString(yourIPAddress)) .add("serverIPAddress", Integer.toString(serverIPAddress)) .add("gatewayIPAddress", Integer.toString(gatewayIPAddress)) .add("clientHardwareAddress", Arrays.toString(clientHardwareAddress)) .add("serverName", serverName) .add("bootFileName", bootFileName) .toString(); // TODO: need to handle options } }
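// A minimal, illustrative sketch of building and serializing a DHCP packet with the fluent
// setters above. The hardware address, transaction id and option payload are placeholders;
// option 53 (message type) with value 1 corresponds to DHCPDISCOVER per RFC 2132. The DHCPOption
// setters are used exactly as in deserialize()/deserializer() above.
class DhcpBuildSketch {
    static byte[] buildDiscover() {
        DHCPOption messageType = new DHCPOption();
        messageType.setCode(DHCP.DHCPOptionCode.OptionCode_MessageType.getValue());
        messageType.setLength((byte) 1);
        messageType.setData(new byte[] {0x01});          // DHCPDISCOVER

        DHCPOption end = new DHCPOption();
        end.setCode((byte) 0xff);                        // END option carries no length/data

        DHCP dhcp = new DHCP()
                .setOpCode(DHCP.OPCODE_REQUEST)
                .setHardwareType(DHCP.HWTYPE_ETHERNET)
                .setHardwareAddressLength((byte) 6)
                .setTransactionId(0x12345678)
                .setClientHardwareAddress(new byte[] {0x00, 0x11, 0x22, 0x33, 0x44, 0x55})
                .setOptions(java.util.Arrays.asList(messageType, end));
        // serialize() pads the hardware address to 16 bytes, writes the magic cookie and
        // appends the options, padding the options area out to at least 60 bytes.
        return dhcp.serialize();
    }
}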
/* * Copyright 2000-2009 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.execution.testframework.ui; import com.intellij.execution.runners.ExecutionEnvironment; import com.intellij.execution.testframework.TestConsoleProperties; import com.intellij.execution.testframework.TestFrameworkPropertyListener; import com.intellij.execution.testframework.TestTreeView; import com.intellij.execution.testframework.ToolbarPanel; import com.intellij.ide.util.PropertiesComponent; import com.intellij.openapi.Disposable; import com.intellij.openapi.actionSystem.*; import com.intellij.openapi.ui.Splitter; import com.intellij.openapi.util.Disposer; import com.intellij.ui.ScrollPaneFactory; import com.intellij.ui.SideBorder; import com.intellij.util.ui.AwtVisitor; import com.intellij.util.ui.UIUtil; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import javax.swing.*; import java.awt.*; import java.beans.PropertyChangeEvent; import java.beans.PropertyChangeListener; /** * @author yole */ public abstract class TestResultsPanel extends JPanel implements Disposable, DataProvider { private JScrollPane myLeftPane; private JComponent myStatisticsComponent; private Splitter myStatisticsSplitter; protected final JComponent myConsole; protected ToolbarPanel myToolbarPanel; protected final ExecutionEnvironment myEnvironment; private final String mySplitterProportionProperty; private final String myStatisticsSplitterProportionProperty; private final float mySplitterDefaultProportion; protected final AnAction[] myConsoleActions; protected final TestConsoleProperties myProperties; protected TestStatusLine myStatusLine; protected TestResultsPanel(@NotNull JComponent console, AnAction[] consoleActions, TestConsoleProperties properties, ExecutionEnvironment environment, String splitterProportionProperty, float splitterDefaultProportion) { super(new BorderLayout(0,1)); myConsole = console; myConsoleActions = consoleActions; myProperties = properties; myEnvironment = environment; mySplitterProportionProperty = splitterProportionProperty; mySplitterDefaultProportion = splitterDefaultProportion; myStatisticsSplitterProportionProperty = mySplitterProportionProperty + "_Statistics"; } public void initUI() { myLeftPane = ScrollPaneFactory.createScrollPane(); myLeftPane.putClientProperty(UIUtil.KEEP_BORDER_SIDES, SideBorder.TOP | SideBorder.RIGHT); myStatisticsComponent = createStatisticsPanel(); myStatusLine = createStatusLine(); JComponent testTreeView = createTestTreeView(); myToolbarPanel = createToolbarPanel(); Disposer.register(this, myToolbarPanel); final Splitter splitter = createSplitter(mySplitterProportionProperty, mySplitterDefaultProportion); Disposer.register(this, new Disposable(){ @Override public void dispose() { remove(splitter); splitter.dispose(); } }); add(splitter, BorderLayout.CENTER); final JPanel leftPanel = new JPanel(new BorderLayout()); leftPanel.add(myLeftPane, BorderLayout.CENTER); 
leftPanel.add(myToolbarPanel, BorderLayout.NORTH); splitter.setFirstComponent(leftPanel); myStatusLine.setMinimumSize(new Dimension(0, myStatusLine.getMinimumSize().height)); myStatusLine.setBorder(BorderFactory.createEmptyBorder(0, 0, 0, 5)); final JPanel rightPanel = new JPanel(new BorderLayout()); rightPanel.add(SameHeightPanel.wrap(myStatusLine, myToolbarPanel), BorderLayout.NORTH); myStatisticsSplitter = createSplitter(myStatisticsSplitterProportionProperty, 0.5f); new AwtVisitor(myConsole) { @Override public boolean visit(Component component) { if (component instanceof JScrollPane) { ((JScrollPane) component).putClientProperty(UIUtil.KEEP_BORDER_SIDES, SideBorder.TOP | SideBorder.LEFT); return true; } return false; } }; myStatisticsSplitter.setFirstComponent(createOutputTab(myConsole, myConsoleActions)); if (TestConsoleProperties.SHOW_STATISTICS.value(myProperties)) { showStatistics(); } myProperties.addListener(TestConsoleProperties.SHOW_STATISTICS, new TestFrameworkPropertyListener<Boolean>() { @Override public void onChanged(Boolean value) { if (value.booleanValue()) { showStatistics(); } else { myStatisticsSplitter.setSecondComponent(null); } } }); rightPanel.add(myStatisticsSplitter, BorderLayout.CENTER); splitter.setSecondComponent(rightPanel); testTreeView.setBorder(BorderFactory.createEmptyBorder(0, 3, 0, 0)); setLeftComponent(testTreeView); } private void showStatistics() { myStatisticsSplitter.setSecondComponent(myStatisticsComponent); } protected abstract JComponent createStatisticsPanel(); protected ToolbarPanel createToolbarPanel() { return new ToolbarPanel(myProperties, myEnvironment, this); } protected TestStatusLine createStatusLine() { return new TestStatusLine(); } protected abstract JComponent createTestTreeView(); @Nullable protected TestTreeView getTreeView() { return null; } @Nullable @Override public Object getData(@NonNls String dataId) { final TestTreeView view = getTreeView(); if (view != null) { return view.getData(dataId); } return null; } private static JComponent createOutputTab(JComponent console, AnAction[] consoleActions) { JPanel outputTab = new JPanel(new BorderLayout()); console.setFocusable(true); outputTab.add(console, BorderLayout.CENTER); final DefaultActionGroup actionGroup = new DefaultActionGroup(consoleActions); final ActionToolbar toolbar = ActionManager.getInstance().createActionToolbar(ActionPlaces.UNKNOWN, actionGroup, false); outputTab.add(toolbar.getComponent(), BorderLayout.EAST); return outputTab; } @Override public void dispose() { } protected static Splitter createSplitter(final String proportionProperty, final float defaultProportion) { final Splitter splitter = new Splitter(false); splitter.setHonorComponentsMinimumSize(true); final PropertiesComponent propertiesComponent = PropertiesComponent.getInstance(); float proportion; final String value = propertiesComponent.getValue(proportionProperty); if (value != null) { try { proportion = Float.parseFloat(value); } catch (NumberFormatException e) { proportion = defaultProportion; } } else { proportion = defaultProportion; } splitter.addPropertyChangeListener(new PropertyChangeListener() { @Override public void propertyChange(@NotNull final PropertyChangeEvent event) { if (event.getPropertyName().equals(Splitter.PROP_PROPORTION)) { propertiesComponent.setValue(proportionProperty, String.valueOf(splitter.getProportion())); } } }); splitter.setProportion(proportion); return splitter; } protected void setLeftComponent(final JComponent component) { if (component != 
myLeftPane.getViewport().getView()) myLeftPane.setViewportView(component); } }
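// A minimal, illustrative subclass sketch: a concrete TestResultsPanel only has to supply the
// statistics panel and the test tree view, then have its creator call initUI() once constructed.
// The splitter property name, proportion and the plain JPanel placeholder are invented for this
// sketch; the constructor and overridden method signatures are taken from the class above, and
// the tree view is assumed to be a TestTreeView (a Swing component) as used by getData().
class MyTestResultsPanel extends TestResultsPanel {
    private final TestTreeView myTreeView;

    MyTestResultsPanel(JComponent console,
                       AnAction[] consoleActions,
                       TestConsoleProperties properties,
                       ExecutionEnvironment environment,
                       TestTreeView treeView) {
        super(console, consoleActions, properties, environment,
              "MyTestRunner.Splitter.Proportion", 0.2f);
        myTreeView = treeView;
    }

    @Override
    protected JComponent createStatisticsPanel() {
        return new JPanel(); // placeholder statistics component
    }

    @Override
    protected JComponent createTestTreeView() {
        return myTreeView;   // shown in the left pane via setLeftComponent()
    }

    @Override
    protected TestTreeView getTreeView() {
        return myTreeView;   // enables getData() delegation to the tree
    }
}
// Typical use (sketch): new MyTestResultsPanel(console, actions, properties, environment, tree).initUI();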
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.index.query; import com.carrotsearch.randomizedtesting.generators.RandomPicks; import com.fasterxml.jackson.core.JsonParseException; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.join.ScoreMode; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.fielddata.IndexFieldDataService; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.query.support.InnerHitBuilder; import org.elasticsearch.script.Script.ScriptParseException; import org.elasticsearch.search.fetch.innerhits.InnerHitsContext; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.sort.FieldSortBuilder; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.TestSearchContext; import org.junit.BeforeClass; import java.io.IOException; import java.util.Arrays; import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.CoreMatchers.notNullValue; public class HasParentQueryBuilderTests extends AbstractQueryTestCase<HasParentQueryBuilder> { protected static final String PARENT_TYPE = "parent"; protected static final String CHILD_TYPE = "child"; @BeforeClass public static void beforeClass() throws Exception { MapperService mapperService = queryShardContext().getMapperService(); mapperService.merge(PARENT_TYPE, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(PARENT_TYPE, STRING_FIELD_NAME, "type=text", STRING_FIELD_NAME_2, "type=keyword", INT_FIELD_NAME, "type=integer", DOUBLE_FIELD_NAME, "type=double", BOOLEAN_FIELD_NAME, "type=boolean", DATE_FIELD_NAME, "type=date", OBJECT_FIELD_NAME, "type=object" ).string()), MapperService.MergeReason.MAPPING_UPDATE, false); mapperService.merge(CHILD_TYPE, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(CHILD_TYPE, "_parent", "type=" + PARENT_TYPE, STRING_FIELD_NAME, "type=text", STRING_FIELD_NAME_2, "type=keyword", INT_FIELD_NAME, "type=integer", DOUBLE_FIELD_NAME, "type=double", BOOLEAN_FIELD_NAME, "type=boolean", DATE_FIELD_NAME, "type=date", OBJECT_FIELD_NAME, "type=object" ).string()), MapperService.MergeReason.MAPPING_UPDATE, false); mapperService.merge("just_a_type", new 
CompressedXContent(PutMappingRequest.buildFromSimplifiedDef("just_a_type" ).string()), MapperService.MergeReason.MAPPING_UPDATE, false); } @Override protected void setSearchContext(String[] types) { final MapperService mapperService = queryShardContext().getMapperService(); final IndexFieldDataService fieldData = indexFieldDataService(); TestSearchContext testSearchContext = new TestSearchContext(queryShardContext()) { @Override public MapperService mapperService() { return mapperService; // need to build / parse inner hits sort fields } @Override public IndexFieldDataService fieldData() { return fieldData; // need to build / parse inner hits sort fields } }; testSearchContext.getQueryShardContext().setTypes(types); SearchContext.setCurrent(testSearchContext); } /** * @return a {@link HasParentQueryBuilder} with random values all over the place */ @Override protected HasParentQueryBuilder doCreateTestQueryBuilder() { HasParentQueryBuilder hqb = new HasParentQueryBuilder(PARENT_TYPE, RandomQueryBuilder.createQuery(random()), randomBoolean()); if (randomBoolean()) { hqb.innerHit(new InnerHitBuilder() .setName(randomAsciiOfLengthBetween(1, 10)) .setSize(randomIntBetween(0, 100)) .addSort(new FieldSortBuilder(STRING_FIELD_NAME_2).order(SortOrder.ASC))); } hqb.ignoreUnmapped(randomBoolean()); return hqb; } @Override protected void doAssertLuceneQuery(HasParentQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException { QueryBuilder innerQueryBuilder = queryBuilder.query(); if (innerQueryBuilder instanceof EmptyQueryBuilder) { assertNull(query); } else { assertThat(query, instanceOf(HasChildQueryBuilder.LateParsingQuery.class)); HasChildQueryBuilder.LateParsingQuery lpq = (HasChildQueryBuilder.LateParsingQuery) query; assertEquals(queryBuilder.score() ?
ScoreMode.Max : ScoreMode.None, lpq.getScoreMode()); } if (queryBuilder.innerHit() != null) { assertNotNull(SearchContext.current()); if (query != null) { assertNotNull(SearchContext.current().innerHits()); assertEquals(1, SearchContext.current().innerHits().getInnerHits().size()); assertTrue(SearchContext.current().innerHits().getInnerHits().containsKey(queryBuilder.innerHit().getName())); InnerHitsContext.BaseInnerHits innerHits = SearchContext.current().innerHits() .getInnerHits().get(queryBuilder.innerHit().getName()); assertEquals(innerHits.size(), queryBuilder.innerHit().getSize()); assertEquals(innerHits.sort().getSort().length, 1); assertEquals(innerHits.sort().getSort()[0].getField(), STRING_FIELD_NAME_2); } else { assertThat(SearchContext.current().innerHits().getInnerHits().size(), equalTo(0)); } } } public void testIllegalValues() throws IOException { QueryBuilder query = RandomQueryBuilder.createQuery(random()); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> QueryBuilders.hasParentQuery(null, query, false)); assertThat(e.getMessage(), equalTo("[has_parent] requires 'type' field")); e = expectThrows(IllegalArgumentException.class, () -> QueryBuilders.hasParentQuery("foo", null, false)); assertThat(e.getMessage(), equalTo("[has_parent] requires 'query' field")); QueryShardContext context = createShardContext(); HasParentQueryBuilder qb = QueryBuilders.hasParentQuery("just_a_type", new MatchAllQueryBuilder(), false); QueryShardException qse = expectThrows(QueryShardException.class, () -> qb.doToQuery(context)); assertThat(qse.getMessage(), equalTo("[has_parent] no child types found for type [just_a_type]")); } public void testDeprecatedXContent() throws IOException { XContentBuilder builder = XContentFactory.jsonBuilder().prettyPrint(); builder.startObject(); builder.startObject("has_parent"); builder.field("query"); new TermQueryBuilder("a", "a").toXContent(builder, ToXContent.EMPTY_PARAMS); builder.field("type", "foo"); // deprecated builder.endObject(); builder.endObject(); try { parseQuery(builder.string()); fail("type is deprecated"); } catch (IllegalArgumentException ex) { assertEquals("Deprecated field [type] used, expected [parent_type] instead", ex.getMessage()); } HasParentQueryBuilder queryBuilder = (HasParentQueryBuilder) parseQuery(builder.string(), ParseFieldMatcher.EMPTY); assertEquals("foo", queryBuilder.type()); boolean score = randomBoolean(); String key = RandomPicks.randomFrom(random(), Arrays.asList("score_mode", "scoreMode")); builder = XContentFactory.jsonBuilder().prettyPrint(); builder.startObject(); builder.startObject("has_parent"); builder.field("query"); new TermQueryBuilder("a", "a").toXContent(builder, ToXContent.EMPTY_PARAMS); builder.field(key, score ? 
"score": "none"); builder.field("parent_type", "foo"); builder.endObject(); builder.endObject(); try { parseQuery(builder.string()); fail(key + " is deprecated"); } catch (IllegalArgumentException ex) { assertEquals("Deprecated field [" + key + "] used, replaced by [score]", ex.getMessage()); } queryBuilder = (HasParentQueryBuilder) parseQuery(builder.string(), ParseFieldMatcher.EMPTY); assertEquals(score, queryBuilder.score()); } public void testToQueryInnerQueryType() throws IOException { String[] searchTypes = new String[]{CHILD_TYPE}; QueryShardContext shardContext = createShardContext(); shardContext.setTypes(searchTypes); HasParentQueryBuilder hasParentQueryBuilder = new HasParentQueryBuilder(PARENT_TYPE, new IdsQueryBuilder().addIds("id"), false); Query query = hasParentQueryBuilder.toQuery(shardContext); //verify that the context types are still the same as the ones we previously set assertThat(shardContext.getTypes(), equalTo(searchTypes)); HasChildQueryBuilderTests.assertLateParsingQuery(query, PARENT_TYPE, "id"); } /** * override superclass test, because here we need to take care that mutation doesn't happen inside * `inner_hits` structure, because we don't parse them yet and so no exception will be triggered * for any mutation there. */ @Override public void testUnknownObjectException() throws IOException { String validQuery = createTestQueryBuilder().toString(); assertThat(validQuery, containsString("{")); int endPosition = validQuery.indexOf("inner_hits"); if (endPosition == -1) { endPosition = validQuery.length() - 1; } for (int insertionPosition = 0; insertionPosition < endPosition; insertionPosition++) { if (validQuery.charAt(insertionPosition) == '{') { String testQuery = validQuery.substring(0, insertionPosition) + "{ \"newField\" : " + validQuery.substring(insertionPosition) + "}"; try { parseQuery(testQuery); fail("some parsing exception expected for query: " + testQuery); } catch (ParsingException | ScriptParseException | ElasticsearchParseException e) { // different kinds of exception wordings depending on location // of mutation, so no simple asserts possible here } catch (JsonParseException e) { // mutation produced invalid json } } } } public void testFromJson() throws IOException { String json = "{\n" + " \"has_parent\" : {\n" + " \"query\" : {\n" + " \"term\" : {\n" + " \"tag\" : {\n" + " \"value\" : \"something\",\n" + " \"boost\" : 1.0\n" + " }\n" + " }\n" + " },\n" + " \"parent_type\" : \"blog\",\n" + " \"score\" : true,\n" + " \"ignore_unmapped\" : false,\n" + " \"boost\" : 1.0\n" + " }\n" + "}"; HasParentQueryBuilder parsed = (HasParentQueryBuilder) parseQuery(json); checkGeneratedJson(json, parsed); assertEquals(json, "blog", parsed.type()); assertEquals(json, "something", ((TermQueryBuilder) parsed.query()).value()); } public void testIgnoreUnmapped() throws IOException { final HasParentQueryBuilder queryBuilder = new HasParentQueryBuilder("unmapped", new MatchAllQueryBuilder(), false); queryBuilder.ignoreUnmapped(true); Query query = queryBuilder.toQuery(queryShardContext()); assertThat(query, notNullValue()); assertThat(query, instanceOf(MatchNoDocsQuery.class)); final HasParentQueryBuilder failingQueryBuilder = new HasParentQueryBuilder("unmapped", new MatchAllQueryBuilder(), false); failingQueryBuilder.ignoreUnmapped(false); QueryShardException e = expectThrows(QueryShardException.class, () -> failingQueryBuilder.toQuery(queryShardContext())); assertThat(e.getMessage(), containsString("[" + HasParentQueryBuilder.NAME + "] query configured 
'parent_type' [unmapped] is not a valid type")); } }
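/*
 * Illustrative sketch, not part of the test class above: building programmatically the same
 * has_parent query that testFromJson() parses. It uses only the builder API exercised in
 * HasParentQueryBuilderTests; "blog", "tag" and "something" are the sample values from that
 * JSON. Placed in the same package, so no imports are needed.
 */
package org.elasticsearch.index.query;

class HasParentQueryExample {
    /** @return the programmatic equivalent of the JSON asserted in testFromJson() */
    static HasParentQueryBuilder blogParentQuery() {
        // parent_type = "blog", score = true, query = term on the "tag" field
        HasParentQueryBuilder builder =
                new HasParentQueryBuilder("blog", new TermQueryBuilder("tag", "something"), true);
        builder.ignoreUnmapped(false); // fail (rather than match nothing) if "blog" is not mapped
        return builder;
    }
}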
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package org.jetbrains.idea.svn; import com.intellij.openapi.fileEditor.impl.LoadTextUtil; import com.intellij.openapi.vcs.VcsConfiguration; import com.intellij.openapi.vcs.VcsException; import com.intellij.openapi.vcs.VcsTestUtil; import com.intellij.openapi.vcs.changes.Change; import com.intellij.openapi.vcs.changes.ui.RollbackWorker; import com.intellij.openapi.vfs.LocalFileSystem; import com.intellij.openapi.vfs.VirtualFile; import org.jetbrains.annotations.Nullable; import org.junit.Test; import java.io.File; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.List; import static com.intellij.openapi.vfs.VfsUtilCore.toVirtualFileArray; import static com.intellij.openapi.vfs.VfsUtilCore.virtualToIoFile; import static com.intellij.testFramework.vcs.DuringChangeListManagerUpdateTestScheme.checkDeletedFilesAreInList; import static com.intellij.testFramework.vcs.DuringChangeListManagerUpdateTestScheme.checkFilesAreInList; import static org.junit.Assert.*; public class SvnChangesCorrectlyRefreshedTest extends SvnTestCase { @Override public void setUp() throws Exception { super.setUp(); enableSilentOperation(VcsConfiguration.StandardConfirmation.ADD); enableSilentOperation(VcsConfiguration.StandardConfirmation.REMOVE); } @Test public void testModificationAndAfterRevert() throws Exception { final SubTree subTree = new SubTree(myWorkingCopyDir); checkin(); VcsTestUtil.editFileInCommand(myProject, subTree.myS1File, "new content"); final CharSequence text1 = LoadTextUtil.loadText(subTree.myS1File); assertEquals("new content", text1.toString()); LocalFileSystem.getInstance().refreshAndFindFileByIoFile(virtualToIoFile(subTree.myS1File)); refreshChanges(); final VcsException updateException = changeListManager.getUpdateException(); if (updateException != null) { updateException.printStackTrace(); } if (!RepeatSvnActionThroughBusy.ourBusyExceptionProcessor.process(updateException)) { assertNull(updateException == null ? 
null : updateException.getMessage(), updateException); } checkFilesAreInList(new VirtualFile[]{subTree.myS1File}, changeListManager.getDefaultListName(), changeListManager); final Collection<Change> changes = changeListManager.getDefaultChangeList().getChanges(); final RollbackWorker worker = new RollbackWorker(myProject); worker.doRollback(changes, false); final CharSequence text = LoadTextUtil.loadText(subTree.myS1File); assertEquals(SubTree.ourS1Contents, text.toString()); refreshChanges(); checkFilesAreInList(VirtualFile.EMPTY_ARRAY, changeListManager.getDefaultListName(), changeListManager); } @Test public void testRenameFileAndAfterRevert() throws Throwable { final SubTree subTree = new SubTree(myWorkingCopyDir); checkin(); final String newName = "newName"; renameFileInCommand(subTree.myS1File, newName); assertVF(subTree.mySourceDir, newName); refreshChanges(); checkFilesAreInList(new VirtualFile[]{subTree.myS1File}, changeListManager.getDefaultListName(), changeListManager); final Collection<Change> changes = changeListManager.getDefaultChangeList().getChanges(); final RollbackWorker worker = new RollbackWorker(myProject); worker.doRollback(changes, false); assertVF(subTree.mySourceDir, "s1.txt"); changeListManager.ensureUpToDate(); checkFilesAreInList(VirtualFile.EMPTY_ARRAY, changeListManager.getDefaultListName(), changeListManager); } @Test public void testMoveFileAndAfterRevert() throws Throwable { final SubTree subTree = new SubTree(myWorkingCopyDir); checkin(); moveFileInCommand(subTree.myS1File, subTree.myTargetDir); assertVF(subTree.myTargetDir, "s1.txt"); refreshChanges(); checkFilesAreInList(new VirtualFile[]{subTree.myS1File}, changeListManager.getDefaultListName(), changeListManager); final Collection<Change> changes = changeListManager.getDefaultChangeList().getChanges(); final RollbackWorker worker = new RollbackWorker(myProject); worker.doRollback(changes, false); assertVF(subTree.mySourceDir, "s1.txt"); changeListManager.ensureUpToDate(); checkFilesAreInList(VirtualFile.EMPTY_ARRAY, changeListManager.getDefaultListName(), changeListManager); } @Test public void testRenameDirAndAfterRevert() throws Throwable { final SubTree subTree = new SubTree(myWorkingCopyDir); checkin(); final String newName = "newName"; renameFileInCommand(subTree.mySourceDir, newName); assertVF(subTree.myRootDir, newName); assertVF(subTree.mySourceDir, "s1.txt"); assertVF(subTree.mySourceDir, "s2.txt"); refreshChanges(); checkFilesAreInList(new VirtualFile[]{subTree.mySourceDir, subTree.myS1File, subTree.myS2File}, changeListManager.getDefaultListName(), changeListManager); final Collection<Change> changes = changeListManager.getDefaultChangeList().getChanges(); final RollbackWorker worker = new RollbackWorker(myProject); worker.doRollback(changes, false); subTree.mySourceDir = assertVF(subTree.myRootDir, "source"); assertTrue(subTree.mySourceDir.getPath().endsWith("/root/source")); assertVF(subTree.mySourceDir, "s1.txt"); assertVF(subTree.mySourceDir, "s2.txt"); changeListManager.ensureUpToDate(); checkFilesAreInList(VirtualFile.EMPTY_ARRAY, changeListManager.getDefaultListName(), changeListManager); } @Test public void testMoveDirEditFileAndAfterRevert() throws Throwable { final SubTree subTree = new SubTree(myWorkingCopyDir); checkin(); moveFileInCommand(subTree.mySourceDir, subTree.myTargetDir); assertTrue(subTree.mySourceDir.getPath().endsWith("/target/source")); assertVF(subTree.myTargetDir, "source"); VcsTestUtil.editFileInCommand(myProject, subTree.myS1File, "new"); final CharSequence 
text1 = LoadTextUtil.loadText(subTree.myS1File); assertEquals("new", text1.toString()); refreshChanges(); checkFilesAreInList(new VirtualFile[]{subTree.mySourceDir, subTree.myS1File, subTree.myS2File}, changeListManager.getDefaultListName(), changeListManager); final Collection<Change> changes = changeListManager.getDefaultChangeList().getChanges(); final RollbackWorker worker = new RollbackWorker(myProject); worker.doRollback(changes, false); subTree.mySourceDir = assertVF(subTree.myRootDir, "source"); assertTrue(subTree.mySourceDir.getPath().endsWith("/root/source")); subTree.myS1File = assertVF(subTree.mySourceDir, "s1.txt"); subTree.myS2File = assertVF(subTree.mySourceDir, "s2.txt"); final CharSequence text = LoadTextUtil.loadText(subTree.myS1File); assertEquals(SubTree.ourS1Contents, text.toString()); changeListManager.ensureUpToDate(); checkFilesAreInList(VirtualFile.EMPTY_ARRAY, changeListManager.getDefaultListName(), changeListManager); } @Test public void testAddDirEditFileAndAfterRevert() { final SubTree subTree = new SubTree(myWorkingCopyDir); refreshChanges(); final List<VirtualFile> files = getAllFiles(subTree); checkFilesAreInList(toVirtualFileArray(files), changeListManager.getDefaultListName(), changeListManager); final Collection<Change> changes = changeListManager.getDefaultChangeList().getChanges(); final RollbackWorker worker = new RollbackWorker(myProject); worker.doRollback(changes, false); assertVF(subTree.myRootDir, "source"); assertVF(subTree.mySourceDir, "s1.txt"); assertVF(subTree.myRootDir, "target"); changeListManager.ensureUpToDate(); checkFilesAreInList(VirtualFile.EMPTY_ARRAY, changeListManager.getDefaultListName(), changeListManager); for (VirtualFile file : files) { assertTrue(file.getPath(), changeListManager.isUnversioned(file)); } } private List<VirtualFile> getAllFiles(final SubTree subTree) { final List<VirtualFile> files = new ArrayList<>(); files.addAll(Arrays.asList(subTree.myRootDir, subTree.mySourceDir, subTree.myS2File, subTree.myS1File, subTree.myTargetDir)); files.addAll(subTree.myTargetFiles); return files; } @Test public void testDeleteDirEditFileAndAfterRevert() throws Throwable { final SubTree subTree = new SubTree(myWorkingCopyDir); checkin(); deleteFileInCommand(subTree.myRootDir); refreshChanges(); final List<VirtualFile> files = getAllFiles(subTree); checkDeletedFilesAreInList(toVirtualFileArray(files), changeListManager.getDefaultListName(), changeListManager); final Collection<Change> changes = changeListManager.getDefaultChangeList().getChanges(); final RollbackWorker worker = new RollbackWorker(myProject); worker.doRollback(changes, false); // VirtualFile instances are invalid after deletion above - find them again after rollback subTree.refresh(false); assertVF(subTree.myRootDir, "source"); assertVF(subTree.mySourceDir, "s1.txt"); assertVF(subTree.myRootDir, "target"); assertVF(subTree.myTargetDir, "t10.txt"); assertVF(subTree.myTargetDir, "t11.txt"); changeListManager.ensureUpToDate(); checkFilesAreInList(VirtualFile.EMPTY_ARRAY, changeListManager.getDefaultListName(), changeListManager); } @Nullable private static VirtualFile assertVF(final VirtualFile parent, final String name) { final VirtualFile[] files = parent.getChildren(); for (VirtualFile file : files) { if (name.equals(file.getName())) return file; } System.out.println("not found as child"); assertNotNull(LocalFileSystem.getInstance().findFileByIoFile(new File(parent.getPath(), name))); return null; } }
package org.basex.query; import static org.basex.query.QueryError.*; import java.util.*; import org.basex.query.func.*; import org.basex.query.util.*; import org.basex.query.value.item.*; import org.basex.query.var.*; import org.basex.util.*; import org.basex.util.list.*; /** * This class compiles all components of the query that are needed in an order that * maximizes the amount of inlining possible. * * @author BaseX Team 2005-16, BSD License * @author Leo Woerteler */ final class QueryCompiler { /** Number of scopes from which on linear search is replaced by a hash map. */ private static final int MAP_THRESHOLD = 16; /** Query context. */ private final QueryContext qc; /** Result list. */ private final ArrayList<Scope[]> result = new ArrayList<>(); /** Node stack. */ private final IntList stack = new IntList(); /** Index and lowlink list. */ private final IntList list = new IntList(); /** Counter for the next free index. */ private int next; /** Adjacency list. */ private final ArrayList<int[]> adjacent = new ArrayList<>(); /** Declaration list. */ private final ArrayList<Scope> scopes = new ArrayList<>(); /** Declaration list. */ private IdentityHashMap<Scope, Integer> ids; /** * Constructor. * @param qc query context * @param root root expression */ private QueryCompiler(final QueryContext qc, final Scope root) { this.qc = qc; add(root); } /** * Gathers all declarations (functions and static variables) used by the given main module. * @param main the main module to start from * @return list of all declarations that the main module uses */ public static List<StaticDecl> usedDecls(final MainModule main) { final List<StaticDecl> scopes = new ArrayList<>(); final IdentityHashMap<Scope, Object> map = new IdentityHashMap<>(); main.visit(new ASTVisitor() { @Override public boolean staticVar(final StaticVar var) { if(map.put(var, var) == null) { var.visit(this); scopes.add(var); } return true; } @Override public boolean staticFuncCall(final StaticFuncCall call) { final StaticFunc f = call.func(); if(map.put(f, f) == null) { f.visit(this); scopes.add(f); } return true; } @Override public boolean inlineFunc(final Scope sub) { if(map.put(sub, sub) == null) sub.visit(this); return true; } @Override public boolean funcItem(final FuncItem func) { if(map.put(func, func) == null) func.visit(this); return true; } }); return scopes; } /** * Compiles all necessary parts of this query. * @param qc query context * @param root root expression * @throws QueryException compilation errors */ public static void compile(final QueryContext qc, final MainModule root) throws QueryException { if(!root.compiled()) new QueryCompiler(qc, root).compile(); } /** * Compiles all necessary parts of this query. * @throws QueryException compilation errors */ private void compile() throws QueryException { // compile the used scopes only for(final Scope[] comp : components(0)) circCheck(comp).compile(qc); // check for circular variable declarations without compiling the unused scopes for(final StaticVar v : qc.vars) { if(id(v) == -1) for(final Scope[] comp : components(add(v))) circCheck(comp); } } /** * Checks if the given component contains a static variable that depends on itself. 
* @param comp component to check * @return scope to be compiled, the others are compiled recursively * @throws QueryException query exception */ private static Scope circCheck(final Scope[] comp) throws QueryException { if(comp.length > 1) { for(final Scope scp : comp) { if(scp instanceof StaticVar) throw circVarError((StaticVar) scp); } } return comp[0]; } /** * Returns the strongly connected components of the dependency graph. * @param p ID of the starting point * @return the components * @throws QueryException if a variable directly calls itself */ private Iterable<Scope[]> components(final int p) throws QueryException { result.clear(); tarjan(p); return result; } /** * Algorithm of Tarjan for computing the strongly connected components of a graph. * @param v current node * @throws QueryException if a variable directly calls itself */ private void tarjan(final int v) throws QueryException { final int ixv = 2 * v, llv = ixv + 1, idx = next++; while(list.size() <= llv) list.add(-1); list.set(ixv, idx); list.set(llv, idx); stack.push(v); for(final int w : adjacentTo(v)) { final int ixw = 2 * w, llw = ixw + 1; if(list.size() <= ixw || list.get(ixw) < 0) { // Successor w has not yet been visited; recurse on it tarjan(w); list.set(llv, Math.min(list.get(llv), list.get(llw))); } else if(stack.contains(w)) { // Successor w is in stack S and hence in the current SCC list.set(llv, Math.min(list.get(llv), list.get(ixw))); } } // If v is a root node, pop the stack and generate an SCC if(list.get(llv) == list.get(ixv)) { int w; Scope[] out = null; do { w = stack.pop(); final Scope scp = scopes.get(w); out = out == null ? new Scope[] { scp } : Array.add(out, scp); } while(w != v); result.add(out); } } /** * Gets the ID of the given scope. * @param scp scope * @return id if existing, {@code null} otherwise */ private int id(final Scope scp) { if(ids != null) { final Integer id = ids.get(scp); return id == null ? -1 : id; } final int ss = scopes.size(); for(int s = 0; s < ss; s++) if(scopes.get(s) == scp) return s; return -1; } /** * Adds a new scope and returns its ID. * @param scp scope to add * @return the scope's ID */ private int add(final Scope scp) { final int id = scopes.size(); if(id == MAP_THRESHOLD) { ids = new IdentityHashMap<>(); for(final Scope s : scopes) ids.put(s, ids.size()); } scopes.add(scp); adjacent.add(null); if(ids != null) ids.put(scp, id); return id; } /** * Returns the indices of all scopes called by the given one. * @param node source node index * @return destination node indices * @throws QueryException if a variable directly calls itself */ private int[] adjacentTo(final int node) throws QueryException { int[] adj = adjacent.get(node); if(adj == null) { adj = neighbors(scopes.get(node)); adjacent.set(node, adj); } return adj; } /** * Fills in all used scopes of the given one. * @param curr current scope * @return IDs of all directly reachable scopes * @throws QueryException if a variable directly calls itself */ private int[] neighbors(final Scope curr) throws QueryException { final IntList adj = new IntList(0); final boolean ok = curr.visit(new ASTVisitor() { @Override public boolean staticVar(final StaticVar var) { return var != curr && neighbor(var); } @Override public boolean staticFuncCall(final StaticFuncCall call) { return neighbor(call.func()); } @Override public boolean inlineFunc(final Scope sub) { return sub.visit(this); } @Override public boolean funcItem(final FuncItem func) { return neighbor(func); } /** * Adds a neighbor of the currently inspected scope. 
* @param scp the neighbor * @return {@code true} for convenience */ private boolean neighbor(final Scope scp) { final int old = id(scp), id = old == -1 ? add(scp) : old; if(old == -1 || !adj.contains(id)) adj.add(id); return true; } }); if(!ok) { final StaticVar var = (StaticVar) curr; throw CIRCREF_X.get(var.info, "$" + var.name); } return adj.finish(); } }
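/*
 * Standalone sketch, not BaseX code: the same Tarjan strongly-connected-components
 * bookkeeping that QueryCompiler.tarjan() performs over scopes, shown on a plain int
 * adjacency list. index[] plays the role of the even slots of QueryCompiler.list,
 * lowlink[] the odd slots; a node whose lowlink equals its index is the root of one SCC.
 */
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Deque;
import java.util.List;

final class TarjanSketch {
  private final int[][] graph;          // adjacency list: graph[v] = successors of v
  private final int[] index, lowlink;   // discovery index and lowest reachable index
  private final boolean[] onStack;
  private final Deque<Integer> stack = new ArrayDeque<>();
  private final List<int[]> components = new ArrayList<>();
  private int next;

  TarjanSketch(final int[][] graph) {
    this.graph = graph;
    index = new int[graph.length];
    lowlink = new int[graph.length];
    onStack = new boolean[graph.length];
    java.util.Arrays.fill(index, -1);
  }

  List<int[]> components() {
    for(int v = 0; v < graph.length; v++) if(index[v] == -1) strongConnect(v);
    return components;
  }

  private void strongConnect(final int v) {
    index[v] = lowlink[v] = next++;
    stack.push(v);
    onStack[v] = true;
    for(final int w : graph[v]) {
      if(index[w] == -1) {            // successor not yet visited: recurse on it
        strongConnect(w);
        lowlink[v] = Math.min(lowlink[v], lowlink[w]);
      } else if(onStack[w]) {         // successor is on the stack, hence in the current SCC
        lowlink[v] = Math.min(lowlink[v], index[w]);
      }
    }
    if(lowlink[v] == index[v]) {      // v is a root node: pop the stack and emit one SCC
      final List<Integer> scc = new ArrayList<>();
      int w;
      do {
        w = stack.pop();
        onStack[w] = false;
        scc.add(w);
      } while(w != v);
      components.add(scc.stream().mapToInt(Integer::intValue).toArray());
    }
  }

  public static void main(final String[] args) {
    // 0 -> 1 -> 2 -> 0 form one component; 3 depends on the cycle but is its own SCC
    final int[][] g = { {1}, {2}, {0}, {0} };
    new TarjanSketch(g).components().forEach(c -> System.out.println(java.util.Arrays.toString(c)));
  }
}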
/* * Copyright 2016 Red Hat, Inc. and/or its affiliates * and other contributors as indicated by the @author tags. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.keycloak.testsuite.admin; import org.junit.Test; import org.keycloak.common.constants.KerberosConstants; import org.keycloak.common.util.MultivaluedHashMap; import org.keycloak.events.admin.OperationType; import org.keycloak.events.admin.ResourceType; import org.keycloak.models.AuthenticationExecutionModel; import org.keycloak.models.LDAPConstants; import org.keycloak.representations.idm.AuthenticationExecutionInfoRepresentation; import org.keycloak.representations.idm.ComponentRepresentation; import org.keycloak.storage.UserStorageProvider; import org.keycloak.testsuite.Assert; import org.keycloak.testsuite.admin.authentication.AbstractAuthenticationTest; import org.keycloak.testsuite.util.AdminEventPaths; import javax.ws.rs.BadRequestException; import javax.ws.rs.core.Response; import java.util.List; /** * @author <a href="mailto:mposolda@redhat.com">Marek Posolda</a> */ public class UserStorageRestTest extends AbstractAdminTest { private AuthenticationExecutionInfoRepresentation findKerberosExecution() { AuthenticationExecutionInfoRepresentation kerberosExecution = null; List<AuthenticationExecutionInfoRepresentation> executionReps = realm.flows().getExecutions("browser"); kerberosExecution = AbstractAuthenticationTest.findExecutionByProvider("auth-spnego", executionReps); Assert.assertNotNull(kerberosExecution); return kerberosExecution; } private String createComponent(ComponentRepresentation rep) { Response resp = realm.components().add(rep); Assert.assertEquals(201, resp.getStatus()); resp.close(); String id = ApiUtil.getCreatedId(resp); assertAdminEvents.clear(); return id; } private void removeComponent(String id) { realm.components().component(id).remove(); assertAdminEvents.clear(); } private void assertFederationProvider(ComponentRepresentation rep, String id, String displayName, String providerId, String... 
config) { Assert.assertEquals(id, rep.getId()); Assert.assertEquals(displayName, rep.getName()); Assert.assertEquals(providerId, rep.getProviderId()); Assert.assertMultivaluedMap(rep.getConfig(), config); } @Test public void testKerberosAuthenticatorEnabledAutomatically() { // Assert kerberos authenticator DISABLED AuthenticationExecutionInfoRepresentation kerberosExecution = findKerberosExecution(); Assert.assertEquals(kerberosExecution.getRequirement(), AuthenticationExecutionModel.Requirement.DISABLED.toString()); // create LDAP provider with kerberos ComponentRepresentation ldapRep = new ComponentRepresentation(); ldapRep.setName("ldap2"); ldapRep.setProviderId("ldap"); ldapRep.setProviderType(UserStorageProvider.class.getName()); ldapRep.setConfig(new MultivaluedHashMap<>()); ldapRep.getConfig().putSingle("priority", Integer.toString(2)); ldapRep.getConfig().putSingle(KerberosConstants.ALLOW_KERBEROS_AUTHENTICATION, "true"); String id = createComponent(ldapRep); // Assert kerberos authenticator ALTERNATIVE kerberosExecution = findKerberosExecution(); Assert.assertEquals(kerberosExecution.getRequirement(), AuthenticationExecutionModel.Requirement.ALTERNATIVE.toString()); // Switch kerberos authenticator to DISABLED kerberosExecution.setRequirement(AuthenticationExecutionModel.Requirement.DISABLED.toString()); realm.flows().updateExecutions("browser", kerberosExecution); assertAdminEvents.assertEvent(realmId, OperationType.UPDATE, AdminEventPaths.authUpdateExecutionPath("browser"), kerberosExecution, ResourceType.AUTH_EXECUTION); // update LDAP provider with kerberos ldapRep = realm.components().component(id).toRepresentation(); realm.components().component(id).update(ldapRep); assertAdminEvents.clear(); // Assert kerberos authenticator ALTERNATIVE kerberosExecution = findKerberosExecution(); Assert.assertEquals(kerberosExecution.getRequirement(), AuthenticationExecutionModel.Requirement.ALTERNATIVE.toString()); // Cleanup kerberosExecution.setRequirement(AuthenticationExecutionModel.Requirement.DISABLED.toString()); realm.flows().updateExecutions("browser", kerberosExecution); assertAdminEvents.assertEvent(realmId, OperationType.UPDATE, AdminEventPaths.authUpdateExecutionPath("browser"), kerberosExecution, ResourceType.AUTH_EXECUTION); removeComponent(id); } @Test public void testKerberosAuthenticatorChangedOnlyIfDisabled() { // Change kerberos to REQUIRED AuthenticationExecutionInfoRepresentation kerberosExecution = findKerberosExecution(); kerberosExecution.setRequirement(AuthenticationExecutionModel.Requirement.REQUIRED.toString()); realm.flows().updateExecutions("browser", kerberosExecution); assertAdminEvents.assertEvent(realmId, OperationType.UPDATE, AdminEventPaths.authUpdateExecutionPath("browser"), kerberosExecution, ResourceType.AUTH_EXECUTION); // create LDAP provider with kerberos ComponentRepresentation ldapRep = new ComponentRepresentation(); ldapRep.setName("ldap2"); ldapRep.setProviderId("ldap"); ldapRep.setProviderType(UserStorageProvider.class.getName()); ldapRep.setConfig(new MultivaluedHashMap<>()); ldapRep.getConfig().putSingle("priority", Integer.toString(2)); ldapRep.getConfig().putSingle(KerberosConstants.ALLOW_KERBEROS_AUTHENTICATION, "true"); String id = createComponent(ldapRep); // Assert kerberos authenticator still REQUIRED kerberosExecution = findKerberosExecution(); Assert.assertEquals(kerberosExecution.getRequirement(), AuthenticationExecutionModel.Requirement.REQUIRED.toString()); // update LDAP provider with kerberos ldapRep = 
realm.components().component(id).toRepresentation(); realm.components().component(id).update(ldapRep); assertAdminEvents.clear(); // Assert kerberos authenticator still REQUIRED kerberosExecution = findKerberosExecution(); Assert.assertEquals(kerberosExecution.getRequirement(), AuthenticationExecutionModel.Requirement.REQUIRED.toString()); // Cleanup kerberosExecution.setRequirement(AuthenticationExecutionModel.Requirement.DISABLED.toString()); realm.flows().updateExecutions("browser", kerberosExecution); assertAdminEvents.assertEvent(realmId, OperationType.UPDATE, AdminEventPaths.authUpdateExecutionPath("browser"), kerberosExecution, ResourceType.AUTH_EXECUTION); removeComponent(id); } // KEYCLOAK-4438 @Test public void testKerberosAuthenticatorDisabledWhenProviderRemoved() { // Assert kerberos authenticator DISABLED AuthenticationExecutionInfoRepresentation kerberosExecution = findKerberosExecution(); Assert.assertEquals(kerberosExecution.getRequirement(), AuthenticationExecutionModel.Requirement.DISABLED.toString()); // create LDAP provider with kerberos ComponentRepresentation ldapRep = new ComponentRepresentation(); ldapRep.setName("ldap2"); ldapRep.setProviderId("ldap"); ldapRep.setProviderType(UserStorageProvider.class.getName()); ldapRep.setConfig(new MultivaluedHashMap<>()); ldapRep.getConfig().putSingle("priority", Integer.toString(2)); ldapRep.getConfig().putSingle(KerberosConstants.ALLOW_KERBEROS_AUTHENTICATION, "true"); String id = createComponent(ldapRep); // Assert kerberos authenticator ALTERNATIVE kerberosExecution = findKerberosExecution(); Assert.assertEquals(kerberosExecution.getRequirement(), AuthenticationExecutionModel.Requirement.ALTERNATIVE.toString()); // Remove LDAP provider realm.components().component(id).remove(); // Assert kerberos authenticator DISABLED kerberosExecution = findKerberosExecution(); Assert.assertEquals(kerberosExecution.getRequirement(), AuthenticationExecutionModel.Requirement.DISABLED.toString()); // Add kerberos provider ComponentRepresentation kerberosRep = new ComponentRepresentation(); kerberosRep.setName("kerberos"); kerberosRep.setProviderId("kerberos"); kerberosRep.setProviderType(UserStorageProvider.class.getName()); kerberosRep.setConfig(new MultivaluedHashMap<>()); kerberosRep.getConfig().putSingle("priority", Integer.toString(2)); id = createComponent(kerberosRep); // Assert kerberos authenticator ALTERNATIVE kerberosExecution = findKerberosExecution(); Assert.assertEquals(kerberosExecution.getRequirement(), AuthenticationExecutionModel.Requirement.ALTERNATIVE.toString()); // Switch kerberos authenticator to REQUIRED kerberosExecution.setRequirement(AuthenticationExecutionModel.Requirement.REQUIRED.toString()); realm.flows().updateExecutions("browser", kerberosExecution); // Remove Kerberos provider realm.components().component(id).remove(); // Assert kerberos authenticator DISABLED kerberosExecution = findKerberosExecution(); Assert.assertEquals(kerberosExecution.getRequirement(), AuthenticationExecutionModel.Requirement.DISABLED.toString()); } @Test public void testValidateAndCreateLdapProvider() { // Invalid filter ComponentRepresentation ldapRep = new ComponentRepresentation(); ldapRep.setName("ldap2"); ldapRep.setProviderId("ldap"); ldapRep.setProviderType(UserStorageProvider.class.getName()); ldapRep.setConfig(new MultivaluedHashMap<>()); ldapRep.getConfig().putSingle("priority", Integer.toString(2)); ldapRep.getConfig().putSingle(LDAPConstants.CUSTOM_USER_SEARCH_FILTER, "dc=something"); Response resp = 
realm.components().add(ldapRep); Assert.assertEquals(400, resp.getStatus()); resp.close(); // Invalid filter ldapRep.getConfig().putSingle(LDAPConstants.CUSTOM_USER_SEARCH_FILTER, "(dc=something"); resp = realm.components().add(ldapRep); Assert.assertEquals(400, resp.getStatus()); resp.close(); // Invalid filter ldapRep.getConfig().putSingle(LDAPConstants.CUSTOM_USER_SEARCH_FILTER, "dc=something)"); resp = realm.components().add(ldapRep); Assert.assertEquals(400, resp.getStatus()); resp.close(); // Assert nothing created so far Assert.assertTrue(realm.components().query(realmId, UserStorageProvider.class.getName()).isEmpty()); assertAdminEvents.assertEmpty(); // Valid filter. Creation success ldapRep.getConfig().putSingle(LDAPConstants.CUSTOM_USER_SEARCH_FILTER, "(dc=something)"); String id1 = createComponent(ldapRep); // Missing filter is ok too. Creation success ComponentRepresentation ldapRep2 = new ComponentRepresentation(); ldapRep2.setName("ldap3"); ldapRep2.setProviderId("ldap"); ldapRep2.setProviderType(UserStorageProvider.class.getName()); ldapRep2.setConfig(new MultivaluedHashMap<>()); ldapRep2.getConfig().putSingle("priority", Integer.toString(2)); ldapRep2.getConfig().putSingle(LDAPConstants.BIND_DN, "cn=manager"); ldapRep2.getConfig().putSingle(LDAPConstants.BIND_CREDENTIAL, "password"); String id2 = createComponent(ldapRep2); // Assert both providers created List<ComponentRepresentation> providerInstances = realm.components().query(realmId, UserStorageProvider.class.getName()); Assert.assertEquals(providerInstances.size(), 2); // Cleanup removeComponent(id1); removeComponent(id2); } @Test public void testUpdateProvider() { ComponentRepresentation ldapRep = new ComponentRepresentation(); ldapRep.setName("ldap2"); ldapRep.setProviderId("ldap"); ldapRep.setProviderType(UserStorageProvider.class.getName()); ldapRep.setConfig(new MultivaluedHashMap<>()); ldapRep.getConfig().putSingle("priority", Integer.toString(2)); ldapRep.getConfig().putSingle(LDAPConstants.BIND_DN, "cn=manager"); ldapRep.getConfig().putSingle(LDAPConstants.BIND_CREDENTIAL, "password"); String id = createComponent(ldapRep); // Assert update with invalid filter should fail ldapRep = realm.components().component(id).toRepresentation(); ldapRep.getConfig().putSingle(LDAPConstants.CUSTOM_USER_SEARCH_FILTER, "(dc=something2"); ldapRep.getConfig().putSingle(LDAPConstants.BIND_DN, "cn=manager-updated"); try { realm.components().component(id).update(ldapRep); Assert.fail("Not expected to successfull update"); } catch (BadRequestException bre) { // Expected } // Assert nothing was updated assertFederationProvider(realm.components().component(id).toRepresentation(), id, "ldap2", "ldap", LDAPConstants.BIND_DN, "cn=manager", LDAPConstants.BIND_CREDENTIAL, "**********"); // Change filter to be valid ldapRep.getConfig().putSingle(LDAPConstants.CUSTOM_USER_SEARCH_FILTER, "(dc=something2)"); realm.components().component(id).update(ldapRep); assertAdminEvents.clear(); // Assert updated successfully ldapRep = realm.components().component(id).toRepresentation(); assertFederationProvider(ldapRep, id, "ldap2", "ldap", LDAPConstants.BIND_DN, "cn=manager-updated", LDAPConstants.BIND_CREDENTIAL, "**********", LDAPConstants.CUSTOM_USER_SEARCH_FILTER, "(dc=something2)"); // Assert update displayName ldapRep.setName("ldap2"); realm.components().component(id).update(ldapRep); assertFederationProvider(realm.components().component(id).toRepresentation(), id, "ldap2", "ldap",LDAPConstants.BIND_DN, "cn=manager-updated", 
LDAPConstants.BIND_CREDENTIAL, "**********", LDAPConstants.CUSTOM_USER_SEARCH_FILTER, "(dc=something2)"); // Cleanup removeComponent(id); } /* @Test public void testProviderFactories() { List<UserFederationProviderFactoryRepresentation> providerFactories = userFederation().getProviderFactories(); Assert.assertNames(providerFactories, "ldap", "kerberos", "dummy", "dummy-configurable"); // Builtin provider without properties UserFederationProviderFactoryRepresentation ldapProvider = userFederation().getProviderFactory("ldap"); Assert.assertEquals(ldapProvider.getId(), "ldap"); Assert.assertEquals(0, ldapProvider.getOptions().size()); // Configurable through the "old-way" options UserFederationProviderFactoryRepresentation dummyProvider = userFederation().getProviderFactory("dummy"); Assert.assertEquals(dummyProvider.getId(), "dummy"); Assert.assertNames(new LinkedList<>(dummyProvider.getOptions()), "important.config"); // Configurable through the "new-way" ConfiguredProvider UserFederationProviderFactoryRepresentation dummyConfiguredProvider = userFederation().getProviderFactory("dummy-configurable"); Assert.assertEquals(dummyConfiguredProvider.getId(), "dummy-configurable"); Assert.assertTrue(dummyConfiguredProvider.getOptions() == null || dummyConfiguredProvider.getOptions().isEmpty()); Assert.assertEquals("Dummy User Federation Provider Help Text", dummyConfiguredProvider.getHelpText()); Assert.assertEquals(2, dummyConfiguredProvider.getProperties().size()); Assert.assertProviderConfigProperty(dummyConfiguredProvider.getProperties().get(0), "prop1", "Prop1", "prop1Default", "Prop1 HelpText", ProviderConfigProperty.STRING_TYPE); Assert.assertProviderConfigProperty(dummyConfiguredProvider.getProperties().get(1), "prop2", "Prop2", "true", "Prop2 HelpText", ProviderConfigProperty.BOOLEAN_TYPE); try { userFederation().getProviderFactory("not-existent"); Assert.fail("Not expected to find not-existent provider"); } catch (NotFoundException nfe) { // Expected } } private UserFederationProvidersResource userFederation() { return null;//realm.userFederation(); } @Test public void testCreateProvider() { // create provider without configuration and displayName UserFederationProviderRepresentation dummyRep1 = UserFederationProviderBuilder.create() .providerName("dummy") .displayName("") .priority(2) .fullSyncPeriod(1000) .changedSyncPeriod(500) .lastSync(123) .build(); String id1 = createUserFederationProvider(dummyRep1); // create provider with configuration and displayName UserFederationProviderRepresentation dummyRep2 = UserFederationProviderBuilder.create() .providerName("dummy") .displayName("dn1") .priority(1) .configProperty("prop1", "prop1Val") .configProperty("prop2", "true") .build(); String id2 = createUserFederationProvider(dummyRep2); // Assert provider instances available assertFederationProvider(userFederation().get(id1).toRepresentation(), id1, id1, "dummy", 2, 1000, 500, 123); assertFederationProvider(userFederation().get(id2).toRepresentation(), id2, "dn1", "dummy", 1, -1, -1, -1, "prop1", "prop1Val", "prop2", "true"); // Assert sorted List<UserFederationProviderRepresentation> providerInstances = userFederation().getProviderInstances(); Assert.assertEquals(providerInstances.size(), 2); assertFederationProvider(providerInstances.get(0), id2, "dn1", "dummy", 1, -1, -1, -1, "prop1", "prop1Val", "prop2", "true"); assertFederationProvider(providerInstances.get(1), id1, id1, "dummy", 2, 1000, 500, 123); // Remove providers removeUserFederationProvider(id1); 
removeUserFederationProvider(id2); } @Test (expected = NotFoundException.class) public void testLookupNotExistentProvider() { userFederation().get("not-existent").toRepresentation(); } @Test public void testSyncFederationProvider() { // create provider UserFederationProviderRepresentation dummyRep1 = UserFederationProviderBuilder.create() .providerName("dummy") .build(); String id1 = createUserFederationProvider(dummyRep1); // Sync with unknown action shouldn't pass try { userFederation().get(id1).syncUsers("unknown"); Assert.fail("Not expected to sync with unknown action"); } catch (NotFoundException nfe) { // Expected } // Assert sync didn't happen Assert.assertEquals(-1, userFederation().get(id1).toRepresentation().getLastSync()); // Sync and assert it happened SynchronizationResultRepresentation syncResult = userFederation().get(id1).syncUsers("triggerFullSync"); Assert.assertEquals("0 imported users, 0 updated users", syncResult.getStatus()); Map<String, Object> eventRep = new HashMap<>(); eventRep.put("action", "triggerFullSync"); assertAdminEvents.assertEvent(realmId, OperationType.ACTION, AdminEventPaths.userFederationResourcePath(id1) + "/sync", eventRep, ResourceType.USER_FEDERATION_PROVIDER); int fullSyncTime = userFederation().get(id1).toRepresentation().getLastSync(); Assert.assertTrue(fullSyncTime > 0); // Changed sync setTimeOffset(50); syncResult = userFederation().get(id1).syncUsers("triggerChangedUsersSync"); eventRep.put("action", "triggerChangedUsersSync"); assertAdminEvents.assertEvent(realmId, OperationType.ACTION, AdminEventPaths.userFederationResourcePath(id1) + "/sync", eventRep, ResourceType.USER_FEDERATION_PROVIDER); Assert.assertEquals("0 imported users, 0 updated users", syncResult.getStatus()); int changedSyncTime = userFederation().get(id1).toRepresentation().getLastSync(); Assert.assertTrue(fullSyncTime + 50 <= changedSyncTime); // Cleanup resetTimeOffset(); removeUserFederationProvider(id1); } private void assertFederationProvider(UserFederationProviderRepresentation rep, String id, String displayName, String providerName, int priority, int fullSyncPeriod, int changeSyncPeriod, int lastSync, String... config) { Assert.assertEquals(id, rep.getId()); Assert.assertEquals(displayName, rep.getDisplayName()); Assert.assertEquals(providerName, rep.getProviderName()); Assert.assertEquals(priority, rep.getPriority()); Assert.assertEquals(fullSyncPeriod, rep.getFullSyncPeriod()); Assert.assertEquals(changeSyncPeriod, rep.getChangedSyncPeriod()); Assert.assertEquals(lastSync, rep.getLastSync()); Assert.assertMap(rep.getConfig(), config); } */ }
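/*
 * Illustrative helper, not part of the test class above: the ComponentRepresentation the
 * tests assemble by hand for an LDAP user-storage provider with Kerberos enabled, factored
 * into one place. The name and priority are just the sample values used above; the component
 * would still be created via realm.components().add(rep), as in createComponent().
 */
import org.keycloak.common.constants.KerberosConstants;
import org.keycloak.common.util.MultivaluedHashMap;
import org.keycloak.representations.idm.ComponentRepresentation;
import org.keycloak.storage.UserStorageProvider;

final class LdapComponentExample {
    static ComponentRepresentation ldapWithKerberos(String name, int priority) {
        ComponentRepresentation rep = new ComponentRepresentation();
        rep.setName(name);
        rep.setProviderId("ldap");
        rep.setProviderType(UserStorageProvider.class.getName());
        rep.setConfig(new MultivaluedHashMap<>());
        rep.getConfig().putSingle("priority", Integer.toString(priority));
        // enabling this is what flips the "auth-spnego" execution to ALTERNATIVE in the tests above
        rep.getConfig().putSingle(KerberosConstants.ALLOW_KERBEROS_AUTHENTICATION, "true");
        return rep;
    }
}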
/* * ###### * ###### * ############ ####( ###### #####. ###### ############ ############ * ############# #####( ###### #####. ###### ############# ############# * ###### #####( ###### #####. ###### ##### ###### ##### ###### * ###### ###### #####( ###### #####. ###### ##### ##### ##### ###### * ###### ###### #####( ###### #####. ###### ##### ##### ###### * ############# ############# ############# ############# ##### ###### * ############ ############ ############# ############ ##### ###### * ###### * ############# * ############ * * Adyen Java API Library * * Copyright (c) 2020 Adyen B.V. * This file is open source and available under the MIT license. * See the LICENSE file for more info. */ package com.adyen.service; import com.adyen.Client; import com.adyen.Service; import com.adyen.model.marketpay.CheckAccountHolderResponse; import com.adyen.model.marketpay.CloseAccountHolderRequest; import com.adyen.model.marketpay.CloseAccountHolderResponse; import com.adyen.model.marketpay.CloseAccountRequest; import com.adyen.model.marketpay.CloseAccountResponse; import com.adyen.model.marketpay.CreateAccountHolderRequest; import com.adyen.model.marketpay.CreateAccountHolderResponse; import com.adyen.model.marketpay.CreateAccountRequest; import com.adyen.model.marketpay.CreateAccountResponse; import com.adyen.model.marketpay.DeleteBankAccountRequest; import com.adyen.model.marketpay.DeleteBankAccountResponse; import com.adyen.model.marketpay.DeletePayoutMethodRequest; import com.adyen.model.marketpay.DeletePayoutMethodResponse; import com.adyen.model.marketpay.DeleteShareholderRequest; import com.adyen.model.marketpay.DeleteShareholderResponse; import com.adyen.model.marketpay.DeleteSignatoriesRequest; import com.adyen.model.marketpay.DeleteSignatoriesResponse; import com.adyen.model.marketpay.GetAccountHolderRequest; import com.adyen.model.marketpay.GetAccountHolderResponse; import com.adyen.model.marketpay.GetTaxFormRequest; import com.adyen.model.marketpay.GetTaxFormResponse; import com.adyen.model.marketpay.GetUploadedDocumentsRequest; import com.adyen.model.marketpay.GetUploadedDocumentsResponse; import com.adyen.model.marketpay.PerformVerificationRequest; import com.adyen.model.marketpay.SuspendAccountHolderRequest; import com.adyen.model.marketpay.SuspendAccountHolderResponse; import com.adyen.model.marketpay.UnSuspendAccountHolderRequest; import com.adyen.model.marketpay.UnSuspendAccountHolderResponse; import com.adyen.model.marketpay.UpdateAccountHolderRequest; import com.adyen.model.marketpay.UpdateAccountHolderResponse; import com.adyen.model.marketpay.UpdateAccountHolderStateRequest; import com.adyen.model.marketpay.UpdateAccountHolderStateResponse; import com.adyen.model.marketpay.UpdateAccountRequest; import com.adyen.model.marketpay.UpdateAccountResponse; import com.adyen.model.marketpay.UploadDocumentRequest; import com.adyen.model.marketpay.UploadDocumentResponse; import com.adyen.service.exception.ApiException; import com.adyen.service.resource.account.CheckAccountHolder; import com.adyen.service.resource.account.CloseAccount; import com.adyen.service.resource.account.CloseAccountHolder; import com.adyen.service.resource.account.CreateAccount; import com.adyen.service.resource.account.CreateAccountHolder; import com.adyen.service.resource.account.DeleteBankAccount; import com.adyen.service.resource.account.DeletePayoutMethod; import com.adyen.service.resource.account.DeleteShareholder; import com.adyen.service.resource.account.DeleteSignatories; import 
com.adyen.service.resource.account.GetAccountHolder; import com.adyen.service.resource.account.GetTaxForm; import com.adyen.service.resource.account.GetUploadedDocuments; import com.adyen.service.resource.account.SuspendAccountHolder; import com.adyen.service.resource.account.UnSuspendAccountHolder; import com.adyen.service.resource.account.UpdateAccount; import com.adyen.service.resource.account.UpdateAccountHolder; import com.adyen.service.resource.account.UpdateAccountHolderState; import com.adyen.service.resource.account.UploadDocument; import com.google.gson.reflect.TypeToken; import java.io.IOException; public class Account extends Service { private CreateAccountHolder createAccountHolder; private UpdateAccountHolder updateAccountHolder; private GetAccountHolder getAccountHolder; private UploadDocument uploadDocument; private CreateAccount createAccount; private DeleteBankAccount deleteBankAccount; private DeleteShareholder deleteShareholder; private DeleteSignatories deleteSignatories; private SuspendAccountHolder suspendAccountHolder; private UnSuspendAccountHolder unSuspendAccountHolder; private UpdateAccountHolderState updateAccountHolderState; private CloseAccount closeAccount; private CloseAccountHolder closeAccountHolder; private UpdateAccount updateAccount; private GetUploadedDocuments getUploadedDocuments; private CheckAccountHolder checkAccountHolder; private DeletePayoutMethod deletePayoutMethod; private GetTaxForm getTaxForm; public Account(Client client) { super(client); createAccountHolder = new CreateAccountHolder(this); updateAccountHolder = new UpdateAccountHolder(this); getAccountHolder = new GetAccountHolder(this); uploadDocument = new UploadDocument(this); createAccount = new CreateAccount(this); deleteBankAccount = new DeleteBankAccount(this); deleteShareholder = new DeleteShareholder(this); deleteSignatories = new DeleteSignatories(this); suspendAccountHolder = new SuspendAccountHolder(this); unSuspendAccountHolder = new UnSuspendAccountHolder(this); updateAccountHolderState = new UpdateAccountHolderState(this); closeAccount = new CloseAccount(this); closeAccountHolder = new CloseAccountHolder(this); updateAccount = new UpdateAccount(this); getUploadedDocuments = new GetUploadedDocuments(this); checkAccountHolder = new CheckAccountHolder(this); deletePayoutMethod = new DeletePayoutMethod(this); getTaxForm = new GetTaxForm(this); } public CreateAccountHolderResponse createAccountHolder(CreateAccountHolderRequest accountHolderRequest) throws ApiException, IOException { String jsonRequest = GSON.toJson(accountHolderRequest); String jsonResult = createAccountHolder.request(jsonRequest); return GSON.fromJson(jsonResult, new TypeToken<CreateAccountHolderResponse>() { }.getType()); } public UpdateAccountHolderResponse updateAccountHolder(UpdateAccountHolderRequest updateAccountHolderRequest) throws ApiException, IOException { String jsonRequest = GSON.toJson(updateAccountHolderRequest); String jsonResult = updateAccountHolder.request(jsonRequest); return GSON.fromJson(jsonResult, new TypeToken<UpdateAccountHolderResponse>() { }.getType()); } public GetAccountHolderResponse getAccountHolder(GetAccountHolderRequest getAccountHolderRequest) throws ApiException, IOException { String jsonRequest = GSON.toJson(getAccountHolderRequest); String jsonResult = getAccountHolder.request(jsonRequest); return GSON.fromJson(jsonResult, new TypeToken<GetAccountHolderResponse>() { }.getType()); } public UploadDocumentResponse uploadDocument(UploadDocumentRequest uploadDocumentRequest) 
throws ApiException, IOException { String jsonRequest = GSON.toJson(uploadDocumentRequest); String jsonResult = uploadDocument.request(jsonRequest); return GSON.fromJson(jsonResult, new TypeToken<UploadDocumentResponse>() { }.getType()); } public CreateAccountResponse createAccount(CreateAccountRequest createAccountRequest) throws ApiException, IOException { String jsonRequest = GSON.toJson(createAccountRequest); String jsonResult = createAccount.request(jsonRequest); return GSON.fromJson(jsonResult, new TypeToken<CreateAccountResponse>() { }.getType()); } public DeleteBankAccountResponse deleteBankAccount(DeleteBankAccountRequest deleteBankAccountRequest) throws ApiException, IOException { String jsonRequest = GSON.toJson(deleteBankAccountRequest); String jsonResult = deleteBankAccount.request(jsonRequest); return GSON.fromJson(jsonResult, new TypeToken<DeleteBankAccountResponse>() { }.getType()); } public DeleteShareholderResponse deleteShareholder(DeleteShareholderRequest deleteShareholderRequest) throws ApiException, IOException { String jsonRequest = GSON.toJson(deleteShareholderRequest); String jsonResult = deleteShareholder.request(jsonRequest); return GSON.fromJson(jsonResult, new TypeToken<DeleteShareholderResponse>() { }.getType()); } public DeleteSignatoriesResponse deleteSignatories(DeleteSignatoriesRequest deleteSignatoriesRequest) throws ApiException, IOException { String jsonRequest = GSON.toJson(deleteSignatoriesRequest); String jsonResult = deleteSignatories.request(jsonRequest); return GSON.fromJson(jsonResult, new TypeToken<DeleteSignatoriesResponse>() { }.getType()); } public SuspendAccountHolderResponse suspendAccountHolder(SuspendAccountHolderRequest suspendAccountHolderRequest) throws ApiException, IOException { String jsonRequest = GSON.toJson(suspendAccountHolderRequest); String jsonResult = suspendAccountHolder.request(jsonRequest); return GSON.fromJson(jsonResult, new TypeToken<SuspendAccountHolderResponse>() { }.getType()); } public UnSuspendAccountHolderResponse unSuspendAccountHolder(UnSuspendAccountHolderRequest unSuspendAccountHolderRequest) throws ApiException, IOException { String jsonRequest = GSON.toJson(unSuspendAccountHolderRequest); String jsonResult = unSuspendAccountHolder.request(jsonRequest); return GSON.fromJson(jsonResult, new TypeToken<UnSuspendAccountHolderResponse>() { }.getType()); } public UpdateAccountHolderStateResponse updateAccountHolderState(UpdateAccountHolderStateRequest updateAccountHolderStateRequest) throws ApiException, IOException { String jsonRequest = GSON.toJson(updateAccountHolderStateRequest); String jsonResult = updateAccountHolderState.request(jsonRequest); return GSON.fromJson(jsonResult, new TypeToken<UpdateAccountHolderStateResponse>() { }.getType()); } public CloseAccountResponse closeAccount(CloseAccountRequest closeAccountRequest) throws ApiException, IOException { String jsonRequest = GSON.toJson(closeAccountRequest); String jsonResult = closeAccount.request(jsonRequest); return GSON.fromJson(jsonResult, new TypeToken<CloseAccountResponse>() { }.getType()); } public CloseAccountHolderResponse closeAccountHolder(CloseAccountHolderRequest closeAccountHolderRequest) throws ApiException, IOException { String jsonRequest = GSON.toJson(closeAccountHolderRequest); String jsonResult = closeAccountHolder.request(jsonRequest); return GSON.fromJson(jsonResult, new TypeToken<CloseAccountHolderResponse>() { }.getType()); } public UpdateAccountResponse updateAccount(UpdateAccountRequest updateAccountRequest) throws ApiException, 
IOException { String jsonRequest = GSON.toJson(updateAccountRequest); String jsonResult = updateAccount.request(jsonRequest); return GSON.fromJson(jsonResult, new TypeToken<UpdateAccountResponse>() { }.getType()); } public GetUploadedDocumentsResponse getUploadedDocuments(GetUploadedDocumentsRequest getUploadedDocumentsRequest) throws ApiException, IOException { String jsonRequest = GSON.toJson(getUploadedDocumentsRequest); String jsonResult = getUploadedDocuments.request(jsonRequest); return GSON.fromJson(jsonResult, new TypeToken<GetUploadedDocumentsResponse>() { }.getType()); } public CheckAccountHolderResponse checkAccountHolder(PerformVerificationRequest performVerificationRequest) throws ApiException, IOException { String jsonRequest = GSON.toJson(performVerificationRequest); String jsonResult = checkAccountHolder.request(jsonRequest); return GSON.fromJson(jsonResult, new TypeToken<CheckAccountHolderResponse>() { }.getType()); } public DeletePayoutMethodResponse deletePayoutMethod(DeletePayoutMethodRequest deletePayoutMethodRequest) throws ApiException, IOException { String jsonRequest = GSON.toJson(deletePayoutMethodRequest); String jsonResult = deletePayoutMethod.request(jsonRequest); return GSON.fromJson(jsonResult, new TypeToken<DeletePayoutMethodResponse>() { }.getType()); } public GetTaxFormResponse getTaxForm(GetTaxFormRequest getTaxFormRequest) throws ApiException, IOException { String jsonRequest = GSON.toJson(getTaxFormRequest); String jsonResult = getTaxForm.request(jsonRequest); return GSON.fromJson(jsonResult, new TypeToken<GetTaxFormResponse>() { }.getType()); } }
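/*
 * Usage sketch, not library code: calling the Account service above with an already configured
 * com.adyen.Client. Every call follows the same pattern as the methods in Account: the request
 * is serialized with GSON, posted by the matching resource, and the JSON response deserialized.
 * setAccountHolderCode(...) is assumed to be the setter exposed by the marketpay request model,
 * and "CODE_OF_ACCOUNT_HOLDER" is a placeholder value.
 */
import com.adyen.Client;
import com.adyen.model.marketpay.GetAccountHolderRequest;
import com.adyen.model.marketpay.GetAccountHolderResponse;
import com.adyen.service.Account;
import com.adyen.service.exception.ApiException;
import java.io.IOException;

final class AccountServiceExample {
    static GetAccountHolderResponse fetchAccountHolder(Client client) throws ApiException, IOException {
        Account account = new Account(client);
        GetAccountHolderRequest request = new GetAccountHolderRequest();
        request.setAccountHolderCode("CODE_OF_ACCOUNT_HOLDER"); // placeholder account holder code
        return account.getAccountHolder(request);
    }
}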
/* Derby - Class org.apache.derby.impl.store.access.heap.HeapScan Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.apache.derby.impl.store.access.heap; /** A heap scan object represents an instance of an scan on a heap conglomerate. **/ import org.apache.derby.shared.common.reference.SQLState; import org.apache.derby.shared.common.sanity.SanityManager; import org.apache.derby.shared.common.error.StandardException; import org.apache.derby.iapi.store.access.conglomerate.ScanManager; import org.apache.derby.iapi.store.access.conglomerate.TransactionManager; import org.apache.derby.iapi.store.access.BackingStoreHashtable; import org.apache.derby.iapi.store.access.Qualifier; import org.apache.derby.iapi.store.access.RowUtil; import org.apache.derby.iapi.store.access.ScanInfo; import org.apache.derby.iapi.store.raw.RecordHandle; import org.apache.derby.iapi.types.DataValueDescriptor; import org.apache.derby.iapi.types.RowLocation; import org.apache.derby.impl.store.access.conglomerate.GenericScanController; import org.apache.derby.impl.store.access.conglomerate.RowPosition; class HeapScan extends GenericScanController implements ScanManager { /************************************************************************** * Constants of HeapScan ************************************************************************** */ /************************************************************************** * Fields of HeapScan ************************************************************************** */ /** * A 1 element array to turn fetchNext and fetch calls into * fetchNextGroup calls. **/ private DataValueDescriptor[][] fetchNext_one_slot_array = new DataValueDescriptor[1][]; /************************************************************************** * Constructors for This class: ************************************************************************** */ /** ** The only constructor for a heap scan returns a scan in the ** closed state, the caller must call open. 
**/ public HeapScan() { } /************************************************************************** * Protected concrete impl of abstract methods of * GenericController class: ************************************************************************** */ protected void queueDeletePostCommitWork( RowPosition pos) throws StandardException { TransactionManager xact_mgr = open_conglom.getXactMgr(); xact_mgr.addPostCommitWork( new HeapPostCommit( xact_mgr.getAccessManager(), pos.current_page.getPageKey())); } /************************************************************************** * Private/Protected methods of This class: ************************************************************************** */ protected void setRowLocationArray( RowLocation[] rowloc_array, int index, RowPosition pos) throws StandardException { if (rowloc_array[index] == null) { rowloc_array[index] = new HeapRowLocation(pos.current_rh); } else { if (SanityManager.DEBUG) { SanityManager.ASSERT( rowloc_array[index] instanceof HeapRowLocation); } ((HeapRowLocation)rowloc_array[index]).setFrom(pos.current_rh); } } protected RowLocation makeRowLocation( RowPosition pos ) throws StandardException { return new HeapRowLocation( pos.current_rh ); } protected void setRowLocationArray( RowLocation[] rowloc_array, int index, RecordHandle rh) throws StandardException { if (rowloc_array[index] == null) { rowloc_array[index] = new HeapRowLocation(rh); } else { if (SanityManager.DEBUG) { SanityManager.ASSERT( rowloc_array[index] instanceof HeapRowLocation); } ((HeapRowLocation)rowloc_array[index]).setFrom(rh); } } /** * Reposition the current scan and sets the necessary locks. * * @param rh An existing RecordHandle within the conglomerate, * at which to position the start of the scan. The scan will begin at this * location and continue forward until the end of the conglomerate. * Positioning at a non-existent RowLocation (ie. an invalid one or one that * had been deleted), will result in an exception being thrown when the * first next operation is attempted. * @return true if the scan was successfully repositioned * * @exception StandardException Standard exception policy. */ private boolean reopenScanByRecordHandleAndSetLocks (RecordHandle rh) throws StandardException { if (rh == null) { return (false); } // Unlock current position if (scan_position.current_rh != null) { open_conglom.unlockPositionAfterRead(scan_position); } // Position scan at new row scan_position.current_rh = rh; scan_position.current_rh_qualified = false; // Latch page and reposition scan final boolean rowLocationDisappeared = open_conglom.latchPageAndRepositionScan(scan_position); if (!rowLocationDisappeared) { setScanState(SCAN_INPROGRESS); open_conglom.lockPositionForRead (scan_position, null, true, true); } // Unlatch page scan_position.unlatch(); return (!rowLocationDisappeared); } /** Fetch the row at the next position of the Scan. If there is a valid next position in the scan then the value in the template storable row is replaced with the value of the row at the current scan position. The columns of the template row must be of the same type as the actual columns in the underlying conglomerate. The resulting contents of templateRow after a fetchNext() which returns false is undefined. The result of calling fetchNext(row) is exactly logically equivalent to making a next() call followed by a fetch(row) call. This interface allows implementations to optimize the 2 calls if possible. 
@param fetch_row The template row into which the value of the next position in the scan is to be stored. @return True if there is a next position in the scan, false if there isn't. @exception StandardException Standard exception policy. **/ public boolean fetchNext(DataValueDescriptor[] fetch_row) throws StandardException { // Turn this call into a group fetch of a 1 element group. if (fetch_row == null) fetchNext_one_slot_array[0] = RowUtil.EMPTY_ROW; else fetchNext_one_slot_array[0] = fetch_row; boolean ret_val = fetchRows( fetchNext_one_slot_array, (RowLocation[]) null, (BackingStoreHashtable) null, 1, (int[]) null) == 1; return(ret_val); } /** @see org.apache.derby.iapi.store.access.ScanController#next **/ public boolean next() throws StandardException { // if there is no row template from the caller, we need to // read the row into something, Use the scratch row. // We could optimize this, if there are no qualifiers and read // into a zero column row, but callers should be using fetchNext() // instead. fetchNext_one_slot_array[0] = open_conglom.getRuntimeMem().get_scratch_row( open_conglom.getRawTran()); boolean ret_val = fetchRows( fetchNext_one_slot_array, (RowLocation[]) null, (BackingStoreHashtable) null, 1, (int[]) null) == 1; return(ret_val); } /** * @see org.apache.derby.iapi.store.access.ScanController#positionAtRowLocation */ public boolean positionAtRowLocation(RowLocation rl) throws StandardException { if (open_conglom.isClosed() && !rowLocationsInvalidated) { reopenAfterEndTransaction(); } if (rowLocationsInvalidated) { return(false); } else { return(reopenScanByRecordHandleAndSetLocks (((HeapRowLocation)rl). getRecordHandle(open_conglom.getContainer()))); } } /************************************************************************** * Public Methods of ScanController interface: ************************************************************************** */ /** @see org.apache.derby.iapi.store.access.ScanController#fetchLocation **/ public void fetchLocation(RowLocation templateLocation) throws StandardException { if (open_conglom.getContainer() == null || scan_position.current_rh == null) { throw StandardException.newException( SQLState.HEAP_SCAN_NOT_POSITIONED); } HeapRowLocation hrl = (HeapRowLocation) templateLocation; hrl.setFrom(scan_position.current_rh); } public int fetchNextGroup( DataValueDescriptor[][] row_array, RowLocation[] rowloc_array) throws StandardException { return( fetchRows( row_array, rowloc_array, (BackingStoreHashtable) null, row_array.length, (int[]) null)); } public int fetchNextGroup( DataValueDescriptor[][] row_array, RowLocation[] old_rowloc_array, RowLocation[] new_rowloc_array) throws StandardException { throw(StandardException.newException( SQLState.HEAP_UNIMPLEMENTED_FEATURE)); } /** * Return ScanInfo object which describes performance of scan. * <p> * Return ScanInfo object which contains information about the current * scan. * <p> * * @see ScanInfo * * @return The ScanInfo object which contains info about current scan. * * @exception StandardException Standard exception policy. **/ public ScanInfo getScanInfo() throws StandardException { return(new HeapScanInfo(this)); } /** Reposition the current scan. This call is semantically the same as if the current scan had been closed and a openScan() had been called instead. The scan is reopened against the same conglomerate, and the scan is reopened with the same "scan column list", "hold" and "forUpdate" parameters passed in the original openScan. 
<p>
    The statistics gathered by the scan are not reset to 0 by a
    reopenScan(), rather they continue to accumulate.
    <p>
    Note that this operation is currently only supported on Heap
    conglomerates. Also note that the order of rows within a heap is not
    guaranteed, so for instance positioning at a RowLocation in the
    "middle" of a heap, then inserting more data, then continuing the scan
    is not guaranteed to see the new rows - they may be put in the
    "beginning" of the heap.

    @param startRowLocation An existing RowLocation within the conglomerate,
    at which to position the start of the scan. The scan will begin at this
    location and continue forward until the end of the conglomerate.
    Positioning at a non-existent RowLocation (i.e. an invalid one or one that
    had been deleted) will result in an exception being thrown when the
    first next operation is attempted.

    @param qualifier An array of qualifiers which, applied to each key,
    restrict the rows returned by the scan. Rows for which any one of the
    qualifiers returns false are not returned by the scan. If null, all
    rows are returned.

    @exception StandardException Standard exception policy.
    **/
    public void reopenScanByRowLocation(
    RowLocation startRowLocation,
    Qualifier qualifier[][])
        throws StandardException
    {
        reopenScanByRecordHandle(
            ((HeapRowLocation) startRowLocation).getRecordHandle(
                open_conglom.getContainer()),
            qualifier);
    }
}
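/*
 * Editor's sketch (not from the Derby sources above): a HeapScan instance is normally obtained
 * through TransactionController.openScan(...) and then driven through the ScanController
 * interface. The loop below shows the fetchNext() pattern that HeapScan's one-element
 * fetchNext_one_slot_array is optimized for. The class name, the conglomerate id, the template
 * row, and the particular open_mode/lock/isolation values are assumptions chosen for a plain
 * read-only full table scan.
 */
import org.apache.derby.iapi.store.access.ScanController;
import org.apache.derby.iapi.store.access.TransactionController;
import org.apache.derby.iapi.types.DataValueDescriptor;
import org.apache.derby.shared.common.error.StandardException;

class HeapScanUsageSketch {
    static void drainScan(TransactionController tc, long heapConglomId,
                          DataValueDescriptor[] templateRow) throws StandardException {
        ScanController scan = tc.openScan(
                heapConglomId,
                false,                                          // do not hold across commit
                0,                                              // open_mode: read-only
                TransactionController.MODE_TABLE,               // table-level locking
                TransactionController.ISOLATION_SERIALIZABLE,
                null,                                           // fetch all columns
                null, 0,                                        // no start key
                null,                                           // no qualifiers
                null, 0);                                       // no stop key
        try {
            // fetchNext() positions on the next row and copies it into templateRow in one call,
            // which HeapScan internally turns into a one-row group fetch.
            while (scan.fetchNext(templateRow)) {
                // ... consume templateRow ...
            }
        } finally {
            scan.close();
        }
    }
}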
// Portions copyright 2002, Google, Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package org.swistowski.vaulthelper.purchase; // This code was converted from code at http://iharder.sourceforge.net/base64/ // Lots of extraneous features were removed. /* The original code said: * <p> * I am placing this code in the Public Domain. Do with it as you will. * This software comes with no guarantees or warranties but with * plenty of well-wishing instead! * Please visit * <a href="http://iharder.net/xmlizable">http://iharder.net/xmlizable</a> * periodically to check for updates or to contribute improvements. * </p> * * @author Robert Harder * @author rharder@usa.net * @version 1.3 */ /** * Base64 converter class. This code is not a complete MIME encoder; * it simply converts binary data to base64 data and back. * * <p>Note {@link CharBase64} is a GWT-compatible implementation of this * class. */ public class Base64 { /** Specify encoding (value is {@code true}). */ public final static boolean ENCODE = true; /** Specify decoding (value is {@code false}). */ public final static boolean DECODE = false; /** The equals sign (=) as a byte. */ private final static byte EQUALS_SIGN = (byte) '='; /** The new line character (\n) as a byte. */ private final static byte NEW_LINE = (byte) '\n'; /** * The 64 valid Base64 values. */ private final static byte[] ALPHABET = {(byte) 'A', (byte) 'B', (byte) 'C', (byte) 'D', (byte) 'E', (byte) 'F', (byte) 'G', (byte) 'H', (byte) 'I', (byte) 'J', (byte) 'K', (byte) 'L', (byte) 'M', (byte) 'N', (byte) 'O', (byte) 'P', (byte) 'Q', (byte) 'R', (byte) 'S', (byte) 'T', (byte) 'U', (byte) 'V', (byte) 'W', (byte) 'X', (byte) 'Y', (byte) 'Z', (byte) 'a', (byte) 'b', (byte) 'c', (byte) 'd', (byte) 'e', (byte) 'f', (byte) 'g', (byte) 'h', (byte) 'i', (byte) 'j', (byte) 'k', (byte) 'l', (byte) 'm', (byte) 'n', (byte) 'o', (byte) 'p', (byte) 'q', (byte) 'r', (byte) 's', (byte) 't', (byte) 'u', (byte) 'v', (byte) 'w', (byte) 'x', (byte) 'y', (byte) 'z', (byte) '0', (byte) '1', (byte) '2', (byte) '3', (byte) '4', (byte) '5', (byte) '6', (byte) '7', (byte) '8', (byte) '9', (byte) '+', (byte) '/'}; /** * The 64 valid web safe Base64 values. 
*/ private final static byte[] WEBSAFE_ALPHABET = {(byte) 'A', (byte) 'B', (byte) 'C', (byte) 'D', (byte) 'E', (byte) 'F', (byte) 'G', (byte) 'H', (byte) 'I', (byte) 'J', (byte) 'K', (byte) 'L', (byte) 'M', (byte) 'N', (byte) 'O', (byte) 'P', (byte) 'Q', (byte) 'R', (byte) 'S', (byte) 'T', (byte) 'U', (byte) 'V', (byte) 'W', (byte) 'X', (byte) 'Y', (byte) 'Z', (byte) 'a', (byte) 'b', (byte) 'c', (byte) 'd', (byte) 'e', (byte) 'f', (byte) 'g', (byte) 'h', (byte) 'i', (byte) 'j', (byte) 'k', (byte) 'l', (byte) 'm', (byte) 'n', (byte) 'o', (byte) 'p', (byte) 'q', (byte) 'r', (byte) 's', (byte) 't', (byte) 'u', (byte) 'v', (byte) 'w', (byte) 'x', (byte) 'y', (byte) 'z', (byte) '0', (byte) '1', (byte) '2', (byte) '3', (byte) '4', (byte) '5', (byte) '6', (byte) '7', (byte) '8', (byte) '9', (byte) '-', (byte) '_'}; /** * Translates a Base64 value to either its 6-bit reconstruction value * or a negative number indicating some other meaning. **/ private final static byte[] DECODABET = {-9, -9, -9, -9, -9, -9, -9, -9, -9, // Decimal 0 - 8 -5, -5, // Whitespace: Tab and Linefeed -9, -9, // Decimal 11 - 12 -5, // Whitespace: Carriage Return -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, // Decimal 14 - 26 -9, -9, -9, -9, -9, // Decimal 27 - 31 -5, // Whitespace: Space -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, // Decimal 33 - 42 62, // Plus sign at decimal 43 -9, -9, -9, // Decimal 44 - 46 63, // Slash at decimal 47 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, // Numbers zero through nine -9, -9, -9, // Decimal 58 - 60 -1, // Equals sign at decimal 61 -9, -9, -9, // Decimal 62 - 64 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, // Letters 'A' through 'N' 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, // Letters 'O' through 'Z' -9, -9, -9, -9, -9, -9, // Decimal 91 - 96 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, // Letters 'a' through 'm' 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, // Letters 'n' through 'z' -9, -9, -9, -9, -9 // Decimal 123 - 127 /* ,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9, // Decimal 128 - 139 -9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9, // Decimal 140 - 152 -9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9, // Decimal 153 - 165 -9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9, // Decimal 166 - 178 -9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9, // Decimal 179 - 191 -9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9, // Decimal 192 - 204 -9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9, // Decimal 205 - 217 -9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9, // Decimal 218 - 230 -9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9, // Decimal 231 - 243 -9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9 // Decimal 244 - 255 */ }; /** The web safe decodabet */ private final static byte[] WEBSAFE_DECODABET = {-9, -9, -9, -9, -9, -9, -9, -9, -9, // Decimal 0 - 8 -5, -5, // Whitespace: Tab and Linefeed -9, -9, // Decimal 11 - 12 -5, // Whitespace: Carriage Return -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, // Decimal 14 - 26 -9, -9, -9, -9, -9, // Decimal 27 - 31 -5, // Whitespace: Space -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, // Decimal 33 - 44 62, // Dash '-' sign at decimal 45 -9, -9, // Decimal 46-47 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, // Numbers zero through nine -9, -9, -9, // Decimal 58 - 60 -1, // Equals sign at decimal 61 -9, -9, -9, // Decimal 62 - 64 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, // Letters 'A' through 'N' 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, // Letters 'O' through 'Z' -9, -9, -9, -9, // Decimal 91-94 63, // Underscore '_' at decimal 95 -9, // Decimal 96 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, // Letters 
'a' through 'm' 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, // Letters 'n' through 'z' -9, -9, -9, -9, -9 // Decimal 123 - 127 /* ,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9, // Decimal 128 - 139 -9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9, // Decimal 140 - 152 -9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9, // Decimal 153 - 165 -9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9, // Decimal 166 - 178 -9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9, // Decimal 179 - 191 -9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9, // Decimal 192 - 204 -9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9, // Decimal 205 - 217 -9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9, // Decimal 218 - 230 -9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9, // Decimal 231 - 243 -9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9 // Decimal 244 - 255 */ }; // Indicates white space in encoding private final static byte WHITE_SPACE_ENC = -5; // Indicates equals sign in encoding private final static byte EQUALS_SIGN_ENC = -1; /** Defeats instantiation. */ private Base64() { } /* ******** E N C O D I N G M E T H O D S ******** */ /** * Encodes up to three bytes of the array <var>source</var> * and writes the resulting four Base64 bytes to <var>destination</var>. * The source and destination arrays can be manipulated * anywhere along their length by specifying * <var>srcOffset</var> and <var>destOffset</var>. * This method does not check to make sure your arrays * are large enough to accommodate <var>srcOffset</var> + 3 for * the <var>source</var> array or <var>destOffset</var> + 4 for * the <var>destination</var> array. * The actual number of significant bytes in your array is * given by <var>numSigBytes</var>. * * @param source the array to convert * @param srcOffset the index where conversion begins * @param numSigBytes the number of significant bytes in your array * @param destination the array to hold the conversion * @param destOffset the index where output will be put * @param alphabet is the encoding alphabet * @return the <var>destination</var> array * @since 1.3 */ private static byte[] encode3to4(byte[] source, int srcOffset, int numSigBytes, byte[] destination, int destOffset, byte[] alphabet) { // 1 2 3 // 01234567890123456789012345678901 Bit position // --------000000001111111122222222 Array position from threeBytes // --------| || || || | Six bit groups to index alphabet // >>18 >>12 >> 6 >> 0 Right shift necessary // 0x3f 0x3f 0x3f Additional AND // Create buffer with zero-padding if there are only one or two // significant bytes passed in the array. // We have to shift left 24 in order to flush out the 1's that appear // when Java treats a value as negative that is cast from a byte to an int. int inBuff = (numSigBytes > 0 ? ((source[srcOffset] << 24) >>> 8) : 0) | (numSigBytes > 1 ? ((source[srcOffset + 1] << 24) >>> 16) : 0) | (numSigBytes > 2 ? 
((source[srcOffset + 2] << 24) >>> 24) : 0); switch (numSigBytes) { case 3: destination[destOffset] = alphabet[(inBuff >>> 18)]; destination[destOffset + 1] = alphabet[(inBuff >>> 12) & 0x3f]; destination[destOffset + 2] = alphabet[(inBuff >>> 6) & 0x3f]; destination[destOffset + 3] = alphabet[(inBuff) & 0x3f]; return destination; case 2: destination[destOffset] = alphabet[(inBuff >>> 18)]; destination[destOffset + 1] = alphabet[(inBuff >>> 12) & 0x3f]; destination[destOffset + 2] = alphabet[(inBuff >>> 6) & 0x3f]; destination[destOffset + 3] = EQUALS_SIGN; return destination; case 1: destination[destOffset] = alphabet[(inBuff >>> 18)]; destination[destOffset + 1] = alphabet[(inBuff >>> 12) & 0x3f]; destination[destOffset + 2] = EQUALS_SIGN; destination[destOffset + 3] = EQUALS_SIGN; return destination; default: return destination; } // end switch } // end encode3to4 /** * Encodes a byte array into Base64 notation. * Equivalent to calling * {@code encodeBytes(source, 0, source.length)} * * @param source The data to convert * @since 1.4 */ public static String encode(byte[] source) { return encode(source, 0, source.length, ALPHABET, true); } /** * Encodes a byte array into web safe Base64 notation. * * @param source The data to convert * @param doPadding is {@code true} to pad result with '=' chars * if it does not fall on 3 byte boundaries */ public static String encodeWebSafe(byte[] source, boolean doPadding) { return encode(source, 0, source.length, WEBSAFE_ALPHABET, doPadding); } /** * Encodes a byte array into Base64 notation. * * @param source the data to convert * @param off offset in array where conversion should begin * @param len length of data to convert * @param alphabet the encoding alphabet * @param doPadding is {@code true} to pad result with '=' chars * if it does not fall on 3 byte boundaries * @since 1.4 */ public static String encode(byte[] source, int off, int len, byte[] alphabet, boolean doPadding) { byte[] outBuff = encode(source, off, len, alphabet, Integer.MAX_VALUE); int outLen = outBuff.length; // If doPadding is false, set length to truncate '=' // padding characters while (doPadding == false && outLen > 0) { if (outBuff[outLen - 1] != '=') { break; } outLen -= 1; } return new String(outBuff, 0, outLen); } /** * Encodes a byte array into Base64 notation. * * @param source the data to convert * @param off offset in array where conversion should begin * @param len length of data to convert * @param alphabet is the encoding alphabet * @param maxLineLength maximum length of one line. 
* @return the BASE64-encoded byte array */ public static byte[] encode(byte[] source, int off, int len, byte[] alphabet, int maxLineLength) { int lenDiv3 = (len + 2) / 3; // ceil(len / 3) int len43 = lenDiv3 * 4; byte[] outBuff = new byte[len43 // Main 4:3 + (len43 / maxLineLength)]; // New lines int d = 0; int e = 0; int len2 = len - 2; int lineLength = 0; for (; d < len2; d += 3, e += 4) { // The following block of code is the same as // encode3to4( source, d + off, 3, outBuff, e, alphabet ); // but inlined for faster encoding (~20% improvement) int inBuff = ((source[d + off] << 24) >>> 8) | ((source[d + 1 + off] << 24) >>> 16) | ((source[d + 2 + off] << 24) >>> 24); outBuff[e] = alphabet[(inBuff >>> 18)]; outBuff[e + 1] = alphabet[(inBuff >>> 12) & 0x3f]; outBuff[e + 2] = alphabet[(inBuff >>> 6) & 0x3f]; outBuff[e + 3] = alphabet[(inBuff) & 0x3f]; lineLength += 4; if (lineLength == maxLineLength) { outBuff[e + 4] = NEW_LINE; e++; lineLength = 0; } // end if: end of line } // end for: each piece of array if (d < len) { encode3to4(source, d + off, len - d, outBuff, e, alphabet); lineLength += 4; if (lineLength == maxLineLength) { // Add a last newline outBuff[e + 4] = NEW_LINE; e++; } e += 4; } assert (e == outBuff.length); return outBuff; } /* ******** D E C O D I N G M E T H O D S ******** */ /** * Decodes four bytes from array <var>source</var> * and writes the resulting bytes (up to three of them) * to <var>destination</var>. * The source and destination arrays can be manipulated * anywhere along their length by specifying * <var>srcOffset</var> and <var>destOffset</var>. * This method does not check to make sure your arrays * are large enough to accommodate <var>srcOffset</var> + 4 for * the <var>source</var> array or <var>destOffset</var> + 3 for * the <var>destination</var> array. * This method returns the actual number of bytes that * were converted from the Base64 encoding. * * * @param source the array to convert * @param srcOffset the index where conversion begins * @param destination the array to hold the conversion * @param destOffset the index where output will be put * @param decodabet the decodabet for decoding Base64 content * @return the number of decoded bytes converted * @since 1.3 */ private static int decode4to3(byte[] source, int srcOffset, byte[] destination, int destOffset, byte[] decodabet) { // Example: Dk== if (source[srcOffset + 2] == EQUALS_SIGN) { int outBuff = ((decodabet[source[srcOffset]] << 24) >>> 6) | ((decodabet[source[srcOffset + 1]] << 24) >>> 12); destination[destOffset] = (byte) (outBuff >>> 16); return 1; } else if (source[srcOffset + 3] == EQUALS_SIGN) { // Example: DkL= int outBuff = ((decodabet[source[srcOffset]] << 24) >>> 6) | ((decodabet[source[srcOffset + 1]] << 24) >>> 12) | ((decodabet[source[srcOffset + 2]] << 24) >>> 18); destination[destOffset] = (byte) (outBuff >>> 16); destination[destOffset + 1] = (byte) (outBuff >>> 8); return 2; } else { // Example: DkLE int outBuff = ((decodabet[source[srcOffset]] << 24) >>> 6) | ((decodabet[source[srcOffset + 1]] << 24) >>> 12) | ((decodabet[source[srcOffset + 2]] << 24) >>> 18) | ((decodabet[source[srcOffset + 3]] << 24) >>> 24); destination[destOffset] = (byte) (outBuff >> 16); destination[destOffset + 1] = (byte) (outBuff >> 8); destination[destOffset + 2] = (byte) (outBuff); return 3; } } // end decodeToBytes /** * Decodes data from Base64 notation. 
* * @param s the string to decode (decoded in default encoding) * @return the decoded data * @since 1.4 */ public static byte[] decode(String s) throws Base64DecoderException { byte[] bytes = s.getBytes(); return decode(bytes, 0, bytes.length); } /** * Decodes data from web safe Base64 notation. * Web safe encoding uses '-' instead of '+', '_' instead of '/' * * @param s the string to decode (decoded in default encoding) * @return the decoded data */ public static byte[] decodeWebSafe(String s) throws Base64DecoderException { byte[] bytes = s.getBytes(); return decodeWebSafe(bytes, 0, bytes.length); } /** * Decodes Base64 content in byte array format and returns * the decoded byte array. * * @param source The Base64 encoded data * @return decoded data * @since 1.3 * @throws Base64DecoderException */ public static byte[] decode(byte[] source) throws Base64DecoderException { return decode(source, 0, source.length); } /** * Decodes web safe Base64 content in byte array format and returns * the decoded data. * Web safe encoding uses '-' instead of '+', '_' instead of '/' * * @param source the string to decode (decoded in default encoding) * @return the decoded data */ public static byte[] decodeWebSafe(byte[] source) throws Base64DecoderException { return decodeWebSafe(source, 0, source.length); } /** * Decodes Base64 content in byte array format and returns * the decoded byte array. * * @param source the Base64 encoded data * @param off the offset of where to begin decoding * @param len the length of characters to decode * @return decoded data * @since 1.3 * @throws Base64DecoderException */ public static byte[] decode(byte[] source, int off, int len) throws Base64DecoderException { return decode(source, off, len, DECODABET); } /** * Decodes web safe Base64 content in byte array format and returns * the decoded byte array. * Web safe encoding uses '-' instead of '+', '_' instead of '/' * * @param source the Base64 encoded data * @param off the offset of where to begin decoding * @param len the length of characters to decode * @return decoded data */ public static byte[] decodeWebSafe(byte[] source, int off, int len) throws Base64DecoderException { return decode(source, off, len, WEBSAFE_DECODABET); } /** * Decodes Base64 content using the supplied decodabet and returns * the decoded byte array. 
* * @param source the Base64 encoded data * @param off the offset of where to begin decoding * @param len the length of characters to decode * @param decodabet the decodabet for decoding Base64 content * @return decoded data */ public static byte[] decode(byte[] source, int off, int len, byte[] decodabet) throws Base64DecoderException { int len34 = len * 3 / 4; byte[] outBuff = new byte[2 + len34]; // Upper limit on size of output int outBuffPosn = 0; byte[] b4 = new byte[4]; int b4Posn = 0; int i = 0; byte sbiCrop = 0; byte sbiDecode = 0; for (i = 0; i < len; i++) { sbiCrop = (byte) (source[i + off] & 0x7f); // Only the low seven bits sbiDecode = decodabet[sbiCrop]; if (sbiDecode >= WHITE_SPACE_ENC) { // White space Equals sign or better if (sbiDecode >= EQUALS_SIGN_ENC) { // An equals sign (for padding) must not occur at position 0 or 1 // and must be the last byte[s] in the encoded value if (sbiCrop == EQUALS_SIGN) { int bytesLeft = len - i; byte lastByte = (byte) (source[len - 1 + off] & 0x7f); if (b4Posn == 0 || b4Posn == 1) { throw new Base64DecoderException( "invalid padding byte '=' at byte offset " + i); } else if ((b4Posn == 3 && bytesLeft > 2) || (b4Posn == 4 && bytesLeft > 1)) { throw new Base64DecoderException( "padding byte '=' falsely signals end of encoded value " + "at offset " + i); } else if (lastByte != EQUALS_SIGN && lastByte != NEW_LINE) { throw new Base64DecoderException( "encoded value has invalid trailing byte"); } break; } b4[b4Posn++] = sbiCrop; if (b4Posn == 4) { outBuffPosn += decode4to3(b4, 0, outBuff, outBuffPosn, decodabet); b4Posn = 0; } } } else { throw new Base64DecoderException("Bad Base64 input character at " + i + ": " + source[i + off] + "(decimal)"); } } // Because web safe encoding allows non padding base64 encodes, we // need to pad the rest of the b4 buffer with equal signs when // b4Posn != 0. There can be at most 2 equal signs at the end of // four characters, so the b4 buffer must have two or three // characters. This also catches the case where the input is // padded with EQUALS_SIGN if (b4Posn != 0) { if (b4Posn == 1) { throw new Base64DecoderException("single trailing character at offset " + (len - 1)); } b4[b4Posn++] = EQUALS_SIGN; outBuffPosn += decode4to3(b4, 0, outBuff, outBuffPosn, decodabet); } byte[] out = new byte[outBuffPosn]; System.arraycopy(outBuff, 0, out, 0, outBuffPosn); return out; } }
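/*
 * Editor's usage sketch (not part of the original file, assumed to live in the same package as
 * the Base64 class above so that Base64 and Base64DecoderException resolve): a round trip through
 * the standard alphabet and the web-safe alphabet without padding.
 */
import java.nio.charset.StandardCharsets;

class Base64UsageSketch {
    public static void main(String[] args) throws Base64DecoderException {
        byte[] payload = "any binary data".getBytes(StandardCharsets.UTF_8);

        // Standard alphabet ('+' and '/'), always padded with '='.
        String standard = Base64.encode(payload);
        byte[] roundTripped = Base64.decode(standard);

        // Web-safe alphabet ('-' and '_'); passing false drops the trailing '=' padding,
        // which the decoder tolerates because it pads the last quantum itself.
        String webSafe = Base64.encodeWebSafe(payload, false);
        byte[] webSafeDecoded = Base64.decodeWebSafe(webSafe);

        System.out.println(java.util.Arrays.equals(payload, roundTripped));   // true
        System.out.println(java.util.Arrays.equals(payload, webSafeDecoded)); // true
    }
}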
/* * * ***** BEGIN LICENSE BLOCK ***** * Version: MPL 1.1/GPL 2.0 * * The contents of this file are subject to the Mozilla Public License Version * 1.1 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * http://www.mozilla.org/MPL/ * * Software distributed under the License is distributed on an "AS IS" basis, * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License * for the specific language governing rights and limitations under the * License. * * The Original Code is Rhino code, released * May 6, 1999. * * The Initial Developer of the Original Code is * Netscape Communications Corporation. * Portions created by the Initial Developer are Copyright (C) 1997-1999 * the Initial Developer. All Rights Reserved. * * Contributor(s): * Bob Jervis * Google Inc. * * Alternatively, the contents of this file may be used under the terms of * the GNU General Public License Version 2 or later (the "GPL"), in which * case the provisions of the GPL are applicable instead of those above. If * you wish to allow use of your version of this file only under the terms of * the GPL and not to allow others to use your version of this file under the * MPL, indicate your decision by deleting the provisions above and replacing * them with the notice and other provisions required by the GPL. If you do * not delete the provisions above, a recipient may use your version of this * file under either the MPL or the GPL. * * ***** END LICENSE BLOCK ***** */ package com.google.javascript.rhino; import com.google.common.base.Preconditions; import com.google.javascript.rhino.JSDocInfo.Visibility; import java.util.List; import java.util.Set; import javax.annotation.Nullable; /** * A builder for {@link JSDocInfo} objects. This builder abstracts the * construction process of {@link JSDocInfo} objects whilst minimizing the * number of instances of {@link JSDocInfo} objects. It provides early * incompatibility detection among properties stored on the {@code JSDocInfo} * object being created. * */ public final class JSDocInfoBuilder { // the current JSDoc which is being populated private JSDocInfo currentInfo; // whether the current JSDocInfo has valuable information private boolean populated; // whether to include the documentation itself when parsing the JsDoc private boolean parseDocumentation; // the current marker, if any. private JSDocInfo.Marker currentMarker; public JSDocInfoBuilder(boolean parseDocumentation) { this(new JSDocInfo(parseDocumentation), parseDocumentation, false); } private JSDocInfoBuilder( JSDocInfo info, boolean parseDocumentation, boolean populated) { this.currentInfo = info; this.parseDocumentation = parseDocumentation; this.populated = populated; } public static JSDocInfoBuilder copyFrom(JSDocInfo info) { JSDocInfo clone = info.clone(); if (clone.getVisibility() == Visibility.INHERITED) { clone.setVisibility(null); } return new JSDocInfoBuilder(clone, info.isDocumentationIncluded(), true); } public static JSDocInfoBuilder maybeCopyFrom(@Nullable JSDocInfo info) { if (info == null) { return new JSDocInfoBuilder(true); } return copyFrom(info); } /** * Sets the original JSDoc comment string. This is a no-op if the builder * isn't configured to record documentation. */ public void recordOriginalCommentString(String sourceComment) { if (parseDocumentation) { currentInfo.setOriginalCommentString(sourceComment); } } /** * Sets the position of original JSDoc comment. 
*/ public void recordOriginalCommentPosition(int position) { if (parseDocumentation) { currentInfo.setOriginalCommentPosition(position); } } public boolean shouldParseDocumentation() { return parseDocumentation; } /** * Returns whether this builder is populated with information that can be * used to {@link #build} a {@link JSDocInfo} object. */ public boolean isPopulated() { return populated; } /** * Returns whether this builder is populated with information that can be * used to {@link #build} a {@link JSDocInfo} object that has a * fileoverview tag. */ public boolean isPopulatedWithFileOverview() { return isPopulated() && (currentInfo.hasFileOverview() || currentInfo.isExterns() || currentInfo.isNoCompile()); } /** * Returns whether this builder recorded a description. */ public boolean isDescriptionRecorded() { return currentInfo.getDescription() != null; } /** * Builds a {@link JSDocInfo} object based on the populated information and * returns it. * * @return a {@link JSDocInfo} object populated with the values given to this * builder. If no value was populated, this method simply returns * {@code null} */ public JSDocInfo build() { return build(false); } /** * Builds a {@link JSDocInfo} object based on the populated information and * returns it. Once this method is called, the builder can be reused to build * another {@link JSDocInfo} object. * * @return a {@link JSDocInfo} object populated with the values given to this * builder. If no value was populated, this method simply returns * {@code null} */ public JSDocInfo buildAndReset() { JSDocInfo info = build(false); if (currentInfo == null) { currentInfo = new JSDocInfo(parseDocumentation); populated = false; } return info; } /** * Builds a {@link JSDocInfo} object based on the populated information and * returns it. * * @param always Return an default JSDoc object. * @return a {@link JSDocInfo} object populated with the values given to this * builder. If no value was populated and {@code always} is false, returns * {@code null}. If {@code always} is true, returns a default JSDocInfo. */ public JSDocInfo build(boolean always) { if (populated || always) { Preconditions.checkState(currentInfo != null); JSDocInfo built = currentInfo; currentInfo = null; populateDefaults(built); populated = false; return built; } else { return null; } } /** Generate defaults when certain parameters are not specified. */ private static void populateDefaults(JSDocInfo info) { if (info.getVisibility() == null) { info.setVisibility(Visibility.INHERITED); } } /** * Adds a marker to the current JSDocInfo and populates the marker with the * annotation information. */ public void markAnnotation(String annotation, int lineno, int charno) { JSDocInfo.Marker marker = currentInfo.addMarker(); if (marker != null) { JSDocInfo.TrimmedStringPosition position = new JSDocInfo.TrimmedStringPosition(); position.setItem(annotation); position.setPositionInformation(lineno, charno, lineno, charno + annotation.length()); marker.setAnnotation(position); populated = true; } currentMarker = marker; } /** * Adds a textual block to the current marker. */ public void markText(String text, int startLineno, int startCharno, int endLineno, int endCharno) { if (currentMarker != null) { JSDocInfo.StringPosition position = new JSDocInfo.StringPosition(); position.setItem(text); position.setPositionInformation(startLineno, startCharno, endLineno, endCharno); currentMarker.setDescription(position); } } /** * Adds a type declaration to the current marker. 
*/ public void markTypeNode(Node typeNode, int lineno, int startCharno, int endLineno, int endCharno, boolean hasLC) { if (currentMarker != null) { JSDocInfo.TypePosition position = new JSDocInfo.TypePosition(); position.setItem(typeNode); position.setHasBrackets(hasLC); position.setPositionInformation(lineno, startCharno, endLineno, endCharno); currentMarker.setType(position); } } /** * Adds a name declaration to the current marker. */ public void markName(String name, StaticSourceFile file, int lineno, int charno) { if (currentMarker != null) { // Record the name as both a SourcePosition<String> and a // SourcePosition<Node>. The <String> form is deprecated, // because <Node> is more consistent with how other name // references are handled (see #markTypeNode) // // TODO(nicksantos): Remove all uses of the Name position // and replace them with the NameNode position. JSDocInfo.TrimmedStringPosition position = new JSDocInfo.TrimmedStringPosition(); position.setItem(name); position.setPositionInformation(lineno, charno, lineno, charno + name.length()); currentMarker.setName(position); JSDocInfo.NamePosition nodePos = new JSDocInfo.NamePosition(); Node node = Node.newString(Token.NAME, name, lineno, charno); node.setLength(name.length()); node.setStaticSourceFile(file); nodePos.setItem(node); nodePos.setPositionInformation(lineno, charno, lineno, charno + name.length()); currentMarker.setNameNode(nodePos); } } /** * Records a block-level description. * * @return {@code true} if the description was recorded. */ public boolean recordBlockDescription(String description) { populated = true; return currentInfo.documentBlock(description); } /** * Records a visibility. * * @return {@code true} if the visibility was recorded and {@code false} * if it was already defined */ public boolean recordVisibility(Visibility visibility) { if (currentInfo.getVisibility() == null) { populated = true; currentInfo.setVisibility(visibility); return true; } else { return false; } } /** * Records a typed parameter. * * @return {@code true} if the typed parameter was recorded and * {@code false} if a parameter with the same name was already defined */ public boolean recordParameter(String parameterName, JSTypeExpression type) { if (!hasAnySingletonTypeTags() && currentInfo.declareParam(type, parameterName)) { populated = true; return true; } else { return false; } } /** * Records a parameter's description. * * @return {@code true} if the parameter's description was recorded and * {@code false} if a parameter with the same name was already defined */ public boolean recordParameterDescription( String parameterName, String description) { if (currentInfo.documentParam(parameterName, description)) { populated = true; return true; } else { return false; } } /** * Records a template type name. * * @return {@code true} if the template type name was recorded and * {@code false} if the input template type name was already defined. */ public boolean recordTemplateTypeName(String name) { if (currentInfo.declareTemplateTypeName(name)) { populated = true; return true; } else { return false; } } /** * Records a type transformation expression together with its template * type name. */ public boolean recordTypeTransformation(String name, Node expr) { if (currentInfo.declareTypeTransformation(name, expr)) { populated = true; return true; } else { return false; } } /** * Records a thrown type. 
*/ public boolean recordThrowType(JSTypeExpression type) { if (type != null && !hasAnySingletonTypeTags()) { currentInfo.declareThrows(type); populated = true; return true; } return false; } /** * Records a throw type's description. * * @return {@code true} if the type's description was recorded and * {@code false} if a description with the same type was already defined */ public boolean recordThrowDescription( JSTypeExpression type, String description) { if (currentInfo.documentThrows(type, description)) { populated = true; return true; } else { return false; } } /** * Adds an author to the current information. */ public boolean addAuthor(String author) { if (currentInfo.documentAuthor(author)) { populated = true; return true; } else { return false; } } /** * Adds a reference ("@see") to the current information. */ public boolean addReference(String reference) { if (currentInfo.documentReference(reference)) { populated = true; return true; } else { return false; } } /** * Records that the {@link JSDocInfo} being built should have its * {@link JSDocInfo#isConsistentIdGenerator()} flag set to * {@code true}. * * @return {@code true} if the consistentIdGenerator flag was recorded and * {@code false} if it was already recorded */ public boolean recordConsistentIdGenerator() { if (!currentInfo.isConsistentIdGenerator()) { currentInfo.setConsistentIdGenerator(true); populated = true; return true; } else { return false; } } /** * Records that the {@link JSDocInfo} being built should have its {@link * JSDocInfo#isStableIdGenerator()} flag set to {@code true}. * * @return {@code true} if the stableIdGenerator flag was recorded and {@code false} if it was * already recorded. */ public boolean recordStableIdGenerator() { if (!currentInfo.isStableIdGenerator()) { currentInfo.setStableIdGenerator(true); populated = true; return true; } else { return false; } } /** * Records that the {@link JSDocInfo} being built should have its {@link * JSDocInfo#isXidGenerator()} flag set to {@code true}. * * @return {@code true} if the isXidGenerator flag was recorded and {@code false} if it was * already recorded. */ public boolean recordXidGenerator() { if (!currentInfo.isXidGenerator()) { currentInfo.setXidGenerator(true); populated = true; return true; } else { return false; } } /** * Records that the {@link JSDocInfo} being built should have its {@link * JSDocInfo#isStableIdGenerator()} flag set to {@code true}. * * @return {@code true} if the stableIdGenerator flag was recorded and {@code false} if it was * already recorded. */ public boolean recordMappedIdGenerator() { if (!currentInfo.isMappedIdGenerator()) { currentInfo.setMappedIdGenerator(true); populated = true; return true; } else { return false; } } /** * Records the version. */ public boolean recordVersion(String version) { if (currentInfo.documentVersion(version)) { populated = true; return true; } else { return false; } } /** * Records the deprecation reason. */ public boolean recordDeprecationReason(String reason) { if (currentInfo.setDeprecationReason(reason)) { populated = true; return true; } else { return false; } } /** * Returns whether a deprecation reason has been recorded. */ public boolean isDeprecationReasonRecorded() { return currentInfo.getDeprecationReason() != null; } /** * Records the list of suppressed warnings. 
*/ public boolean recordSuppressions(Set<String> suppressions) { if (currentInfo.setSuppressions(suppressions)) { populated = true; return true; } else { return false; } } public void addSuppression(String suppression) { currentInfo.addSuppression(suppression); populated = true; } /** * Records the list of modifies warnings. */ public boolean recordModifies(Set<String> modifies) { if (!hasAnySingletonSideEffectTags() && currentInfo.setModifies(modifies)) { populated = true; return true; } else { return false; } } /** * Records a type. * * @return {@code true} if the type was recorded and {@code false} if * it is invalid or was already defined */ public boolean recordType(JSTypeExpression type) { if (type != null && !hasAnyTypeRelatedTags()) { currentInfo.setType(type); populated = true; return true; } else { return false; } } public void recordInlineType() { currentInfo.setInlineType(); } /** * Records that the {@link JSDocInfo} being built should be populated * with a {@code typedef}'d type. */ public boolean recordTypedef(JSTypeExpression type) { if (type != null && !hasAnyTypeRelatedTags() && currentInfo.declareTypedefType(type)) { populated = true; return true; } return false; } /** * Records that the {@link JSDocInfo} being built should have its * {@link JSDocInfo#isIdGenerator()} flag set to * {@code true}. * * @return {@code true} if the idGenerator flag was recorded and {@code false} * if it was already recorded */ public boolean recordIdGenerator() { if (!currentInfo.isIdGenerator()) { currentInfo.setIdGenerator(true); populated = true; return true; } else { return false; } } /** * Records a return type. * * @return {@code true} if the return type was recorded and {@code false} if * it is invalid or was already defined */ public boolean recordReturnType(JSTypeExpression jsType) { if (jsType != null && currentInfo.getReturnType() == null && !hasAnySingletonTypeTags()) { currentInfo.setReturnType(jsType); populated = true; return true; } else { return false; } } /** * Records a return description * * @return {@code true} if the return description was recorded and * {@code false} if it is invalid or was already defined */ public boolean recordReturnDescription(String description) { if (currentInfo.documentReturn(description)) { populated = true; return true; } else { return false; } } /** * Records the type of a define. * * 'Define' values are special constants that may be manipulated by * the compiler. They are designed to mimic the #define command in * the C preprocessor. */ public boolean recordDefineType(JSTypeExpression type) { if (type != null && !currentInfo.isConstant() && !currentInfo.isDefine() && recordType(type)) { currentInfo.setDefine(true); populated = true; return true; } else { return false; } } /** * Records a parameter type to an enum. * * @return {@code true} if the enum's parameter type was recorded and * {@code false} if it was invalid or already defined */ public boolean recordEnumParameterType(JSTypeExpression type) { if (type != null && !hasAnyTypeRelatedTags()) { currentInfo.setEnumParameterType(type); populated = true; return true; } else { return false; } } // TODO(tbreisacher): Disallow nullable types here. If someone writes // "@this {Foo}" in their JS we automatically treat it as though they'd written // "@this {!Foo}". But, if the type node is created in the compiler // (e.g. in the WizPass) we should explicitly add the '!' /** * Records a type for {@code @this} annotation. 
* * @return {@code true} if the type was recorded and * {@code false} if it is invalid or if it collided with {@code @enum} or * {@code @type} annotations */ public boolean recordThisType(JSTypeExpression type) { if (type != null && !hasAnySingletonTypeTags() && !currentInfo.hasThisType()) { currentInfo.setThisType(type); populated = true; return true; } else { return false; } } /** * Records a base type. * * @return {@code true} if the base type was recorded and {@code false} * if it was already defined */ public boolean recordBaseType(JSTypeExpression jsType) { if (jsType != null && !hasAnySingletonTypeTags() && !currentInfo.hasBaseType()) { currentInfo.setBaseType(jsType); populated = true; return true; } else { return false; } } /** * Changes a base type, even if one has already been set on currentInfo. * * @return {@code true} if the base type was changed successfully. */ public boolean changeBaseType(JSTypeExpression jsType) { if (jsType != null && !hasAnySingletonTypeTags()) { currentInfo.setBaseType(jsType); populated = true; return true; } else { return false; } } /** * Records that the {@link JSDocInfo} being built should have its * {@link JSDocInfo#isConstant()} flag set to {@code true}. * * @return {@code true} if the constancy was recorded and {@code false} * if it was already defined */ public boolean recordConstancy() { if (!currentInfo.isConstant()) { currentInfo.setConstant(true); populated = true; return true; } else { return false; } } /** * Records a description giving context for translation (i18n). * * @return {@code true} if the description was recorded and {@code false} * if the description was invalid or was already defined */ public boolean recordDescription(String description) { if (description != null && currentInfo.getDescription() == null) { currentInfo.setDescription(description); populated = true; return true; } else { return false; } } /** * Records a meaning giving context for translation (i18n). Different * meanings will result in different translations. * * @return {@code true} If the meaning was successfully updated. */ public boolean recordMeaning(String meaning) { if (meaning != null && currentInfo.getMeaning() == null) { currentInfo.setMeaning(meaning); populated = true; return true; } else { return false; } } /** * Records a fileoverview description. * * @return {@code true} if the description was recorded and {@code false} * if the description was invalid or was already defined. */ public boolean recordFileOverview(String description) { if (currentInfo.documentFileOverview(description)) { populated = true; return true; } else { return false; } } public boolean recordLicense(String license) { currentInfo.setLicense(license); populated = true; return true; } public boolean addLicense(String license) { String txt = currentInfo.getLicense(); if (txt == null) { txt = ""; } currentInfo.setLicense(txt + license); populated = true; return true; } /** * Records that the {@link JSDocInfo} being built should have its * {@link JSDocInfo#isHidden()} flag set to {@code true}. * * @return {@code true} if the hiddenness was recorded and {@code false} * if it was already defined */ public boolean recordHiddenness() { if (!currentInfo.isHidden()) { currentInfo.setHidden(true); populated = true; return true; } else { return false; } } /** * Records that the {@link JSDocInfo} being built should have its * {@link JSDocInfo#isNoCompile()} flag set to {@code true}. 
* * @return {@code true} if the no compile flag was recorded and {@code false} * if it was already recorded */ public boolean recordNoCompile() { if (!currentInfo.isNoCompile()) { currentInfo.setNoCompile(true); populated = true; return true; } else { return false; } } /** * Records that the {@link JSDocInfo} being built should have its * {@link JSDocInfo#isNoCollapse()} flag set to {@code true}. * * @return {@code true} if the no collapse flag was recorded and {@code false} * if it was already recorded */ public boolean recordNoCollapse() { if (!currentInfo.isNoCollapse()) { currentInfo.setNoCollapse(true); populated = true; return true; } else { return false; } } /** * Records that the {@link JSDocInfo} being built should have its * {@link JSDocInfo#isConstructor()} flag set to {@code true}. * * @return {@code true} if the constructor was recorded and {@code false} * if it was already defined or it was incompatible with the existing * flags */ public boolean recordConstructor() { if (!hasAnySingletonTypeTags() && !currentInfo.isConstructorOrInterface()) { currentInfo.setConstructor(true); populated = true; return true; } else { return false; } } /** * Records that the {@link JSDocInfo} being built should have its * {@link JSDocInfo#usesImplicitMatch()} flag set to {@code true}. * * @return {@code true} if the {@code @record} tag was recorded and {@code false} * if it was already defined or it was incompatible with the existing * flags */ public boolean recordImplicitMatch() { if (!hasAnySingletonTypeTags() && !currentInfo.isInterface() && !currentInfo.isConstructor()) { currentInfo.setInterface(true); currentInfo.setImplicitMatch(true); populated = true; return true; } else { return false; } } /** * Whether the {@link JSDocInfo} being built will have its * {@link JSDocInfo#isConstructor()} flag set to {@code true}. */ public boolean isConstructorRecorded() { return currentInfo.isConstructor(); } /** * Records that the {@link JSDocInfo} being built should have its * {@link JSDocInfo#makesUnrestricted()} flag set to {@code true}. * * @return {@code true} if annotation was recorded and {@code false} * if it was already defined or it was incompatible with the existing flags */ public boolean recordUnrestricted() { if (hasAnySingletonTypeTags() || currentInfo.isInterface() || currentInfo.makesDicts() || currentInfo.makesStructs() || currentInfo.makesUnrestricted()) { return false; } currentInfo.setUnrestricted(); populated = true; return true; } public boolean isUnrestrictedRecorded() { return currentInfo.makesUnrestricted(); } /** * Records that the {@link JSDocInfo} being built should have its * {@link JSDocInfo#isAbstract()} flag set to {@code true}. * * @return {@code true} if the flag was recorded and {@code false} * if it was already defined or it was incompatible with the existing flags */ public boolean recordAbstract() { if (!hasAnySingletonTypeTags() && !currentInfo.isInterface() && !currentInfo.isAbstract()) { currentInfo.setAbstract(); populated = true; return true; } return false; } /** * Records that the {@link JSDocInfo} being built should have its * {@link JSDocInfo#makesStructs()} flag set to {@code true}. 
* * @return {@code true} if the struct was recorded and {@code false} * if it was already defined or it was incompatible with the existing flags */ public boolean recordStruct() { if (hasAnySingletonTypeTags() || currentInfo.makesDicts() || currentInfo.makesStructs() || currentInfo.makesUnrestricted()) { return false; } currentInfo.setStruct(); populated = true; return true; } public boolean isStructRecorded() { return currentInfo.makesStructs(); } /** * Records that the {@link JSDocInfo} being built should have its * {@link JSDocInfo#makesDicts()} flag set to {@code true}. * * @return {@code true} if the dict was recorded and {@code false} * if it was already defined or it was incompatible with the existing flags */ public boolean recordDict() { if (hasAnySingletonTypeTags() || currentInfo.makesDicts() || currentInfo.makesStructs() || currentInfo.makesUnrestricted()) { return false; } currentInfo.setDict(); populated = true; return true; } public boolean isDictRecorded() { return currentInfo.makesDicts(); } /** * Records that the {@link JSDocInfo} being built should have its * {@link JSDocInfo#shouldPreserveTry()} flag set to {@code true}. */ public boolean recordPreserveTry() { if (!currentInfo.shouldPreserveTry()) { currentInfo.setShouldPreserveTry(true); populated = true; return true; } else { return false; } } /** * Records that the {@link JSDocInfo} being built should have its * {@link JSDocInfo#isOverride()} flag set to {@code true}. */ public boolean recordOverride() { if (!currentInfo.isOverride()) { currentInfo.setOverride(true); populated = true; return true; } else { return false; } } /** * Records that the {@link JSDocInfo} being built should have its * {@link JSDocInfo#isNoAlias()} flag set to {@code true}. */ public boolean recordNoAlias() { if (!currentInfo.isNoAlias()) { currentInfo.setNoAlias(true); populated = true; return true; } else { return false; } } /** * Records that the {@link JSDocInfo} being built should have its * {@link JSDocInfo#isDeprecated()} flag set to {@code true}. */ public boolean recordDeprecated() { if (!currentInfo.isDeprecated()) { currentInfo.setDeprecated(true); populated = true; return true; } else { return false; } } /** * Records that the {@link JSDocInfo} being built should have its * {@link JSDocInfo#isInterface()} flag set to {@code true}. * * @return {@code true} if the flag was recorded and {@code false} * if it was already defined or it was incompatible with the existing flags */ public boolean recordInterface() { if (hasAnySingletonTypeTags() || currentInfo.isConstructor() || currentInfo.isInterface() || currentInfo.isAbstract()) { return false; } currentInfo.setInterface(true); populated = true; return true; } /** * Records that the {@link JSDocInfo} being built should have its * {@link JSDocInfo#isExport()} flag set to {@code true}. */ public boolean recordExport() { if (!currentInfo.isExport()) { currentInfo.setExport(true); populated = true; return true; } else { return false; } } /** * Records that the {@link JSDocInfo} being built should have its * {@link JSDocInfo#isExpose()} flag set to {@code true}. */ public boolean recordExpose() { if (!currentInfo.isExpose()) { currentInfo.setExpose(true); populated = true; return true; } else { return false; } } /** * Records that the {@link JSDocInfo} being built should have its * {@link JSDocInfo#isImplicitCast()} flag set to {@code true}. 
*/ public boolean recordImplicitCast() { if (!currentInfo.isImplicitCast()) { currentInfo.setImplicitCast(true); populated = true; return true; } else { return false; } } /** * Records that the {@link JSDocInfo} being built should have its * {@link JSDocInfo#isNoSideEffects()} flag set to {@code true}. */ public boolean recordNoSideEffects() { if (!hasAnySingletonSideEffectTags() && !currentInfo.isNoSideEffects()) { currentInfo.setNoSideEffects(true); populated = true; return true; } else { return false; } } /** * Records that the {@link JSDocInfo} being built should have its * {@link JSDocInfo#isExterns()} flag set to {@code true}. */ public boolean recordExterns() { if (!currentInfo.isExterns()) { currentInfo.setExterns(true); populated = true; return true; } else { return false; } } /** * Whether the {@link JSDocInfo} being built will have its * {@link JSDocInfo#isInterface()} flag set to {@code true}. */ public boolean isInterfaceRecorded() { return currentInfo.isInterface(); } /** * @return Whether a parameter of the given name has already been recorded. */ public boolean hasParameter(String name) { return currentInfo.hasParameter(name); } /** * Records an implemented interface. */ public boolean recordImplementedInterface(JSTypeExpression interfaceName) { if (currentInfo.addImplementedInterface(interfaceName)) { populated = true; return true; } else { return false; } } /** * Records an extended interface type. */ public boolean recordExtendedInterface(JSTypeExpression interfaceType) { if (currentInfo.addExtendedInterface(interfaceType)) { populated = true; return true; } else { return false; } } /** * Records that we're lending to another name. */ public boolean recordLends(String name) { if (!hasAnyTypeRelatedTags()) { currentInfo.setLendsName(name); populated = true; return true; } else { return false; } } /** * Returns whether current JSDoc is annotated with {@code @ngInject}. */ public boolean isNgInjectRecorded() { return currentInfo.isNgInject(); } /** * Records that we'd like to add {@code $inject} property inferred from * parameters. */ public boolean recordNgInject(boolean ngInject) { if (!isNgInjectRecorded()) { currentInfo.setNgInject(ngInject); populated = true; return true; } else { return false; } } /** * Returns whether current JSDoc is annotated with {@code @jaggerInject}. */ public boolean isJaggerInjectRecorded() { return currentInfo.isJaggerInject(); } /** * Records annotation with {@code @jaggerInject}. */ public boolean recordJaggerInject(boolean inject) { if (!isJaggerInjectRecorded()) { currentInfo.setJaggerInject(inject); populated = true; return true; } return false; } /** * Returns whether current JSDoc is annotated with {@code @jaggerModule}. */ public boolean isJaggerModuleRecorded() { return currentInfo.isJaggerModule(); } /** * Records annotation with {@code @jaggerModule}. */ public boolean recordJaggerModule(boolean jaggerModule) { if (!isJaggerModuleRecorded()) { currentInfo.setJaggerModule(jaggerModule); populated = true; return true; } return false; } /** * Returns whether current JSDoc is annotated with {@code @jaggerProvide}. */ public boolean isJaggerProvideRecorded() { return currentInfo.isJaggerProvide(); } /** * Records annotation with {@code @jaggerProvide}. */ public boolean recordJaggerProvide(boolean jaggerProvide) { if (!isJaggerProvideRecorded()) { currentInfo.setJaggerProvide(jaggerProvide); populated = true; return true; } return false; } /** * Returns whether current JSDoc is annotated with {@code @jaggerProvide}. 
   */
  public boolean isJaggerProvidePromiseRecorded() {
    return currentInfo.isJaggerProvidePromise();
  }

  /**
   * Records annotation with {@code @jaggerProvidePromise}.
   */
  public boolean recordJaggerProvidePromise(boolean jaggerPromise) {
    if (!isJaggerProvidePromiseRecorded()) {
      currentInfo.setJaggerProvidePromise(jaggerPromise);
      populated = true;
      return true;
    }
    return false;
  }

  /**
   * Returns whether current JSDoc is annotated with {@code @wizaction}.
   */
  public boolean isWizactionRecorded() {
    return currentInfo.isWizaction();
  }

  /**
   * Records that this method is to be exposed as a wizaction.
   */
  public boolean recordWizaction() {
    if (!isWizactionRecorded()) {
      currentInfo.setWizaction(true);
      populated = true;
      return true;
    } else {
      return false;
    }
  }

  /**
   * Returns whether current JSDoc is annotated with {@code @polymerBehavior}.
   */
  public boolean isPolymerBehaviorRecorded() {
    return currentInfo.isPolymerBehavior();
  }

  /**
   * Records that this method is to be exposed as a polymerBehavior.
   */
  public boolean recordPolymerBehavior() {
    if (!isPolymerBehaviorRecorded()) {
      currentInfo.setPolymerBehavior(true);
      populated = true;
      return true;
    } else {
      return false;
    }
  }

  public void mergePropertyBitfieldFrom(JSDocInfo other) {
    currentInfo.mergePropertyBitfieldFrom(other);
  }

  /**
   * Records a parameter that gets disposed.
   *
   * @return {@code true} if all the parameters were recorded and
   *     {@code false} if a parameter with the same name was already defined
   */
  public boolean recordDisposesParameter(List<String> parameterNames) {
    for (String parameterName : parameterNames) {
      if ((currentInfo.hasParameter(parameterName) || parameterName.equals("*"))
          && currentInfo.setDisposedParameter(parameterName)) {
        populated = true;
      } else {
        return false;
      }
    }
    return true;
  }

  /**
   * Whether the current doc info has other type tags, like
   * {@code @param}, {@code @return}, or {@code @type}.
   */
  private boolean hasAnyTypeRelatedTags() {
    return currentInfo.isConstructor()
        || currentInfo.isInterface()
        || currentInfo.isAbstract()
        || currentInfo.getParameterCount() > 0
        || currentInfo.hasReturnType()
        || currentInfo.hasBaseType()
        || currentInfo.getExtendedInterfacesCount() > 0
        || currentInfo.getLendsName() != null
        || currentInfo.hasThisType()
        || hasAnySingletonTypeTags();
  }

  /**
   * Whether the current doc info has any of the singleton type
   * tags that may not appear with other type tags, like
   * {@code @type} or {@code @typedef}.
   */
  private boolean hasAnySingletonTypeTags() {
    return currentInfo.hasType()
        || currentInfo.hasTypedefType()
        || currentInfo.hasEnumParameterType();
  }

  /**
   * Whether the current doc info has any of the singleton side-effect
   * tags that may not appear with other side-effect tags, like
   * {@code @nosideeffects} or {@code @modifies}.
   */
  private boolean hasAnySingletonSideEffectTags() {
    return currentInfo.isNoSideEffects()
        || currentInfo.hasModifies();
  }
}
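The record* methods above are conflict-checked setters: each one returns false instead of silently overwriting an incompatible annotation. The short sketch below is not part of the original file; it only illustrates the intended calling pattern. The builder type name JSDocInfoBuilder is an assumption (the class declaration sits outside this excerpt), and only methods shown above are used.

// Illustrative sketch only: drives the conflict-checked record* API above.
// Assumes the surrounding builder class is named JSDocInfoBuilder; only
// recordStruct(), recordDict() wait-free checks shown in the excerpt are used.
final class JsDocBuilderUsageSketch {
    /** Applies @struct, reporting a conflict if @dict or a type tag was recorded first. */
    static boolean applyStructAnnotation(JSDocInfoBuilder builder) {
        if (builder.isDictRecorded()) {
            // @struct and @dict are mutually exclusive, so surface the conflict
            System.err.println("@struct cannot be combined with @dict");
            return false;
        }
        // recordStruct() also returns false for other conflicts (@unrestricted, @type, ...)
        return builder.recordStruct() && builder.isStructRecorded();
    }

    private JsDocBuilderUsageSketch() {}
}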
// Copyright 2000-2022 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license. package org.jetbrains.kotlin.idea.codeInsight.unwrap; import com.intellij.testFramework.TestDataPath; import org.jetbrains.kotlin.test.JUnit3RunnerWithInners; import org.jetbrains.kotlin.test.KotlinTestUtils; import org.jetbrains.kotlin.test.TestMetadata; import org.jetbrains.kotlin.test.TestRoot; import org.junit.runner.RunWith; /** * This class is generated by {@link org.jetbrains.kotlin.testGenerator.generator.TestGenerator}. * DO NOT MODIFY MANUALLY. */ @SuppressWarnings("all") @TestRoot("idea/tests") @TestDataPath("$CONTENT_ROOT") @RunWith(JUnit3RunnerWithInners.class) public abstract class UnwrapRemoveTestGenerated extends AbstractUnwrapRemoveTest { @RunWith(JUnit3RunnerWithInners.class) @TestMetadata("testData/codeInsight/unwrapAndRemove/removeExpression") public static class RemoveExpression extends AbstractUnwrapRemoveTest { private void runTest(String testDataFilePath) throws Exception { KotlinTestUtils.runTest(this::doTestExpressionRemover, this, testDataFilePath); } @TestMetadata("ifInBlock.kt") public void testIfInBlock() throws Exception { runTest("testData/codeInsight/unwrapAndRemove/removeExpression/ifInBlock.kt"); } @TestMetadata("ifInExpressionInReturn.kt") public void testIfInExpressionInReturn() throws Exception { runTest("testData/codeInsight/unwrapAndRemove/removeExpression/ifInExpressionInReturn.kt"); } @TestMetadata("ifInReturn.kt") public void testIfInReturn() throws Exception { runTest("testData/codeInsight/unwrapAndRemove/removeExpression/ifInReturn.kt"); } @TestMetadata("tryInBlock.kt") public void testTryInBlock() throws Exception { runTest("testData/codeInsight/unwrapAndRemove/removeExpression/tryInBlock.kt"); } @TestMetadata("tryInReturn.kt") public void testTryInReturn() throws Exception { runTest("testData/codeInsight/unwrapAndRemove/removeExpression/tryInReturn.kt"); } } @RunWith(JUnit3RunnerWithInners.class) @TestMetadata("testData/codeInsight/unwrapAndRemove/unwrapThen") public static class UnwrapThen extends AbstractUnwrapRemoveTest { private void runTest(String testDataFilePath) throws Exception { KotlinTestUtils.runTest(this::doTestThenUnwrapper, this, testDataFilePath); } @TestMetadata("thenCompoundInBlock.kt") public void testThenCompoundInBlock() throws Exception { runTest("testData/codeInsight/unwrapAndRemove/unwrapThen/thenCompoundInBlock.kt"); } @TestMetadata("thenCompoundInReturn.kt") public void testThenCompoundInReturn() throws Exception { runTest("testData/codeInsight/unwrapAndRemove/unwrapThen/thenCompoundInReturn.kt"); } @TestMetadata("thenSimpleInReturn.kt") public void testThenSimpleInReturn() throws Exception { runTest("testData/codeInsight/unwrapAndRemove/unwrapThen/thenSimpleInReturn.kt"); } } @RunWith(JUnit3RunnerWithInners.class) @TestMetadata("testData/codeInsight/unwrapAndRemove/unwrapElse") public static class UnwrapElse extends AbstractUnwrapRemoveTest { private void runTest(String testDataFilePath) throws Exception { KotlinTestUtils.runTest(this::doTestElseUnwrapper, this, testDataFilePath); } @TestMetadata("elseCompoundInBlock.kt") public void testElseCompoundInBlock() throws Exception { runTest("testData/codeInsight/unwrapAndRemove/unwrapElse/elseCompoundInBlock.kt"); } @TestMetadata("elseCompoundInReturn.kt") public void testElseCompoundInReturn() throws Exception { runTest("testData/codeInsight/unwrapAndRemove/unwrapElse/elseCompoundInReturn.kt"); } @TestMetadata("elseSimpleInReturn.kt") public void 
testElseSimpleInReturn() throws Exception { runTest("testData/codeInsight/unwrapAndRemove/unwrapElse/elseSimpleInReturn.kt"); } } @RunWith(JUnit3RunnerWithInners.class) @TestMetadata("testData/codeInsight/unwrapAndRemove/removeElse") public static class RemoveElse extends AbstractUnwrapRemoveTest { private void runTest(String testDataFilePath) throws Exception { KotlinTestUtils.runTest(this::doTestElseRemover, this, testDataFilePath); } @TestMetadata("else.kt") public void testElse() throws Exception { runTest("testData/codeInsight/unwrapAndRemove/removeElse/else.kt"); } } @RunWith(JUnit3RunnerWithInners.class) @TestMetadata("testData/codeInsight/unwrapAndRemove/unwrapLoop") public static class UnwrapLoop extends AbstractUnwrapRemoveTest { private void runTest(String testDataFilePath) throws Exception { KotlinTestUtils.runTest(this::doTestLoopUnwrapper, this, testDataFilePath); } @TestMetadata("doWhile.kt") public void testDoWhile() throws Exception { runTest("testData/codeInsight/unwrapAndRemove/unwrapLoop/doWhile.kt"); } @TestMetadata("for.kt") public void testFor() throws Exception { runTest("testData/codeInsight/unwrapAndRemove/unwrapLoop/for.kt"); } @TestMetadata("while.kt") public void testWhile() throws Exception { runTest("testData/codeInsight/unwrapAndRemove/unwrapLoop/while.kt"); } } @RunWith(JUnit3RunnerWithInners.class) @TestMetadata("testData/codeInsight/unwrapAndRemove/unwrapTry") public static class UnwrapTry extends AbstractUnwrapRemoveTest { private void runTest(String testDataFilePath) throws Exception { KotlinTestUtils.runTest(this::doTestTryUnwrapper, this, testDataFilePath); } @TestMetadata("tryCompoundInBlock.kt") public void testTryCompoundInBlock() throws Exception { runTest("testData/codeInsight/unwrapAndRemove/unwrapTry/tryCompoundInBlock.kt"); } @TestMetadata("tryCompoundInReturn.kt") public void testTryCompoundInReturn() throws Exception { runTest("testData/codeInsight/unwrapAndRemove/unwrapTry/tryCompoundInReturn.kt"); } @TestMetadata("trySimpleInReturn.kt") public void testTrySimpleInReturn() throws Exception { runTest("testData/codeInsight/unwrapAndRemove/unwrapTry/trySimpleInReturn.kt"); } } @RunWith(JUnit3RunnerWithInners.class) @TestMetadata("testData/codeInsight/unwrapAndRemove/unwrapCatch") public static class UnwrapCatch extends AbstractUnwrapRemoveTest { private void runTest(String testDataFilePath) throws Exception { KotlinTestUtils.runTest(this::doTestCatchUnwrapper, this, testDataFilePath); } @TestMetadata("catchCompoundInBlock.kt") public void testCatchCompoundInBlock() throws Exception { runTest("testData/codeInsight/unwrapAndRemove/unwrapCatch/catchCompoundInBlock.kt"); } @TestMetadata("catchCompoundInReturn.kt") public void testCatchCompoundInReturn() throws Exception { runTest("testData/codeInsight/unwrapAndRemove/unwrapCatch/catchCompoundInReturn.kt"); } @TestMetadata("catchSimpleInReturn.kt") public void testCatchSimpleInReturn() throws Exception { runTest("testData/codeInsight/unwrapAndRemove/unwrapCatch/catchSimpleInReturn.kt"); } } @RunWith(JUnit3RunnerWithInners.class) @TestMetadata("testData/codeInsight/unwrapAndRemove/removeCatch") public static class RemoveCatch extends AbstractUnwrapRemoveTest { private void runTest(String testDataFilePath) throws Exception { KotlinTestUtils.runTest(this::doTestCatchRemover, this, testDataFilePath); } @TestMetadata("catch.kt") public void testCatch() throws Exception { runTest("testData/codeInsight/unwrapAndRemove/removeCatch/catch.kt"); } } @RunWith(JUnit3RunnerWithInners.class) 
@TestMetadata("testData/codeInsight/unwrapAndRemove/unwrapFinally") public static class UnwrapFinally extends AbstractUnwrapRemoveTest { private void runTest(String testDataFilePath) throws Exception { KotlinTestUtils.runTest(this::doTestFinallyUnwrapper, this, testDataFilePath); } @TestMetadata("finallyCompoundInBlock.kt") public void testFinallyCompoundInBlock() throws Exception { runTest("testData/codeInsight/unwrapAndRemove/unwrapFinally/finallyCompoundInBlock.kt"); } @TestMetadata("finallyCompoundInReturn.kt") public void testFinallyCompoundInReturn() throws Exception { runTest("testData/codeInsight/unwrapAndRemove/unwrapFinally/finallyCompoundInReturn.kt"); } @TestMetadata("finallySimpleInReturn.kt") public void testFinallySimpleInReturn() throws Exception { runTest("testData/codeInsight/unwrapAndRemove/unwrapFinally/finallySimpleInReturn.kt"); } } @RunWith(JUnit3RunnerWithInners.class) @TestMetadata("testData/codeInsight/unwrapAndRemove/removeFinally") public static class RemoveFinally extends AbstractUnwrapRemoveTest { private void runTest(String testDataFilePath) throws Exception { KotlinTestUtils.runTest(this::doTestFinallyRemover, this, testDataFilePath); } @TestMetadata("finallyInBlock.kt") public void testFinallyInBlock() throws Exception { runTest("testData/codeInsight/unwrapAndRemove/removeFinally/finallyInBlock.kt"); } @TestMetadata("finallyInReturn.kt") public void testFinallyInReturn() throws Exception { runTest("testData/codeInsight/unwrapAndRemove/removeFinally/finallyInReturn.kt"); } } @RunWith(JUnit3RunnerWithInners.class) @TestMetadata("testData/codeInsight/unwrapAndRemove/unwrapLambda") public static class UnwrapLambda extends AbstractUnwrapRemoveTest { private void runTest(String testDataFilePath) throws Exception { KotlinTestUtils.runTest(this::doTestLambdaUnwrapper, this, testDataFilePath); } @TestMetadata("lambdaCallCompoundInBlock.kt") public void testLambdaCallCompoundInBlock() throws Exception { runTest("testData/codeInsight/unwrapAndRemove/unwrapLambda/lambdaCallCompoundInBlock.kt"); } @TestMetadata("lambdaCallCompoundInReturn.kt") public void testLambdaCallCompoundInReturn() throws Exception { runTest("testData/codeInsight/unwrapAndRemove/unwrapLambda/lambdaCallCompoundInReturn.kt"); } @TestMetadata("lambdaCallInBlock.kt") public void testLambdaCallInBlock() throws Exception { runTest("testData/codeInsight/unwrapAndRemove/unwrapLambda/lambdaCallInBlock.kt"); } @TestMetadata("lambdaCallInBlock2.kt") public void testLambdaCallInBlock2() throws Exception { runTest("testData/codeInsight/unwrapAndRemove/unwrapLambda/lambdaCallInBlock2.kt"); } @TestMetadata("lambdaCallSimpleInReturn.kt") public void testLambdaCallSimpleInReturn() throws Exception { runTest("testData/codeInsight/unwrapAndRemove/unwrapLambda/lambdaCallSimpleInReturn.kt"); } @TestMetadata("lambdaInBlock.kt") public void testLambdaInBlock() throws Exception { runTest("testData/codeInsight/unwrapAndRemove/unwrapLambda/lambdaInBlock.kt"); } @TestMetadata("lambdaNonLocalPropertyCompoundInBlock.kt") public void testLambdaNonLocalPropertyCompoundInBlock() throws Exception { runTest("testData/codeInsight/unwrapAndRemove/unwrapLambda/lambdaNonLocalPropertyCompoundInBlock.kt"); } @TestMetadata("lambdaNonLocalPropertyInBlock.kt") public void testLambdaNonLocalPropertyInBlock() throws Exception { runTest("testData/codeInsight/unwrapAndRemove/unwrapLambda/lambdaNonLocalPropertyInBlock.kt"); } @TestMetadata("lambdaPropertyCompoundInBlock.kt") public void testLambdaPropertyCompoundInBlock() throws Exception { 
runTest("testData/codeInsight/unwrapAndRemove/unwrapLambda/lambdaPropertyCompoundInBlock.kt"); } @TestMetadata("lambdaPropertyInBlock.kt") public void testLambdaPropertyInBlock() throws Exception { runTest("testData/codeInsight/unwrapAndRemove/unwrapLambda/lambdaPropertyInBlock.kt"); } } @RunWith(JUnit3RunnerWithInners.class) @TestMetadata("testData/codeInsight/unwrapAndRemove/unwrapFunctionParameter") public static class UnwrapFunctionParameter extends AbstractUnwrapRemoveTest { private void runTest(String testDataFilePath) throws Exception { KotlinTestUtils.runTest(this::doTestFunctionParameterUnwrapper, this, testDataFilePath); } @TestMetadata("functionHasMultiParam.kt") public void testFunctionHasMultiParam() throws Exception { runTest("testData/codeInsight/unwrapAndRemove/unwrapFunctionParameter/functionHasMultiParam.kt"); } @TestMetadata("functionHasSingleParam.kt") public void testFunctionHasSingleParam() throws Exception { runTest("testData/codeInsight/unwrapAndRemove/unwrapFunctionParameter/functionHasSingleParam.kt"); } @TestMetadata("functionWithReceiver.kt") public void testFunctionWithReceiver() throws Exception { runTest("testData/codeInsight/unwrapAndRemove/unwrapFunctionParameter/functionWithReceiver.kt"); } } }
/* * Copyright (c) 2005-2011 Grameen Foundation USA * All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or * implied. See the License for the specific language governing * permissions and limitations under the License. * * See also http://www.apache.org/licenses/LICENSE-2.0.html for an * explanation of the license and how it is applied. */ package org.mifos.reports.branchreport; import static org.mifos.framework.util.helpers.MoneyUtils.getMoneyAmount; import static org.mifos.reports.util.helpers.ReportUtils.toDisplayDate; import java.math.BigDecimal; import java.util.Date; import org.mifos.config.AccountingRules; import org.mifos.framework.business.AbstractBusinessObject; import org.mifos.framework.util.helpers.Money; public class BranchReportStaffSummaryBO extends AbstractBusinessObject { @SuppressWarnings("unused") private BranchReportBO branchReport; @SuppressWarnings("unused") private Integer staffSummaryId; private Short personnelId; private String personnelName; private Integer activeBorrowersCount; private Integer activeLoansCount; private Date joiningDate; private Integer centerCount; private Integer clientCount; private Money loanAmountOutstanding; private Money interestAndFeesAmountOutstanding; private BigDecimal portfolioAtRisk; private Integer totalClientsEnrolled; private Integer clientsEnrolledThisMonth; private Money loanArrearsAmount; public BranchReportStaffSummaryBO(Short personnelId, String personnelName, Date joiningDate, Integer borrowersCount, Integer activeLoansCount, Integer centerCount, Integer clientCount, Money loanAmountOutstanding, Money interestAndFeesAmountOutstanding, BigDecimal portfolioAtRisk, Integer totalClientsEnrolled, Integer clientsEnrolledThisMonth, Money loanArrearsAmount) { this.personnelId = personnelId; this.joiningDate = joiningDate; this.activeBorrowersCount = borrowersCount; this.activeLoansCount = activeLoansCount; this.personnelName = personnelName; this.centerCount = centerCount; this.clientCount = clientCount; this.loanAmountOutstanding = loanAmountOutstanding; this.interestAndFeesAmountOutstanding = interestAndFeesAmountOutstanding; this.portfolioAtRisk = portfolioAtRisk; this.totalClientsEnrolled = totalClientsEnrolled; this.clientsEnrolledThisMonth = clientsEnrolledThisMonth; this.loanArrearsAmount = loanArrearsAmount; } protected BranchReportStaffSummaryBO() { } public String getJoiningDateStr() { return toDisplayDate(joiningDate); } public void setBranchReport(BranchReportBO branchReport) { this.branchReport = branchReport; } public Integer getActiveLoansCount() { return activeLoansCount; } public Integer getActiveBorrowersCount() { return activeBorrowersCount; } public String getPersonnelName() { return personnelName; } public Short getPersonnelId() { return personnelId; } public void setCenterCount(Integer centerCount) { this.centerCount = centerCount; } public void setClientCount(Integer clientCount) { this.clientCount = clientCount; } public Integer getCenterCount() { return centerCount; } public Integer getClientCount() { return clientCount; } public BigDecimal getLoanAmountOutstanding() { return getMoneyAmount(loanAmountOutstanding, 
AccountingRules.getDigitsAfterDecimal()); } public BigDecimal getInterestAndFeesAmountOutstanding() { return getMoneyAmount(interestAndFeesAmountOutstanding, AccountingRules.getDigitsAfterDecimal()); } public void setInterestAndFeesAmountOutstanding(Money interestAndFeesAmountOutstanding) { this.interestAndFeesAmountOutstanding = interestAndFeesAmountOutstanding; } public void setLoanAmountOutstanding(Money loanAmountOutstanding) { this.loanAmountOutstanding = loanAmountOutstanding; } public void setPortfolioAtRisk(BigDecimal portfolioAtRisk) { this.portfolioAtRisk = portfolioAtRisk; } public BigDecimal getPortfolioAtRisk() { return portfolioAtRisk; } public void setTotalClientsEnrolled(Integer totalClientsFormedBy) { this.totalClientsEnrolled = totalClientsFormedBy; } public Integer getTotalClientsEnrolled() { return totalClientsEnrolled; } public void setClientsEnrolledThisMonth(Integer clientsFormedByThisMonth) { this.clientsEnrolledThisMonth = clientsFormedByThisMonth; } public Integer getClientsEnrolledThisMonth() { return clientsEnrolledThisMonth; } public void setLoanArrearsAmount(Money loanArrearsAmount) { this.loanArrearsAmount = loanArrearsAmount; } public BigDecimal getLoanArrearsAmount() { return getMoneyAmount(loanArrearsAmount, AccountingRules.getDigitsAfterDecimal()); } @Override public int hashCode() { final int PRIME = 31; int result = super.hashCode(); result = PRIME * result + ((activeBorrowersCount == null) ? 0 : activeBorrowersCount.hashCode()); result = PRIME * result + ((activeLoansCount == null) ? 0 : activeLoansCount.hashCode()); result = PRIME * result + ((centerCount == null) ? 0 : centerCount.hashCode()); result = PRIME * result + ((clientCount == null) ? 0 : clientCount.hashCode()); result = PRIME * result + ((clientsEnrolledThisMonth == null) ? 0 : clientsEnrolledThisMonth.hashCode()); result = PRIME * result + ((interestAndFeesAmountOutstanding == null) ? 0 : interestAndFeesAmountOutstanding.hashCode()); result = PRIME * result + ((joiningDate == null) ? 0 : joiningDate.hashCode()); result = PRIME * result + ((loanAmountOutstanding == null) ? 0 : loanAmountOutstanding.hashCode()); result = PRIME * result + ((loanArrearsAmount == null) ? 0 : loanArrearsAmount.hashCode()); result = PRIME * result + ((personnelId == null) ? 0 : personnelId.hashCode()); result = PRIME * result + ((personnelName == null) ? 0 : personnelName.hashCode()); result = PRIME * result + ((portfolioAtRisk == null) ? 0 : portfolioAtRisk.hashCode()); result = PRIME * result + ((staffSummaryId == null) ? 0 : staffSummaryId.hashCode()); result = PRIME * result + ((totalClientsEnrolled == null) ? 
0 : totalClientsEnrolled.hashCode());
        return result;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null) {
            // a null argument is never equal to this instance
            return false;
        }
        if (getClass() != obj.getClass()) {
            return false;
        }
        final BranchReportStaffSummaryBO other = (BranchReportStaffSummaryBO) obj;
        if (activeBorrowersCount == null) {
            if (other.activeBorrowersCount != null) {
                return false;
            }
        } else if (!activeBorrowersCount.equals(other.activeBorrowersCount)) {
            return false;
        }
        if (activeLoansCount == null) {
            if (other.activeLoansCount != null) {
                return false;
            }
        } else if (!activeLoansCount.equals(other.activeLoansCount)) {
            return false;
        }
        if (centerCount == null) {
            if (other.centerCount != null) {
                return false;
            }
        } else if (!centerCount.equals(other.centerCount)) {
            return false;
        }
        if (clientCount == null) {
            if (other.clientCount != null) {
                return false;
            }
        } else if (!clientCount.equals(other.clientCount)) {
            return false;
        }
        if (clientsEnrolledThisMonth == null) {
            if (other.clientsEnrolledThisMonth != null) {
                return false;
            }
        } else if (!clientsEnrolledThisMonth.equals(other.clientsEnrolledThisMonth)) {
            return false;
        }
        if (interestAndFeesAmountOutstanding == null) {
            if (other.interestAndFeesAmountOutstanding != null) {
                return false;
            }
        } else if (!interestAndFeesAmountOutstanding.equals(other.interestAndFeesAmountOutstanding)) {
            return false;
        }
        if (joiningDate == null) {
            if (other.joiningDate != null) {
                return false;
            }
        } else if (!joiningDate.equals(other.joiningDate)) {
            return false;
        }
        if (loanAmountOutstanding == null) {
            if (other.loanAmountOutstanding != null) {
                return false;
            }
        } else if (!loanAmountOutstanding.equals(other.loanAmountOutstanding)) {
            return false;
        }
        if (loanArrearsAmount == null) {
            if (other.loanArrearsAmount != null) {
                return false;
            }
        } else if (!loanArrearsAmount.equals(other.loanArrearsAmount)) {
            return false;
        }
        if (personnelId == null) {
            if (other.personnelId != null) {
                return false;
            }
        } else if (!personnelId.equals(other.personnelId)) {
            return false;
        }
        if (personnelName == null) {
            if (other.personnelName != null) {
                return false;
            }
        } else if (!personnelName.equals(other.personnelName)) {
            return false;
        }
        if (portfolioAtRisk == null) {
            if (other.portfolioAtRisk != null) {
                return false;
            }
        } else if (!portfolioAtRisk.equals(other.portfolioAtRisk)) {
            return false;
        }
        if (staffSummaryId == null) {
            if (other.staffSummaryId != null) {
                return false;
            }
        } else if (!staffSummaryId.equals(other.staffSummaryId)) {
            return false;
        }
        if (totalClientsEnrolled == null) {
            if (other.totalClientsEnrolled != null) {
                return false;
            }
        } else if (!totalClientsEnrolled.equals(other.totalClientsEnrolled)) {
            return false;
        }
        return true;
    }
}
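The monetary getters above return BigDecimal values already scaled to AccountingRules.getDigitsAfterDecimal(), so report consumers can aggregate them without touching Money directly. A minimal sketch, assuming a populated summary instance supplied by the surrounding reporting code:

// Minimal sketch, not part of the Mifos sources: aggregates the scaled amounts
// exposed by BranchReportStaffSummaryBO. The summary instance is assumed to be
// built elsewhere; only getters defined above are used.
import java.math.BigDecimal;

final class StaffSummaryExposureSketch {
    static BigDecimal totalExposure(BranchReportStaffSummaryBO summary) {
        // Each getter already applies AccountingRules.getDigitsAfterDecimal()
        return summary.getLoanAmountOutstanding()
                .add(summary.getInterestAndFeesAmountOutstanding())
                .add(summary.getLoanArrearsAmount());
    }

    private StaffSummaryExposureSketch() {}
}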
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.hive.orc; import com.facebook.presto.hive.HiveClientConfig; import com.facebook.presto.hive.HiveColumnHandle; import com.facebook.presto.hive.HivePageSourceFactory; import com.facebook.presto.hive.HivePartitionKey; import com.facebook.presto.orc.OrcDataSource; import com.facebook.presto.orc.OrcPredicate; import com.facebook.presto.orc.OrcReader; import com.facebook.presto.orc.OrcRecordReader; import com.facebook.presto.orc.TupleDomainOrcPredicate; import com.facebook.presto.orc.TupleDomainOrcPredicate.ColumnReference; import com.facebook.presto.orc.memory.AggregatedMemoryContext; import com.facebook.presto.orc.metadata.MetadataReader; import com.facebook.presto.orc.metadata.OrcMetadataReader; import com.facebook.presto.spi.ConnectorPageSource; import com.facebook.presto.spi.ConnectorSession; import com.facebook.presto.spi.PrestoException; import com.facebook.presto.spi.predicate.TupleDomain; import com.facebook.presto.spi.type.Type; import com.facebook.presto.spi.type.TypeManager; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import io.airlift.units.DataSize; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.ql.io.orc.OrcSerde; import org.joda.time.DateTimeZone; import javax.inject.Inject; import java.io.FileNotFoundException; import java.io.IOException; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Properties; import java.util.regex.Pattern; import static com.facebook.presto.hive.HiveErrorCode.HIVE_CANNOT_OPEN_SPLIT; import static com.facebook.presto.hive.HiveErrorCode.HIVE_FILE_MISSING_COLUMN_NAMES; import static com.facebook.presto.hive.HiveErrorCode.HIVE_MISSING_DATA; import static com.facebook.presto.hive.HiveSessionProperties.getOrcMaxBufferSize; import static com.facebook.presto.hive.HiveSessionProperties.getOrcMaxMergeDistance; import static com.facebook.presto.hive.HiveSessionProperties.getOrcStreamBufferSize; import static com.facebook.presto.hive.HiveSessionProperties.isOptimizedReaderEnabled; import static com.facebook.presto.hive.HiveUtil.isDeserializerClass; import static com.google.common.base.Strings.nullToEmpty; import static java.lang.String.format; import static java.util.Objects.requireNonNull; public class OrcPageSourceFactory implements HivePageSourceFactory { private static final Pattern DEFAULT_HIVE_COLUMN_NAME_PATTERN = Pattern.compile("_col\\d+"); private final TypeManager typeManager; private final boolean useOrcColumnNames; @Inject public OrcPageSourceFactory(TypeManager typeManager, HiveClientConfig config) { this(typeManager, requireNonNull(config, "hiveClientConfig is null").isUseOrcColumnNames()); } public OrcPageSourceFactory(TypeManager typeManager, boolean useOrcColumnNames) { this.typeManager = requireNonNull(typeManager, "typeManager is null"); 
this.useOrcColumnNames = useOrcColumnNames; } @Override public Optional<? extends ConnectorPageSource> createPageSource( Configuration configuration, ConnectorSession session, Path path, long start, long length, Properties schema, List<HiveColumnHandle> columns, List<HivePartitionKey> partitionKeys, TupleDomain<HiveColumnHandle> effectivePredicate, DateTimeZone hiveStorageTimeZone) { if (!isOptimizedReaderEnabled(session)) { return Optional.empty(); } if (!isDeserializerClass(schema, OrcSerde.class)) { return Optional.empty(); } return Optional.of(createOrcPageSource( new OrcMetadataReader(), configuration, path, start, length, columns, partitionKeys, useOrcColumnNames, effectivePredicate, hiveStorageTimeZone, typeManager, getOrcMaxMergeDistance(session), getOrcMaxBufferSize(session), getOrcStreamBufferSize(session))); } public static OrcPageSource createOrcPageSource(MetadataReader metadataReader, Configuration configuration, Path path, long start, long length, List<HiveColumnHandle> columns, List<HivePartitionKey> partitionKeys, boolean useOrcColumnNames, TupleDomain<HiveColumnHandle> effectivePredicate, DateTimeZone hiveStorageTimeZone, TypeManager typeManager, DataSize maxMergeDistance, DataSize maxBufferSize, DataSize streamBufferSize) { OrcDataSource orcDataSource; try { FileSystem fileSystem = path.getFileSystem(configuration); long size = fileSystem.getFileStatus(path).getLen(); FSDataInputStream inputStream = fileSystem.open(path); orcDataSource = new HdfsOrcDataSource(path.toString(), size, maxMergeDistance, maxBufferSize, streamBufferSize, inputStream); } catch (Exception e) { if (nullToEmpty(e.getMessage()).trim().equals("Filesystem closed") || e instanceof FileNotFoundException) { throw new PrestoException(HIVE_CANNOT_OPEN_SPLIT, e); } throw new PrestoException(HIVE_CANNOT_OPEN_SPLIT, splitError(e, path, start, length), e); } AggregatedMemoryContext systemMemoryUsage = new AggregatedMemoryContext(); try { OrcReader reader = new OrcReader(orcDataSource, metadataReader, maxMergeDistance, maxBufferSize); List<HiveColumnHandle> physicalColumns = getPhysicalHiveColumnHandles(columns, useOrcColumnNames, reader, path); ImmutableMap.Builder<Integer, Type> includedColumns = ImmutableMap.builder(); ImmutableList.Builder<ColumnReference<HiveColumnHandle>> columnReferences = ImmutableList.builder(); for (HiveColumnHandle column : physicalColumns) { if (!column.isPartitionKey()) { Type type = typeManager.getType(column.getTypeSignature()); includedColumns.put(column.getHiveColumnIndex(), type); columnReferences.add(new ColumnReference<>(column, column.getHiveColumnIndex(), type)); } } OrcPredicate predicate = new TupleDomainOrcPredicate<>(effectivePredicate, columnReferences.build()); OrcRecordReader recordReader = reader.createRecordReader( includedColumns.build(), predicate, start, length, hiveStorageTimeZone, systemMemoryUsage); return new OrcPageSource( recordReader, orcDataSource, partitionKeys, physicalColumns, hiveStorageTimeZone, typeManager, systemMemoryUsage); } catch (Exception e) { try { orcDataSource.close(); } catch (IOException ignored) { } if (e instanceof PrestoException) { throw (PrestoException) e; } String message = splitError(e, path, start, length); if (e.getClass().getSimpleName().equals("BlockMissingException")) { throw new PrestoException(HIVE_MISSING_DATA, message, e); } throw new PrestoException(HIVE_CANNOT_OPEN_SPLIT, message, e); } } private static String splitError(Throwable t, Path path, long start, long length) { return format("Error opening Hive split %s 
(offset=%s, length=%s): %s", path, start, length, t.getMessage()); } private static List<HiveColumnHandle> getPhysicalHiveColumnHandles(List<HiveColumnHandle> columns, boolean useOrcColumnNames, OrcReader reader, Path path) { if (!useOrcColumnNames) { return columns; } verifyFileHasColumnNames(reader.getColumnNames(), path); Map<String, Integer> physicalNameOrdinalMap = buildPhysicalNameOrdinalMap(reader); int nextMissingColumnIndex = physicalNameOrdinalMap.size(); ImmutableList.Builder<HiveColumnHandle> physicalColumns = ImmutableList.builder(); for (HiveColumnHandle column : columns) { Integer physicalOrdinal = physicalNameOrdinalMap.get(column.getName()); if (physicalOrdinal == null) { // if the column is missing from the file, assign it a column number larger // than the number of columns in the file so the reader will fill it with nulls physicalOrdinal = nextMissingColumnIndex; nextMissingColumnIndex++; } physicalColumns.add(new HiveColumnHandle(column.getClientId(), column.getName(), column.getHiveType(), column.getTypeSignature(), physicalOrdinal, column.isPartitionKey())); } return physicalColumns.build(); } private static void verifyFileHasColumnNames(List<String> physicalColumnNames, Path path) { if (physicalColumnNames.stream().allMatch(physicalColumnName -> DEFAULT_HIVE_COLUMN_NAME_PATTERN.matcher(physicalColumnName).matches())) { throw new PrestoException( HIVE_FILE_MISSING_COLUMN_NAMES, "ORC file does not contain column names in the footer: " + path); } } private static Map<String, Integer> buildPhysicalNameOrdinalMap(OrcReader reader) { ImmutableMap.Builder<String, Integer> physicalNameOrdinalMap = ImmutableMap.builder(); int ordinal = 0; for (String physicalColumnName : reader.getColumnNames()) { physicalNameOrdinalMap.put(physicalColumnName, ordinal); ordinal++; } return physicalNameOrdinalMap.build(); } }
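getPhysicalHiveColumnHandles above resolves requested columns by name against the ORC footer and hands out ordinals past the end of the file for columns that are missing, so the reader materializes them as nulls. The standalone sketch below restates that ordinal-assignment rule on plain strings; it mirrors buildPhysicalNameOrdinalMap but is illustrative only and uses no Presto types.

// Standalone sketch of the ordinal-assignment rule used above: columns present
// in the file keep their physical position, and columns missing from the file
// get ordinals past the end so a reader can fill them with nulls.
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

final class OrcColumnOrdinalSketch {
    static List<Integer> assignOrdinals(List<String> requestedColumns, List<String> fileColumns) {
        Map<String, Integer> physicalNameOrdinalMap = new HashMap<>();
        for (int ordinal = 0; ordinal < fileColumns.size(); ordinal++) {
            physicalNameOrdinalMap.put(fileColumns.get(ordinal), ordinal);
        }
        int nextMissingColumnIndex = physicalNameOrdinalMap.size();
        List<Integer> ordinals = new ArrayList<>();
        for (String column : requestedColumns) {
            Integer ordinal = physicalNameOrdinalMap.get(column);
            if (ordinal == null) {
                // missing from the file: assign a slot past the real columns
                ordinal = nextMissingColumnIndex++;
            }
            ordinals.add(ordinal);
        }
        return ordinals;
    }

    private OrcColumnOrdinalSketch() {}
}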
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.operator; import com.facebook.presto.block.Block; import com.facebook.presto.block.BlockBuilder; import com.facebook.presto.metadata.ColumnFileHandle; import com.facebook.presto.metadata.LocalStorageManager; import com.facebook.presto.spi.ColumnHandle; import com.facebook.presto.spi.Split; import com.facebook.presto.split.NativeSplit; import com.facebook.presto.sql.planner.plan.PlanNodeId; import com.facebook.presto.tuple.TupleInfo; import com.google.common.base.Suppliers; import com.google.common.base.Throwables; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.google.common.util.concurrent.ListenableFuture; import java.io.IOException; import java.util.List; import java.util.concurrent.atomic.AtomicReference; import static com.facebook.presto.tuple.TupleInfo.SINGLE_LONG; import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.base.Preconditions.checkState; public class TableWriterOperator implements SourceOperator { public static class TableWriterOperatorFactory implements SourceOperatorFactory { private final int operatorId; private final PlanNodeId sourceId; private final LocalStorageManager storageManager; private final String nodeIdentifier; private final List<ColumnHandle> columnHandles; public TableWriterOperatorFactory( int operatorId, PlanNodeId sourceId, LocalStorageManager storageManager, String nodeIdentifier, List<ColumnHandle> columnHandles) { this.operatorId = operatorId; this.sourceId = checkNotNull(sourceId, "sourceId is null"); this.storageManager = checkNotNull(storageManager, "storageManager is null"); this.nodeIdentifier = checkNotNull(nodeIdentifier, "nodeIdentifier is null"); this.columnHandles = ImmutableList.copyOf(checkNotNull(columnHandles, "columnHandles is null")); } @Override public PlanNodeId getSourceId() { return sourceId; } @Override public List<TupleInfo> getTupleInfos() { return ImmutableList.of(SINGLE_LONG); } @Override public SourceOperator createOperator(DriverContext driverContext) { try { OperatorContext operatorContext = driverContext.addOperatorContext(operatorId, TableWriterOperator.class.getSimpleName()); return new TableWriterOperator( operatorContext, sourceId, storageManager, nodeIdentifier, columnHandles); } catch (IOException e) { throw Throwables.propagate(e); } } @Override public void close() { } } private enum State { RUNNING, FINISHING, FINISHED } private final OperatorContext operatorContext; private final PlanNodeId sourceId; private final LocalStorageManager storageManager; private final String nodeIdentifier; private final List<ColumnHandle> columnHandles; private ColumnFileHandle columnFileHandle; private final AtomicReference<NativeSplit> input = new AtomicReference<>(); private State state = State.RUNNING; private long rowCount; public TableWriterOperator( OperatorContext operatorContext, PlanNodeId sourceId, LocalStorageManager storageManager, String nodeIdentifier, 
List<ColumnHandle> columnHandles) throws IOException { this.operatorContext = checkNotNull(operatorContext, "operatorContext is null"); this.sourceId = checkNotNull(sourceId, "sourceId is null"); this.storageManager = checkNotNull(storageManager, "storageManager is null"); this.nodeIdentifier = checkNotNull(nodeIdentifier, "nodeIdentifier is null"); this.columnHandles = ImmutableList.copyOf(columnHandles); } @Override public OperatorContext getOperatorContext() { return operatorContext; } @Override public PlanNodeId getSourceId() { return sourceId; } @Override public void addSplit(final Split split) { checkNotNull(split, "split is null"); checkState(split instanceof NativeSplit, "Non-native split added!"); checkState(input.get() == null, "Shard Id %s was already set!", input.get()); input.set((NativeSplit) split); Object splitInfo = split.getInfo(); if (splitInfo != null) { operatorContext.setInfoSupplier(Suppliers.ofInstance(splitInfo)); } } @Override public void noMoreSplits() { checkState(input.get() != null, "No shard id was set!"); } @Override public List<TupleInfo> getTupleInfos() { return ImmutableList.of(SINGLE_LONG); } @Override public void finish() { if (state == State.RUNNING) { state = State.FINISHING; } } @Override public boolean isFinished() { return state == State.FINISHED; } @Override public ListenableFuture<?> isBlocked() { return NOT_BLOCKED; } @Override public boolean needsInput() { return state == State.RUNNING; } @Override public void addInput(Page page) { checkNotNull(page, "page is null"); checkState(state == State.RUNNING, "Operator is finishing"); if (columnFileHandle == null) { try { columnFileHandle = storageManager.createStagingFileHandles(input.get().getShardId(), columnHandles); } catch (IOException e) { throw Throwables.propagate(e); } } rowCount += columnFileHandle.append(page); } @Override public Page getOutput() { if (state != State.FINISHING) { return null; } state = State.FINISHED; if (columnFileHandle != null) { try { storageManager.commit(columnFileHandle); } catch (IOException e) { throw Throwables.propagate(e); } operatorContext.addOutputItems(sourceId, ImmutableSet.of(new TableWriterResult(input.get().getShardId(), nodeIdentifier))); } Block block = new BlockBuilder(SINGLE_LONG).append(rowCount).build(); return new Page(block); } }
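TableWriterOperator above is a small state machine: pages are appended while RUNNING, finish() moves it to FINISHING, and getOutput() emits the row-count page exactly once before settling in FINISHED. The self-contained sketch below mirrors that lifecycle with plain longs instead of Pages; all names are local to the sketch and carry no Presto dependencies.

// Self-contained sketch of the writer lifecycle above: input is accepted only
// while RUNNING, finish() moves the state to FINISHING, and the row count is
// emitted exactly once, flipping the state to FINISHED.
final class WriterLifecycleSketch {
    private enum State { RUNNING, FINISHING, FINISHED }

    private State state = State.RUNNING;
    private long rowCount;

    boolean needsInput() {
        return state == State.RUNNING;
    }

    void addRows(long rows) {
        if (state != State.RUNNING) {
            throw new IllegalStateException("Operator is finishing");
        }
        rowCount += rows;
    }

    void finish() {
        if (state == State.RUNNING) {
            state = State.FINISHING;
        }
    }

    /** Returns the row count once, after finish(), mirroring getOutput(). */
    Long getOutput() {
        if (state != State.FINISHING) {
            return null;
        }
        state = State.FINISHED;
        return rowCount;
    }
}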
// Licensed to the Apache Software Foundation (ASF) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The ASF licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. package com.cloud.bridge.service.controller.s3; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.OutputStream; import java.io.PrintWriter; import java.io.Reader; import java.io.StringWriter; import java.io.Writer; import java.util.ArrayList; import java.text.SimpleDateFormat; import java.util.Calendar; import java.util.List; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.xml.bind.DatatypeConverter; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.stream.XMLStreamException; import org.apache.log4j.Logger; import org.json.simple.parser.ParseException; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.Node; import org.w3c.dom.NodeList; import com.amazon.s3.GetBucketAccessControlPolicyResponse; import com.amazon.s3.ListAllMyBucketsResponse; import com.amazon.s3.ListBucketResponse; import com.cloud.bridge.io.MTOMAwareResultStreamWriter; import com.cloud.bridge.model.SAcl; import com.cloud.bridge.model.SBucket; import com.cloud.bridge.model.SHost; import com.cloud.bridge.persist.PersistContext; import com.cloud.bridge.persist.dao.BucketPolicyDao; import com.cloud.bridge.persist.dao.MultipartLoadDao; import com.cloud.bridge.persist.dao.SAclDao; import com.cloud.bridge.persist.dao.SBucketDao; import com.cloud.bridge.service.S3Constants; import com.cloud.bridge.service.S3RestServlet; import com.cloud.bridge.service.controller.s3.ServiceProvider; import com.cloud.bridge.service.UserContext; import com.cloud.bridge.service.core.s3.S3AccessControlList; import com.cloud.bridge.service.core.s3.S3AccessControlPolicy; import com.cloud.bridge.service.core.s3.S3AuthParams; import com.cloud.bridge.service.core.s3.S3BucketAdapter; import com.cloud.bridge.service.core.s3.S3BucketPolicy; import com.cloud.bridge.service.core.s3.S3CanonicalUser; import com.cloud.bridge.service.core.s3.S3CreateBucketConfiguration; import com.cloud.bridge.service.core.s3.S3CreateBucketRequest; import com.cloud.bridge.service.core.s3.S3CreateBucketResponse; import com.cloud.bridge.service.core.s3.S3DeleteBucketRequest; import com.cloud.bridge.service.core.s3.S3DeleteObjectRequest; import com.cloud.bridge.service.core.s3.S3Engine; import com.cloud.bridge.service.core.s3.S3GetBucketAccessControlPolicyRequest; import com.cloud.bridge.service.core.s3.S3Grant; import com.cloud.bridge.service.core.s3.S3ListAllMyBucketsEntry; import com.cloud.bridge.service.core.s3.S3ListAllMyBucketsRequest; import com.cloud.bridge.service.core.s3.S3ListAllMyBucketsResponse; import com.cloud.bridge.service.core.s3.S3ListBucketObjectEntry; import 
com.cloud.bridge.service.core.s3.S3ListBucketRequest; import com.cloud.bridge.service.core.s3.S3ListBucketResponse; import com.cloud.bridge.service.core.s3.S3MetaDataEntry; import com.cloud.bridge.service.core.s3.S3MultipartUpload; import com.cloud.bridge.service.core.s3.S3PolicyContext; import com.cloud.bridge.service.core.s3.S3PutObjectRequest; import com.cloud.bridge.service.core.s3.S3Response; import com.cloud.bridge.service.core.s3.S3SetBucketAccessControlPolicyRequest; import com.cloud.bridge.service.core.s3.S3BucketPolicy.PolicyAccess; import com.cloud.bridge.service.core.s3.S3PolicyAction.PolicyActions; import com.cloud.bridge.service.core.s3.S3PolicyCondition.ConditionKeys; import com.cloud.bridge.service.exception.InternalErrorException; import com.cloud.bridge.service.exception.InvalidBucketName; import com.cloud.bridge.service.exception.InvalidRequestContentException; import com.cloud.bridge.service.exception.NetworkIOException; import com.cloud.bridge.service.exception.NoSuchObjectException; import com.cloud.bridge.service.exception.ObjectAlreadyExistsException; import com.cloud.bridge.service.exception.OutOfServiceException; import com.cloud.bridge.service.exception.PermissionDeniedException; import com.cloud.bridge.util.Converter; import com.cloud.bridge.util.DateHelper; import com.cloud.bridge.util.HeaderParam; import com.cloud.bridge.util.PolicyParser; import com.cloud.bridge.util.StringHelper; import com.cloud.bridge.util.OrderedPair; import com.cloud.bridge.util.Triple; import com.cloud.bridge.util.XSerializer; import com.cloud.bridge.util.XSerializerXmlAdapter; import com.cloud.bridge.util.XmlHelper; /** * @author Kelven Yang, John Zucker */ public class S3BucketAction implements ServletAction { protected final static Logger logger = Logger.getLogger(S3BucketAction.class); private DocumentBuilderFactory dbf = null; public S3BucketAction() { dbf = DocumentBuilderFactory.newInstance(); dbf.setNamespaceAware( true ); } public void execute(HttpServletRequest request, HttpServletResponse response) throws IOException, XMLStreamException { String method = request.getMethod(); String queryString = request.getQueryString(); if ( method.equalsIgnoreCase("PUT")) { if ( queryString != null && queryString.length() > 0 ) { if ( queryString.startsWith("acl")) { executePutBucketAcl(request, response); return; } else if (queryString.startsWith("versioning")) { executePutBucketVersioning(request, response); return; } else if (queryString.startsWith("policy")) { executePutBucketPolicy(request, response); return; } else if (queryString.startsWith("logging")) { executePutBucketLogging(request, response); return; } else if (queryString.startsWith("website")) { executePutBucketWebsite(request, response); return; } } executePutBucket(request, response); } else if(method.equalsIgnoreCase("GET") || method.equalsIgnoreCase("HEAD")) { if (queryString != null && queryString.length() > 0) { if ( queryString.startsWith("acl")) { executeGetBucketAcl(request, response); return; } else if (queryString.startsWith("versioning")) { executeGetBucketVersioning(request, response); return; } else if (queryString.contains("versions")) { executeGetBucketObjectVersions(request, response); return; } else if (queryString.startsWith("location")) { executeGetBucketLocation(request, response); return; } else if (queryString.startsWith("uploads")) { executeListMultipartUploads(request, response); return; } else if (queryString.startsWith("policy")) { executeGetBucketPolicy(request, response); return; } else if 
(queryString.startsWith("logging")) { executeGetBucketLogging(request, response); return; } else if (queryString.startsWith("website")) { executeGetBucketWebsite(request, response); return; } } String bucketAtr = (String)request.getAttribute(S3Constants.BUCKET_ATTR_KEY); if ( bucketAtr.equals( "/" )) executeGetAllBuckets(request, response); else executeGetBucket(request, response); } else if (method.equalsIgnoreCase("DELETE")) { if (queryString != null && queryString.length() > 0) { if ( queryString.startsWith("policy")) { executeDeleteBucketPolicy(request, response); return; } else if (queryString.startsWith("website")) { executeDeleteBucketWebsite(request, response); return; } } executeDeleteBucket(request, response); } else if ( (method.equalsIgnoreCase("POST")) && (queryString.equalsIgnoreCase("delete")) ) { executeMultiObjectDelete(request, response); } else throw new IllegalArgumentException("Unsupported method in REST request"); } private void executeMultiObjectDelete(HttpServletRequest request, HttpServletResponse response) throws IOException{ int contentLength = request.getContentLength(); StringBuffer xmlDeleteResponse = null; boolean quite = true; if(contentLength > 0) { InputStream is = null; String versionID =null; try { is = request.getInputStream(); String xml = StringHelper.stringFromStream(is); String elements[] = {"Key","VersionId"}; Document doc = XmlHelper.parse(xml); Node node = XmlHelper.getRootNode(doc); if(node == null) { System.out.println("Invalid XML document, no root element"); return; } xmlDeleteResponse = new StringBuffer("<?xml version=\"1.0\" encoding=\"UTF-8\"?>" + "<DeleteResult xmlns=\"http://s3.amazonaws.com/doc/2006-03-01/\">"); String bucket = (String)request.getAttribute(S3Constants.BUCKET_ATTR_KEY); S3DeleteObjectRequest engineRequest = new S3DeleteObjectRequest(); engineRequest.setBucketName( bucket ); is.close(); doc.getDocumentElement().normalize(); NodeList qList = doc.getElementsByTagName("Quiet"); if (qList.getLength() == 1 ) { Node qNode= qList.item(0); if ( qNode.getFirstChild().getNodeValue().equalsIgnoreCase("true") == false ) quite = false; logger.debug("Quite value :" + qNode.getFirstChild().getNodeValue()); } NodeList objList = doc.getElementsByTagName("Object"); for (int i = 0; i < objList.getLength(); i++) { Node key = objList.item(i); NodeList key_data = key.getChildNodes(); if (key.getNodeType() == Node.ELEMENT_NODE) { Element eElement = (Element) key; String key_name = getTagValue(elements[0], eElement); engineRequest.setBucketName(bucket); engineRequest.setKey(key_name); if (key_data.getLength() == 2) { versionID = getTagValue(elements[1], eElement); engineRequest.setVersion(versionID); } S3Response engineResponse = ServiceProvider.getInstance().getS3Engine().handleRequest( engineRequest ); int resultCode = engineResponse.getResultCode(); String resutlDesc = engineResponse.getResultDescription(); if(resultCode == 204) { if (quite) { // show response depending on quite/verbose xmlDeleteResponse.append("<Deleted><Key>"+key_name+"</Key>"); if (resutlDesc != null) xmlDeleteResponse.append(resutlDesc); xmlDeleteResponse.append("</Deleted>"); } } else { logger.debug("Error in delete ::" + key_name + " eng response:: " + engineResponse.getResultDescription()); xmlDeleteResponse.append("<Error><Key>"+key_name+"</Key>" ); if (resutlDesc != null) xmlDeleteResponse.append(resutlDesc); xmlDeleteResponse.append("</Error>"); } } } String version = engineRequest.getVersion(); if (null != version) response.addHeader( "x-amz-version-id", version ); 
} catch (IOException e) { logger.error("Unable to read request data due to " + e.getMessage(), e); throw new NetworkIOException(e); } finally { if(is != null) is.close(); } xmlDeleteResponse.append("</DeleteResult>"); } response.setStatus(200); response.setContentType("text/xml; charset=UTF-8"); S3RestServlet.endResponse(response, xmlDeleteResponse.toString()); } private String getTagValue(String sTag, Element eElement) { NodeList nlList = eElement.getElementsByTagName(sTag).item(0).getChildNodes(); Node nValue = (Node) nlList.item(0); return nValue.getNodeValue(); } /** * In order to support a policy on the "s3:CreateBucket" action we must be able to set and get * policies before a bucket is actually created. * * @param request * @param response * @throws IOException */ private void executePutBucketPolicy(HttpServletRequest request, HttpServletResponse response) throws IOException { String bucketName = (String)request.getAttribute(S3Constants.BUCKET_ATTR_KEY); String policy = streamToString( request.getInputStream()); // [A] Is there an owner of an existing policy or bucket? BucketPolicyDao policyDao = new BucketPolicyDao(); SBucketDao bucketDao = new SBucketDao(); SBucket bucket = bucketDao.getByName( bucketName ); String owner = null; if ( null != bucket ) { owner = bucket.getOwnerCanonicalId(); } else { try { owner = policyDao.getPolicyOwner( bucketName ); } catch( Exception e ) {} } // [B] "The bucket owner by default has permissions to attach bucket policies to their buckets using PUT Bucket policy." // -> the bucket owner may want to restrict the IP address from where this can be executed String client = UserContext.current().getCanonicalUserId(); S3PolicyContext context = new S3PolicyContext( PolicyActions.PutBucketPolicy, bucketName ); switch( S3Engine.verifyPolicy( context )) { case ALLOW: break; case DEFAULT_DENY: if (null != owner && !client.equals( owner )) { response.setStatus(405); return; } break; case DENY: response.setStatus(403); return; } // [B] Place the policy into the database over writting an existing policy try { // -> first make sure that the policy is valid by parsing it PolicyParser parser = new PolicyParser(); S3BucketPolicy sbp = parser.parse( policy, bucketName ); policyDao.deletePolicy( bucketName ); if (null != policy && !policy.isEmpty()) policyDao.addPolicy( bucketName, client, policy ); if (null != sbp) ServiceProvider.getInstance().setBucketPolicy( bucketName, sbp ); response.setStatus(200); } catch( PermissionDeniedException e ) { logger.error("Put Bucket Policy failed due to " + e.getMessage(), e); throw e; } catch( ParseException e ) { logger.error("Put Bucket Policy failed due to " + e.getMessage(), e); throw new PermissionDeniedException( e.toString()); } catch( Exception e ) { logger.error("Put Bucket Policy failed due to " + e.getMessage(), e); response.setStatus(500); } } private void executeGetBucketPolicy(HttpServletRequest request, HttpServletResponse response) { String bucketName = (String)request.getAttribute(S3Constants.BUCKET_ATTR_KEY); // [A] Is there an owner of an existing policy or bucket? BucketPolicyDao policyDao = new BucketPolicyDao(); SBucketDao bucketDao = new SBucketDao(); SBucket bucket = bucketDao.getByName( bucketName ); String owner = null; if ( null != bucket ) { owner = bucket.getOwnerCanonicalId(); } else { try { owner = policyDao.getPolicyOwner( bucketName ); } catch( Exception e ) {} } // [B] "The bucket owner by default has permissions to retrieve bucket policies using GET Bucket policy." 
// -> the bucket owner may want to restrict the IP address from where this can be executed String client = UserContext.current().getCanonicalUserId(); S3PolicyContext context = new S3PolicyContext( PolicyActions.GetBucketPolicy, bucketName ); switch( S3Engine.verifyPolicy( context )) { case ALLOW: break; case DEFAULT_DENY: if (null != owner && !client.equals( owner )) { response.setStatus(405); return; } break; case DENY: response.setStatus(403); return; } // [B] Pull the policy from the database if one exists try { String policy = policyDao.getPolicy( bucketName ); if ( null == policy ) { response.setStatus(404); } else { response.setStatus(200); response.setContentType("application/json"); S3RestServlet.endResponse(response, policy); } } catch( Exception e ) { logger.error("Get Bucket Policy failed due to " + e.getMessage(), e); response.setStatus(500); } } private void executeDeleteBucketPolicy(HttpServletRequest request, HttpServletResponse response) { String bucketName = (String)request.getAttribute(S3Constants.BUCKET_ATTR_KEY); SBucketDao bucketDao = new SBucketDao(); SBucket bucket = bucketDao.getByName( bucketName ); if (bucket != null) { String client = UserContext.current().getCanonicalUserId(); if (!client.equals( bucket.getOwnerCanonicalId())) { response.setStatus(405); return; } } try { BucketPolicyDao policyDao = new BucketPolicyDao(); String policy = policyDao.getPolicy( bucketName ); if ( null == policy ) { response.setStatus(204); } else { ServiceProvider.getInstance().deleteBucketPolicy( bucketName ); policyDao.deletePolicy( bucketName ); response.setStatus(200); } } catch( Exception e ) { logger.error("Delete Bucket Policy failed due to " + e.getMessage(), e); response.setStatus(500); } } public void executeGetAllBuckets(HttpServletRequest request, HttpServletResponse response) throws IOException, XMLStreamException { Calendar cal = Calendar.getInstance(); cal.set( 1970, 1, 1 ); S3ListAllMyBucketsRequest engineRequest = new S3ListAllMyBucketsRequest(); engineRequest.setAccessKey(UserContext.current().getAccessKey()); engineRequest.setRequestTimestamp( cal ); engineRequest.setSignature( "" ); S3ListAllMyBucketsResponse engineResponse = ServiceProvider.getInstance().getS3Engine().handleRequest(engineRequest); // To allow the all buckets list to be serialized via Axiom classes ListAllMyBucketsResponse allBuckets = S3SerializableServiceImplementation.toListAllMyBucketsResponse( engineResponse ); OutputStream outputStream = response.getOutputStream(); response.setStatus(200); response.setContentType("application/xml"); // The content-type literally should be "application/xml; charset=UTF-8" // but any compliant JVM supplies utf-8 by default // MTOMAwareResultStreamWriter resultWriter = new MTOMAwareResultStreamWriter ("ListAllMyBucketsResult", outputStream ); // resultWriter.startWrite(); // resultWriter.writeout(allBuckets); // resultWriter.stopWrite(); StringBuffer xml = new StringBuffer(); xml.append( "<?xml version=\"1.0\" encoding=\"utf-8\"?>" ); xml.append("<ListAllMyBucketsResult xmlns=\"http://s3.amazonaws.com/doc/2006-03-01/\">"); xml.append("<Owner><ID>"); xml.append(engineResponse.getOwner().getID()).append("</ID>"); xml.append("<DisplayName>").append(engineResponse.getOwner().getDisplayName()).append("</DisplayName>"); xml.append("</Owner>").append("<Buckets>"); SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSZ"); for (S3ListAllMyBucketsEntry entry :engineResponse.getBuckets()) { 
xml.append("<Bucket>").append("<Name>").append(entry.getName()).append("</Name>"); xml.append("<CreationDate>").append(sdf.format(entry.getCreationDate().getTime())).append("</CreationDate>"); xml.append("</Bucket>"); } xml.append("</Buckets>").append("</ListAllMyBucketsResult>"); response.setStatus(200); response.setContentType("text/xml; charset=UTF-8"); S3RestServlet.endResponse(response, xml.toString()); } public void executeGetBucket(HttpServletRequest request, HttpServletResponse response) throws IOException, XMLStreamException { S3ListBucketRequest engineRequest = new S3ListBucketRequest(); engineRequest.setBucketName((String)request.getAttribute(S3Constants.BUCKET_ATTR_KEY)); engineRequest.setDelimiter(request.getParameter("delimiter")); engineRequest.setMarker(request.getParameter("marker")); engineRequest.setPrefix(request.getParameter("prefix")); int maxKeys = Converter.toInt(request.getParameter("max-keys"), 1000); engineRequest.setMaxKeys(maxKeys); try { S3ListBucketResponse engineResponse = ServiceProvider.getInstance().getS3Engine().listBucketContents( engineRequest, false ); // To allow the all list buckets result to be serialized via Axiom classes ListBucketResponse oneBucket = S3SerializableServiceImplementation.toListBucketResponse( engineResponse ); OutputStream outputStream = response.getOutputStream(); response.setStatus(200); response.setContentType("application/xml"); // The content-type literally should be "application/xml; charset=UTF-8" // but any compliant JVM supplies utf-8 by default; MTOMAwareResultStreamWriter resultWriter = new MTOMAwareResultStreamWriter ("ListBucketResult", outputStream ); resultWriter.startWrite(); resultWriter.writeout(oneBucket); resultWriter.stopWrite(); } catch (NoSuchObjectException nsoe) { response.setStatus(404); response.setContentType("application/xml"); StringBuffer xmlError = new StringBuffer(); xmlError.append("<?xml version=\"1.0\" encoding=\"UTF-8\"?>") .append("<Error><Code>NoSuchBucket</Code><Message>The specified bucket does not exist</Message>") .append("<BucketName>").append((String)request.getAttribute(S3Constants.BUCKET_ATTR_KEY)) .append("</BucketName>") .append("<RequestId>1DEADBEEF9</RequestId>") //TODO .append("<HostId>abCdeFgHiJ1k2LmN3op4q56r7st89</HostId>") //TODO .append("</Error>"); S3RestServlet.endResponse(response, xmlError.toString()); } } public void executeGetBucketAcl(HttpServletRequest request, HttpServletResponse response) throws IOException, XMLStreamException { S3GetBucketAccessControlPolicyRequest engineRequest = new S3GetBucketAccessControlPolicyRequest(); Calendar cal = Calendar.getInstance(); cal.set( 1970, 1, 1 ); engineRequest.setAccessKey(UserContext.current().getAccessKey()); engineRequest.setRequestTimestamp( cal ); engineRequest.setSignature( "" ); // TODO - Consider providing signature in a future release which allows additional user description engineRequest.setBucketName((String)request.getAttribute(S3Constants.BUCKET_ATTR_KEY)); S3AccessControlPolicy engineResponse = ServiceProvider.getInstance().getS3Engine().handleRequest(engineRequest); // To allow the bucket acl policy result to be serialized via Axiom classes GetBucketAccessControlPolicyResponse onePolicy = S3SerializableServiceImplementation.toGetBucketAccessControlPolicyResponse( engineResponse ); OutputStream outputStream = response.getOutputStream(); response.setStatus(200); response.setContentType("application/xml"); // The content-type literally should be "application/xml; charset=UTF-8" // but any compliant JVM supplies 
utf-8 by default; MTOMAwareResultStreamWriter resultWriter = new MTOMAwareResultStreamWriter ("GetBucketAccessControlPolicyResult", outputStream ); resultWriter.startWrite(); resultWriter.writeout(onePolicy); resultWriter.stopWrite(); } public void executeGetBucketVersioning(HttpServletRequest request, HttpServletResponse response) throws IOException { // [A] Does the bucket exist? String bucketName = (String)request.getAttribute(S3Constants.BUCKET_ATTR_KEY); String versioningStatus = null; if (null == bucketName) { logger.error( "executeGetBucketVersioning - no bucket name given" ); response.setStatus( 400 ); return; } SBucketDao bucketDao = new SBucketDao(); SBucket sbucket = bucketDao.getByName( bucketName ); if (sbucket == null) { response.setStatus( 404 ); return; } // [B] The owner may want to restrict the IP address at which this can be performed String client = UserContext.current().getCanonicalUserId(); if (!client.equals( sbucket.getOwnerCanonicalId())) throw new PermissionDeniedException( "Access Denied - only the owner can read bucket versioning" ); S3PolicyContext context = new S3PolicyContext( PolicyActions.GetBucketVersioning, bucketName ); if (PolicyAccess.DENY == S3Engine.verifyPolicy( context )) { response.setStatus(403); return; } // [C] switch( sbucket.getVersioningStatus()) { default: case 0: versioningStatus = ""; break; case 1: versioningStatus = "Enabled"; break; case 2: versioningStatus = "Suspended"; break; } StringBuffer xml = new StringBuffer(); xml.append( "<?xml version=\"1.0\" encoding=\"utf-8\"?>" ); xml.append( "<VersioningConfiguration xmlns=\"http://s3.amazonaws.com/doc/2006-03-01/\">" ); if (0 < versioningStatus.length()) xml.append( "<Status>" ).append( versioningStatus ).append( "</Status>" ); xml.append( "</VersioningConfiguration>" ); response.setStatus(200); response.setContentType("text/xml; charset=UTF-8"); S3RestServlet.endResponse(response, xml.toString()); } public void executeGetBucketObjectVersions(HttpServletRequest request, HttpServletResponse response) throws IOException { S3ListBucketRequest engineRequest = new S3ListBucketRequest(); String keyMarker = request.getParameter("key-marker"); String versionIdMarker = request.getParameter("version-id-marker"); engineRequest.setBucketName((String)request.getAttribute(S3Constants.BUCKET_ATTR_KEY)); engineRequest.setDelimiter(request.getParameter("delimiter")); engineRequest.setMarker( keyMarker ); engineRequest.setPrefix(request.getParameter("prefix")); engineRequest.setVersionIdMarker( versionIdMarker ); int maxKeys = Converter.toInt(request.getParameter("max-keys"), 1000); engineRequest.setMaxKeys(maxKeys); S3ListBucketResponse engineResponse = ServiceProvider.getInstance().getS3Engine().listBucketContents( engineRequest, true ); // -> the SOAP version produces different XML StringBuffer xml = new StringBuffer(); xml.append( "<?xml version=\"1.0\" encoding=\"utf-8\"?>" ); xml.append( "<ListVersionsResult xmlns=\"http://s3.amazonaws.com/doc/2006-03-01/\">" ); xml.append( "<Name>" ).append( engineResponse.getBucketName()).append( "</Name>" ); if ( null == keyMarker ) xml.append( "<KeyMarker/>" ); else xml.append( "<KeyMarker>" ).append( keyMarker ).append( "</KeyMarker>" ); if ( null == versionIdMarker ) xml.append( "<VersionIdMarker/>" ); else xml.append( "<VersionIdMarker>" ).append( versionIdMarker ).append( "</VersionIdMarker>" ); xml.append( "<MaxKeys>" ).append( engineResponse.getMaxKeys()).append( "</MaxKeys>" ); xml.append( "<IsTruncated>" ).append( engineResponse.isTruncated()).append(
"</IsTruncated>" ); S3ListBucketObjectEntry[] versions = engineResponse.getContents(); for( int i=0; null != versions && i < versions.length; i++ ) { S3CanonicalUser owner = versions[i].getOwner(); boolean isDeletionMarker = versions[i].getIsDeletionMarker(); String displayName = owner.getDisplayName(); String id = owner.getID(); if ( isDeletionMarker ) { xml.append( "<DeleteMarker>" ); xml.append( "<Key>" ).append( versions[i].getKey()).append( "</Key>" ); xml.append( "<VersionId>" ).append( versions[i].getVersion()).append( "</VersionId>" ); xml.append( "<IsLatest>" ).append( versions[i].getIsLatest()).append( "</IsLatest>" ); xml.append( "<LastModified>" ).append( DatatypeConverter.printDateTime( versions[i].getLastModified())).append( "</LastModified>" ); } else { xml.append( "<Version>" ); xml.append( "<Key>" ).append( versions[i].getKey()).append( "</Key>" ); xml.append( "<VersionId>" ).append( versions[i].getVersion()).append( "</VersionId>" ); xml.append( "<IsLatest>" ).append( versions[i].getIsLatest()).append( "</IsLatest>" ); xml.append( "<LastModified>" ).append( DatatypeConverter.printDateTime( versions[i].getLastModified())).append( "</LastModified>" ); xml.append( "<ETag>" ).append( versions[i].getETag()).append( "</ETag>" ); xml.append( "<Size>" ).append( versions[i].getSize()).append( "</Size>" ); xml.append( "<StorageClass>" ).append( versions[i].getStorageClass()).append( "</StorageClass>" ); } xml.append( "<Owner>" ); xml.append( "<ID>" ).append( id ).append( "</ID>" ); if ( null == displayName ) xml.append( "<DisplayName/>" ); else xml.append( "<DisplayName>" ).append( owner.getDisplayName()).append( "</DisplayName>" ); xml.append( "</Owner>" ); if ( isDeletionMarker ) xml.append( "</DeleteMarker>" ); else xml.append( "</Version>" ); } xml.append( "</ListVersionsResult>" ); response.setStatus(200); response.setContentType("text/xml; charset=UTF-8"); S3RestServlet.endResponse(response, xml.toString()); } public void executeGetBucketLogging(HttpServletRequest request, HttpServletResponse response) throws IOException { // TODO -- Review this in future. Currently this is a beta feature of S3 response.setStatus(405); } public void executeGetBucketLocation(HttpServletRequest request, HttpServletResponse response) throws IOException { // TODO - This is a fakery! 
We don't actually store location in backend StringBuffer xml = new StringBuffer(); xml.append( "<?xml version=\"1.0\" encoding=\"utf-8\"?>" ); xml.append( "<LocationConstraint xmlns=\"http://s3.amazonaws.com/doc/2006-03-01/\">" ); // This is the real fakery xml.append( "us-west-2" ); xml.append( "</LocationConstraint>" ); response.setStatus(200); response.setContentType("text/xml; charset=UTF-8"); S3RestServlet.endResponse(response, xml.toString()); } public void executeGetBucketWebsite(HttpServletRequest request, HttpServletResponse response) throws IOException { response.setStatus(405); } public void executeDeleteBucketWebsite(HttpServletRequest request, HttpServletResponse response) throws IOException { response.setStatus(405); } public void executePutBucket(HttpServletRequest request, HttpServletResponse response) throws IOException { int contentLength = request.getContentLength(); Object objectInContent = null; if(contentLength > 0) { InputStream is = null; try { is = request.getInputStream(); String xml = StringHelper.stringFromStream(is); Class.forName("com.cloud.bridge.service.core.s3.S3CreateBucketConfiguration"); XSerializer serializer = new XSerializer(new XSerializerXmlAdapter()); objectInContent = serializer.serializeFrom(xml); if(objectInContent != null && !(objectInContent instanceof S3CreateBucketConfiguration)) { throw new InvalidRequestContentException("Invalid request content in create-bucket: " + xml); } is.close(); } catch (IOException e) { logger.error("Unable to read request data due to " + e.getMessage(), e); throw new NetworkIOException(e); } catch (ClassNotFoundException e) { logger.error("In a normal world this should never happen:" + e.getMessage(), e); throw new RuntimeException("A required class was not found in the classpath:" + e.getMessage()); } finally { if(is != null) is.close(); } } S3CreateBucketRequest engineRequest = new S3CreateBucketRequest(); engineRequest.setBucketName((String)request.getAttribute(S3Constants.BUCKET_ATTR_KEY)); engineRequest.setConfig((S3CreateBucketConfiguration)objectInContent); try { S3CreateBucketResponse engineResponse = ServiceProvider.getInstance().getS3Engine().handleRequest(engineRequest); response.addHeader("Location", "/" + engineResponse.getBucketName()); response.setContentLength(0); response.setStatus(200); response.flushBuffer(); } catch (ObjectAlreadyExistsException oaee) { response.setStatus(409); String xml = "<?xml version=\"1.0\" encoding=\"UTF-8\"?> <Error><Code>OperationAborted</Code><Message>A conflicting conditional operation is currently in progress against this resource. Please try again.</Message></Error>"; response.setContentType("text/xml; charset=UTF-8"); S3RestServlet.endResponse(response, xml); } } public void executePutBucketAcl(HttpServletRequest request, HttpServletResponse response) throws IOException { // [A] Determine that there is an applicable bucket which might have an ACL set String bucketName = (String)request.getAttribute(S3Constants.BUCKET_ATTR_KEY); SBucketDao bucketDao = new SBucketDao(); SBucket bucket = bucketDao.getByName( bucketName ); String owner = null; if ( null != bucket ) owner = bucket.getOwnerCanonicalId(); if (null == owner) { logger.error( "ACL update failed since " + bucketName + " does not exist" ); throw new IOException("ACL update failed"); } // [B] Obtain the grant request which applies to the acl request string. The latter is supplied as the value of the x-amz-acl header.
S3SetBucketAccessControlPolicyRequest engineRequest = new S3SetBucketAccessControlPolicyRequest(); S3Grant grantRequest = new S3Grant(); S3AccessControlList aclRequest = new S3AccessControlList(); String aclRequestString = request.getHeader("x-amz-acl"); OrderedPair <Integer,Integer> accessControlsForBucketOwner = SAcl.getCannedAccessControls(aclRequestString,"SBucket"); grantRequest.setPermission(accessControlsForBucketOwner.getFirst()); grantRequest.setGrantee(accessControlsForBucketOwner.getSecond()); grantRequest.setCanonicalUserID(owner); aclRequest.addGrant(grantRequest); engineRequest.setAcl(aclRequest); engineRequest.setBucketName(bucketName); // [C] Allow an S3Engine to handle the S3SetBucketAccessControlPolicyRequest S3Response engineResponse = ServiceProvider.getInstance().getS3Engine().handleRequest(engineRequest); response.setStatus( engineResponse.getResultCode()); } public void executePutBucketVersioning(HttpServletRequest request, HttpServletResponse response) throws IOException { String bucketName = (String)request.getAttribute(S3Constants.BUCKET_ATTR_KEY); String versioningStatus = null; Node item = null; if (null == bucketName) { logger.error( "executePutBucketVersioning - no bucket name given" ); response.setStatus( 400 ); return; } // -> is the XML as defined? try { DocumentBuilder db = dbf.newDocumentBuilder(); Document restXML = db.parse( request.getInputStream()); NodeList match = S3RestServlet.getElement( restXML, "http://s3.amazonaws.com/doc/2006-03-01/", "Status" ); if ( 0 < match.getLength()) { item = match.item(0); versioningStatus = new String( item.getFirstChild().getNodeValue()); } else { logger.error( "executePutBucketVersioning - cannot find Status tag in XML body" ); response.setStatus( 400 ); return; } } catch( Exception e ) { logger.error( "executePutBucketVersioning - failed to parse XML due to " + e.getMessage(), e); response.setStatus(400); return; } try { // Irrespective of what the ACLs say only the owner can turn on versioning on a bucket. // The bucket owner may want to restrict the IP address from which this can occur. SBucketDao bucketDao = new SBucketDao(); SBucket sbucket = bucketDao.getByName( bucketName ); String client = UserContext.current().getCanonicalUserId(); if (!client.equals( sbucket.getOwnerCanonicalId())) throw new PermissionDeniedException( "Access Denied - only the owner can turn on versioing on a bucket" ); S3PolicyContext context = new S3PolicyContext( PolicyActions.PutBucketVersioning, bucketName ); if (PolicyAccess.DENY == S3Engine.verifyPolicy( context )) { response.setStatus(403); return; } if (versioningStatus.equalsIgnoreCase( "Enabled" )) sbucket.setVersioningStatus( 1 ); else if (versioningStatus.equalsIgnoreCase( "Suspended")) sbucket.setVersioningStatus( 2 ); else { logger.error( "executePutBucketVersioning - unknown state: [" + versioningStatus + "]" ); response.setStatus( 400 ); return; } bucketDao.update( sbucket ); } catch( PermissionDeniedException e ) { logger.error( "executePutBucketVersioning - failed due to " + e.getMessage(), e); throw e; } catch( Exception e ) { logger.error( "executePutBucketVersioning - failed due to " + e.getMessage(), e); response.setStatus(500); return; } response.setStatus(200); } public void executePutBucketLogging(HttpServletRequest request, HttpServletResponse response) throws IOException { // TODO -- Review this in future. 
Currently this is a S3 beta feature response.setStatus(501); } public void executePutBucketWebsite(HttpServletRequest request, HttpServletResponse response) throws IOException { // TODO -- LoPri - Undertake checks on Put Bucket Website // Tested using configuration <Directory /Users/john1/S3-Mount>\nAllowOverride FileInfo AuthConfig Limit...</Directory> in httpd.conf // Need some way of using AllowOverride to allow use of .htaccess and then pushing .httaccess file to bucket subdirectory of mount point // Currently has noop effect in the sense that a running apachectl process sees the directory contents without further action response.setStatus(200); } public void executeDeleteBucket(HttpServletRequest request, HttpServletResponse response) throws IOException { S3DeleteBucketRequest engineRequest = new S3DeleteBucketRequest(); engineRequest.setBucketName((String)request.getAttribute(S3Constants.BUCKET_ATTR_KEY)); S3Response engineResponse = ServiceProvider.getInstance().getS3Engine().handleRequest(engineRequest); response.setStatus(engineResponse.getResultCode()); response.flushBuffer(); } /** * Multipart upload is a complex operation with all the options defined by Amazon. Part of the functionality is * provided by the query done against the database. The CommonPrefixes functionality is done the same way * as done in the listBucketContents function (i.e., by iterating though the list to decide which output * element each key is placed). * * @param request * @param response * @throws IOException */ public void executeListMultipartUploads(HttpServletRequest request, HttpServletResponse response) throws IOException { // [A] Obtain parameters and do basic bucket verification String bucketName = (String)request.getAttribute(S3Constants.BUCKET_ATTR_KEY); String delimiter = request.getParameter("delimiter"); String keyMarker = request.getParameter("key-marker"); String prefix = request.getParameter("prefix"); int maxUploads = 1000; int nextUploadId = 0; String nextKey = null; boolean isTruncated = false; S3MultipartUpload[] uploads = null; S3MultipartUpload onePart = null; String temp = request.getParameter("max-uploads"); if (null != temp) { maxUploads = Integer.parseInt( temp ); if (maxUploads > 1000 || maxUploads < 0) maxUploads = 1000; } // -> upload-id-marker is ignored unless key-marker is also specified String uploadIdMarker = request.getParameter("upload-id-marker"); if (null == keyMarker) uploadIdMarker = null; // -> does the bucket exist, we may need it to verify access permissions SBucketDao bucketDao = new SBucketDao(); SBucket bucket = bucketDao.getByName(bucketName); if (bucket == null) { logger.error( "listMultipartUpload failed since " + bucketName + " does not exist" ); response.setStatus(404); return; } S3PolicyContext context = new S3PolicyContext( PolicyActions.ListBucketMultipartUploads, bucketName ); context.setEvalParam( ConditionKeys.Prefix, prefix ); context.setEvalParam( ConditionKeys.Delimiter, delimiter ); S3Engine.verifyAccess( context, "SBucket", bucket.getId(), SAcl.PERMISSION_READ ); // [B] Query the multipart table to get the list of current uploads try { MultipartLoadDao uploadDao = new MultipartLoadDao(); OrderedPair<S3MultipartUpload[],Boolean> result = uploadDao.getInitiatedUploads( bucketName, maxUploads, prefix, keyMarker, uploadIdMarker ); uploads = result.getFirst(); isTruncated = result.getSecond().booleanValue(); } catch( Exception e ) { logger.error("List Multipart Uploads failed due to " + e.getMessage(), e); response.setStatus(500); } StringBuffer xml 
= new StringBuffer(); xml.append( "<?xml version=\"1.0\" encoding=\"utf-8\"?>" ); xml.append( "<ListMultipartUploadsResult xmlns=\"http://s3.amazonaws.com/doc/2006-03-01/\">" ); xml.append( "<Bucket>" ).append( bucketName ).append( "</Bucket>" ); xml.append( "<KeyMarker>").append((null == keyMarker ? "" : keyMarker)).append( "</KeyMarker>" ); xml.append( "<UploadIdMarker>").append((null == uploadIdMarker ? "" : uploadIdMarker)).append( "</UploadIdMarker>" ); // [C] Construct the contents of the <Upload> element StringBuffer partsList = new StringBuffer(); for( int i=0; i < uploads.length; i++ ) { onePart = uploads[i]; if (null == onePart) break; if (delimiter != null && !delimiter.isEmpty()) { // -> is this available only in the CommonPrefixes element? if (StringHelper.substringInBetween(onePart.getKey(), prefix, delimiter) != null) continue; } nextKey = onePart.getKey(); nextUploadId = onePart.getId(); partsList.append( "<Upload>" ); partsList.append( "<Key>" ).append( nextKey ).append( "</Key>" ); partsList.append( "<UploadId>" ).append( nextUploadId ).append( "</UploadId>" ); partsList.append( "<Initiator>" ); partsList.append( "<ID>" ).append( onePart.getAccessKey()).append( "</ID>" ); partsList.append( "<DisplayName></DisplayName>" ); partsList.append( "</Initiator>" ); partsList.append( "<Owner>" ); partsList.append( "<ID>" ).append( onePart.getAccessKey()).append( "</ID>" ); partsList.append( "<DisplayName></DisplayName>" ); partsList.append( "</Owner>" ); partsList.append( "<StorageClass>STANDARD</StorageClass>" ); partsList.append( "<Initiated>" ).append( DatatypeConverter.printDateTime( onePart.getLastModified())).append( "</Initiated>" ); partsList.append( "</Upload>" ); } // [D] Construct the contents of the <CommonPrefixes> elements (if any) for( int i=0; i < uploads.length; i++ ) { onePart = uploads[i]; if (null == onePart) break; if (delimiter != null && !delimiter.isEmpty()) { String subName = StringHelper.substringInBetween(onePart.getKey(), prefix, delimiter); if (subName != null) { partsList.append( "<CommonPrefixes>" ); partsList.append( "<Prefix>" ); if ( prefix != null && prefix.length() > 0 ) partsList.append( prefix + delimiter + subName ); else partsList.append( subName ); partsList.append( "</Prefix>" ); partsList.append( "</CommonPrefixes>" ); } } } // [D] Finish off the response xml.append( "<NextKeyMarker>" ).append((null == nextKey ? "" : nextKey)).append( "</NextKeyMarker>" ); xml.append( "<NextUploadIdMarker>" ).append((0 == nextUploadId ? "" : nextUploadId)).append( "</NextUploadIdMarker>" ); xml.append( "<MaxUploads>" ).append( maxUploads ).append( "</MaxUploads>" ); xml.append( "<IsTruncated>" ).append( isTruncated ).append( "</IsTruncated>" ); xml.append( partsList.toString()); xml.append( "</ListMultipartUploadsResult>" ); response.setStatus(200); response.setContentType("text/xml; charset=UTF-8"); S3RestServlet.endResponse(response, xml.toString()); } private String streamToString( InputStream is ) throws IOException { int n = 0; if ( null != is ) { Writer writer = new StringWriter(); char[] buffer = new char[1024]; try { Reader reader = new BufferedReader( new InputStreamReader(is, "UTF-8")); while ((n = reader.read(buffer)) != -1) writer.write(buffer, 0, n); } finally { is.close(); } return writer.toString(); } else return null; } }
/***************************************************************** <copyright> Morozko Java Library org.morozko.java.mod.daogen Copyright (c) 2006 Morozko All rights reserved. This program and the accompanying materials are made available under the terms of the Apache License v2.0 which accompanies this distribution, and is available at http://www.apache.org/licenses/ (txt version : http://www.apache.org/licenses/LICENSE-2.0.txt html version : http://www.apache.org/licenses/LICENSE-2.0.html) This product includes software developed at The Apache Software Foundation (http://www.apache.org/). </copyright> *****************************************************************/ /* * @(#)DAOCoder.java * * @project : org.morozko.java.mod.daogen * @package : org.morozko.java.mod.daogen.gen.coder * @creation : 13-apr-2006 */ package org.morozko.java.mod.daogen.gen.coder; import java.io.IOException; import java.io.PrintStream; import java.util.Enumeration; import java.util.Iterator; import java.util.List; import java.util.Properties; import org.morozko.java.core.log.LogFacade; import org.morozko.java.mod.daogen.gen.config.DGConfig; import org.morozko.java.mod.daogen.gen.config.FieldConfig; import org.morozko.java.mod.daogen.gen.config.IdGeneratorConfig; import org.morozko.java.mod.daogen.gen.config.LoadConfig; import org.morozko.java.mod.daogen.gen.config.OperationConfig; import org.morozko.java.mod.daogen.gen.config.RelationConfig; import org.morozko.java.mod.daogen.gen.config.TableConfig; /** * <p></p> * * @author mfranci * */ public class LegacyDAOCoder extends Coder { private static String prepareTableName( String tn) throws IOException { String result = ""; if ( tn != null ) { int index = tn.indexOf( "\"" ); while ( index != -1 ) { result+= tn.substring( 0, index )+"\\\""; tn = tn.substring( index+1 ); index = tn.indexOf( "\"" ); } result+= tn; } return result; } private static String whereUtil( List fieldName, TableConfig tableConfig ) { String sql = " "; Iterator it = fieldName.iterator(); boolean first = true; while ( it.hasNext() ) { FieldConfig fieldConfig = tableConfig.getFieldConfig( ( String )it.next() ); if ( first ) { first = false; } else { sql+= " AND "; } sql+= "v."+fieldConfig.getFieldName()+"=? 
"; } return sql; } public static void generate( PrintStream stream, DGConfig dgConfig, TableConfig tableConfig ) throws Exception { Properties generalProps = dgConfig.getGeneralProps(); boolean jvm5 = "5".equalsIgnoreCase( dgConfig.getGeneralProps().getProperty( "daogen.target.vm" ) ); String name = tableConfig.getTableName(); String pName = generalProps.getProperty( "package.dao" )+".helpers"; String cName = name+"DAOHelper"; String modelName = name+"Model"; String rseName = modelName+"RSEHelper"; String moduleDaoFactory = null; if ( generalProps.getProperty( "factory.dao.module" ) != null ) { moduleDaoFactory = generalProps.getProperty( "factory.dao.module" )+"Helper"; } String resultList = "List"; String newList = "this.newList()"; String loadAppend = ""; if ( jvm5 ) { resultList+= "<"+modelName+">"; newList = "new java.util.ArrayList<"+modelName+">()"; //loadAppend = "J5"; } addClassComment( stream, cName, pName, dgConfig ); stream.println( "package "+pName+";" ); stream.println(); // import START stream.println( "import java.util.List;" ); stream.println( "import org.morozko.java.core.util.result.PagedResult;" ); stream.println( "import org.morozko.java.mod.db.dao.PageInfoDB;" ); stream.println( "import "+generalProps.getProperty( "package.model" )+"."+modelName+";" ); stream.println( "import "+generalProps.getProperty( "field.type" )+";" ); stream.println( "import "+generalProps.getProperty( "class.opdao" )+";" ); stream.println( "import "+generalProps.getProperty( "field.list.type" )+";" ); stream.println( "import "+generalProps.getProperty( "load.result.type" )+";" ); stream.println( "import "+generalProps.getProperty( "dao.exception.type" )+";" ); stream.println( "import "+generalProps.getProperty( "interface.idg" )+";" ); stream.println(); // import END // inizio classe START addTypeComment( stream, "Classe per la gestione dell' accesso al DB per oggetti di "+name+"Model", dgConfig ); stream.println( "public class "+cName+" extends "+generalProps.getProperty( "superclass.dao" )+" {" ); stream.println(); // inizio classe END // serial ver UID START stream.println(" private final static long serialVersionUID = "+System.currentTimeMillis()+""+(int)(Math.random()*100)+"L;" ); stream.println(); // serial ver UID END // variabili e costanti START stream.println( " protected static final "+rseName+" RSE_MAIN = new "+rseName+"();" ); stream.println(); stream.println( " protected static final String QUERY_VIEW = \""+prepareTableName( tableConfig.getTableViewSQL() )+"\";" ); stream.println(); stream.println( " protected String queryView;" ); String update = tableConfig.getTableUpdateSQL(); stream.println(); stream.println( " protected static final String SQL_UPDATE = \""+prepareTableName( update )+"\";" ); stream.println(); stream.println( " protected String sqlUpdate;" ); stream.println(); stream.println(); stream.println( " public String getQueryViewString() { return QUERY_VIEW; }"); stream.println( " public String getSqlUpdateString() { return SQL_UPDATE; }"); stream.println(); // variabili e costanti END // module dao factory start if ( moduleDaoFactory != null ) { stream.println( " private "+moduleDaoFactory+" moduleDaoFactory;" ); stream.println(); stream.println( makeSetterWorker( "moduleDaoFactory", moduleDaoFactory ) ); stream.println(); stream.println( makeGetterWorker( "moduleDaoFactory", moduleDaoFactory ) ); stream.println(); stream.println(); } // module dao factory end // id generator START if ( tableConfig.getIdGenerator() != null ) { IdGeneratorConfig idGeneratorConfig = 
(IdGeneratorConfig)dgConfig.getIdGeneratorMap().get( tableConfig.getIdGenerator() ); stream.println(); stream.println( " // id generator START" ); stream.println(); stream.println( " private IdGenerator idGenerator;" ); stream.println(); stream.println( " public "+dgConfig.getGeneralProps().getProperty( "class.daoid" )+" generateId() throws DAOException {" ); stream.println( " if ( this.idGenerator == null ) { " ); stream.println( " try {" ); stream.println( " java.util.Properties props = new java.util.Properties();" ); Enumeration e = idGeneratorConfig.getConfig().keys(); while ( e.hasMoreElements() ) { String key = (String)e.nextElement(); String value = idGeneratorConfig.getConfig().getProperty( key ); stream.println( " props.setProperty( "+stringValue( key )+","+stringValue( value )+" );" ); stream.println( " this.idGenerator = (IdGenerator)org.morozko.java.core.lang.helpers.ClassHelper.newInstance( "+stringValue( idGeneratorConfig.getType() )+" );" ); stream.println( " this.idGenerator.setConnectionFactory( this.getMainDAOFactory().getConnectionFactory() ); " ); stream.println( " this.idGenerator.configure( props ); " ); } stream.println( " } catch (Exception e) {" ); stream.println( " e.printStackTrace();" ); stream.println( " }" ); stream.println( " } " ); stream.println( " return this.idGenerator.generateId();" ); stream.println( " }" ); stream.println(); stream.println( " // id generator END" ); stream.println(); } // id generator END // load by pk START stream.println(); String[] keyList = tableConfig.getUpdateKey(); if (keyList != null && keyList.length > 0) { stream.print( " protected "+modelName+" loadByPkWorker( Object "+tableConfig.getFieldConfig( keyList[0] ).getJavaFieldName()+" " ); for (int k=1; k<keyList.length; k++) { //System.out.println( "FieldName : "+keyList[k]+" : "+tableConfig.getFieldConfig( keyList[k] )+" "+tableConfig.getTableName() ); stream.print( " , Object "+tableConfig.getFieldConfig( keyList[k] ).getJavaFieldName() ); } stream.println( " ) throws DAOException { " ); stream.print( " String query = \"SELECT * FROM ( \"+this.queryView+\") v WHERE "+keyList[0]+" = ?" ); for (int k=1; k<keyList.length; k++) { stream.print( " AND "+keyList[k]+" = ? " ); } stream.println( " \";" ); stream.println( " FieldList fl = this.newFieldList(); " ); for (int k=0; k<keyList.length; k++) { stream.println( " fl.addField( this.getFieldFactory().newField( "+tableConfig.getFieldConfig( keyList[k] ).getJavaFieldName()+" ) ); " ); } stream.println( " return ( "+modelName+" ) this.loadOne( query, fl );" ); stream.println( " } " ); } // load by pk END // delete by pk START if (keyList != null && keyList.length > 0) { stream.print( " public int deleteByPk( "+tableConfig.getFieldConfig( keyList[0] ).getJavaFieldType()+" "+tableConfig.getFieldConfig( keyList[0] ).getJavaFieldName()+" " ); for (int k=1; k<keyList.length; k++) { stream.print( " , "+tableConfig.getFieldConfig( keyList[k] ).getJavaFieldType()+" "+tableConfig.getFieldConfig( keyList[k] ).getJavaFieldName() ); } stream.println( " ) throws DAOException { " ); stream.print( " String query = \"DELETE FROM \"+this.sqlUpdate+\" WHERE "+keyList[0]+" = ?" ); for (int k=1; k<keyList.length; k++) { stream.print( " AND "+keyList[k]+" = ? 
" ); } stream.println( " \";" ); stream.println( " FieldList fl = this.newFieldList(); " ); for (int k=0; k<keyList.length; k++) { System.out.println( "KEY : "+keyList[k] ); stream.println( " fl.addField( this.getFieldFactory().newField( "+tableConfig.getFieldConfig( keyList[k] ).getJavaFieldName()+" ) ); " ); } stream.println( " return this.update( query, fl );" ); stream.println( " } " ); } // delete by pk END // operation list start Iterator itOperations = tableConfig.getOperationsList().iterator(); while ( itOperations.hasNext() ) { OperationConfig operationConfig = (OperationConfig)itOperations.next(); if ( operationConfig.isTypeDelete() ) { FieldConfig field0 = tableConfig.getFieldConfig( (String)operationConfig.getFieldList().get( 0 ) ); stream.print( " public int delete"+operationConfig.getName()+"( "+field0.getJavaFieldType()+" "+field0.getJavaFieldName()+" " ); String queryDelete = " String query = \"DELETE FROM \"+this.sqlUpdate+\" WHERE "+field0.getJavaFieldName()+" = ?"; for (int k=1; k<operationConfig.getFieldList().size(); k++) { FieldConfig fieldCurrent = tableConfig.getFieldConfig( (String)operationConfig.getFieldList().get( k ) ); stream.print( " , "+fieldCurrent.getJavaFieldType()+" "+fieldCurrent.getJavaFieldName() ); queryDelete+= " AND "+fieldCurrent.getJavaFieldName()+" = ? "; } stream.println( " ) throws DAOException { " ); stream.println( queryDelete+" \";" ); stream.println( " FieldList fl = this.newFieldList(); " ); for (int k=0; k<operationConfig.getFieldList().size(); k++) { FieldConfig fieldCurrent = tableConfig.getFieldConfig( (String)operationConfig.getFieldList().get( k ) ); stream.println( " fl.addField( this.getFieldFactory().newField( "+fieldCurrent.getJavaFieldName()+" ) ); " ); } stream.println( " return this.update( query, fl );" ); stream.println( " } " ); } } // operation list end // update / insert inizio if ( update != null ) { // inserimento per muysql (salto il primo campo) if ( tableConfig.getFieldsUpdate().size() > 2 ) { FieldConfig field1 = (FieldConfig)tableConfig.getFieldsUpdate().get( 1 ); stream.println( " public OpDAO newInsertOpDAOMysql( "+modelName+" model ) throws DAOException { " ); String insQueryPart1 = field1.getFieldName(); String insQueryPart2 = " ? "; String fieldSet = " fl.addField( this.getFieldFactory().newField( model.get"+methodName( field1.getFieldName() )+"(), "+field1.getFieldType().getSqlType().intValue()+" ) );\n"; for (int k=2; k<tableConfig.getFieldsUpdate().size(); k++) { FieldConfig current = (FieldConfig)tableConfig.getFieldsUpdate().get( k ); insQueryPart1+= ", "+current.getFieldName(); insQueryPart2+= ", ?"; fieldSet+= " fl.addField( model.get"+methodName( current.getFieldName() )+"(), "+current.getFieldType().getSqlType().intValue()+" );\n"; } stream.println( " String query = \"INSERT INTO \"+this.sqlUpdate+\" ( "+insQueryPart1+" ) VALUES ( "+insQueryPart2+" )\";" ); stream.println( " FieldList fl = this.newFieldList();" ); stream.print( fieldSet ); stream.println( " return OpDAO.newUpdateOp( query, fl );" ); stream.println( " }" ); stream.println( " public int newInsertMysql( "+modelName+" model ) throws DAOException { " ); stream.println( " int result = this.update( this.newInsertOpDAO( model ) );" ); stream.println( " return result;" ); stream.println( " }" ); } FieldConfig field1 = (FieldConfig)tableConfig.getFieldsUpdate().get( 0 ); stream.println( " public OpDAO newInsertOpDAO( "+modelName+" model ) throws DAOException { " ); String insQueryPart1 = field1.getFieldName(); String insQueryPart2 = " ? 
"; String fieldSet = " fl.addField( this.getFieldFactory().newField( model.get"+methodName( field1.getFieldName() )+"(), "+field1.getFieldType().getSqlType().intValue()+" ) );\n"; for (int k=1; k<tableConfig.getFieldsUpdate().size(); k++) { FieldConfig current = (FieldConfig)tableConfig.getFieldsUpdate().get( k ); insQueryPart1+= ", "+current.getFieldName(); insQueryPart2+= ", ?"; fieldSet+= " fl.addField( model.get"+methodName( current.getFieldName() )+"(), "+current.getFieldType().getSqlType().intValue()+" );\n"; } stream.println( " String query = \"INSERT INTO \"+this.sqlUpdate+\" ( "+insQueryPart1+" ) VALUES ( "+insQueryPart2+" )\";" ); stream.println( " FieldList fl = this.newFieldList();" ); stream.print( fieldSet ); stream.println( " return OpDAO.newUpdateOp( query, fl );" ); stream.println( " }" ); stream.println( " public int insert( "+modelName+" model ) throws DAOException { " ); stream.println( " return this.update( this.newInsertOpDAO( model ) );" ); stream.println( " }" ); if (keyList != null && keyList.length > 0 ) { // codice update stream.println( " public OpDAO newUpdateOpDAO( "+modelName+" model ) throws DAOException { " ); String upQueryPart1 = "SET "+ field1.getFieldName() +"=? "; for (int k=1; k<tableConfig.getFieldsUpdate().size(); k++) { FieldConfig current = (FieldConfig)tableConfig.getFieldsUpdate().get( k ); String colName = current.getFieldName(); upQueryPart1+= ", "+colName+" = ?"; } for (int k=0; k<keyList.length; k++) { fieldSet+= " fl.addField( model.get"+methodName( keyList[k] )+"(), "+tableConfig.getFieldConfig( keyList[k] ).getFieldType().getSqlType().intValue()+" );\n"; } stream.print( " String query = \"UPDATE \"+this.sqlUpdate+\" "+upQueryPart1+" WHERE "+keyList[0]+"=? " ); for (int k=1; k<keyList.length; k++) { stream.print( " AND "+keyList[k]+"=? 
" ); } stream.println( " \";" ); stream.println( " FieldList fl = this.newFieldList();" ); stream.print( fieldSet ); stream.println( " return OpDAO.newUpdateOp( query, fl );" ); stream.println( " }" ); stream.println( ); stream.println( " public int update( "+modelName+" model ) throws DAOException { " ); stream.println( " return this.update( newUpdateOpDAO( model ) );" ); stream.println( " }" ); } } // update / insert fine stream.println(); stream.println( " public "+modelName+" loadOne( String sql, FieldList fl ) throws DAOException {" ); stream.println( " return ("+modelName+")loadOne( sql, fl, RSE_MAIN );" ); stream.println( " }" ); stream.println( " public "+modelName+" loadOne( String sql, Field f ) throws DAOException {" ); stream.println( " return ("+modelName+")loadOne( sql, f, RSE_MAIN );" ); stream.println( " }" ); stream.println( " protected void loadAll( "+resultList+" list, String sql, FieldList fl ) throws DAOException {" ); stream.println( " this.loadAll"+loadAppend+"( list, sql, fl, RSE_MAIN );" ); stream.println( " }" ); stream.println( " protected void loadAll( "+resultList+" list, String sql, Field f ) throws DAOException {" ); stream.println( " this.loadAll"+loadAppend+"( list, sql, f, RSE_MAIN );" ); stream.println( " }" ); stream.println( " protected void loadAll( "+resultList+" list, String sql ) throws DAOException {" ); stream.println( " this.loadAll"+loadAppend+"( list, sql, this.newFieldList(), RSE_MAIN );" ); stream.println( " }" ); stream.println( " public void loadAll( "+resultList+" list ) throws DAOException {" ); stream.println( " String sql = this.queryView; " ); if ( tableConfig.getOrderBy() != null ) { stream.println( " sql+=\" ORDER BY "+tableConfig.getOrderBy()+" \";" ); } stream.println( " this.loadAll"+loadAppend+"( list, sql, this.newFieldList(), RSE_MAIN );" ); stream.println( " }" ); stream.println( " public List loadAll() throws DAOException {" ); stream.println( " "+resultList+" list = "+newList+";" ); stream.println( " this.loadAll( list );" ); stream.println( " return list;" ); stream.println( " }" ); stream.println( " public LoadResult loadAllResult() throws DAOException {" ); stream.println( " String sql = this.queryView; " ); if ( tableConfig.getOrderBy() != null ) { stream.println( " sql+=\" ORDER BY "+tableConfig.getOrderBy()+" \";" ); } stream.println( " return LoadResult.initResult( this, sql, this.newFieldList(), RSE_MAIN ) ;" ); stream.println( " }" ); stream.println( " public PagedResult loadAllPaged( int perPage, int loadPage ) throws DAOException {" ); stream.println( " return this.loadAllPaged( this.queryView, this.newFieldList(), RSE_MAIN, new PageInfoDB( loadPage, perPage ) );" ); stream.println( " }" ); stream.println( " public PagedResult loadAllPaged( int perPage, int loadPage, String orderBy ) throws DAOException {" ); stream.println( " return this.loadAllPaged( this.queryView, this.newFieldList(), RSE_MAIN, new PageInfoDB( loadPage, perPage, orderBy ) );" ); stream.println( " }" ); stream.println(); stream.println( " public "+generalProps.getProperty( "factory.dao" )+" getMainDAOFactory() {" ); stream.println( " return ("+generalProps.getProperty( "factory.dao" )+")this.getDaoFactory();" ); stream.println( " }" ); // costruttore inizio stream.println( " public "+cName+"("+generalProps.getProperty( "factory.dao" )+" daoFactory, String queryView, String sqlUpdate ) {" ); stream.println( " super(daoFactory);" ); stream.println( " this.init(daoFactory);" ); stream.println( " this.queryView = queryView;" ); if ( update!=null ) 
{ stream.println( " this.sqlUpdate = sqlUpdate;" ); } stream.println( " }" ); stream.println( " public "+cName+"("+generalProps.getProperty( "factory.dao" )+" daoFactory ) {" ); stream.println( " this( daoFactory, QUERY_VIEW, SQL_UPDATE );" ); stream.println( " }" ); // costruttore fine // inizio relazioni IN // carica tutte le relazioni su una lista di oggetti stream.println( " protected void loadAllRelations( List list ) throws DAOException { " ); stream.println( " for ( int k=0; k<list.size(); k++) { " ); stream.println( " this.loadAllRelations( ("+modelName+")list.get(k) ); " ); stream.println( " } " ); stream.println( " }" ); stream.println( ); stream.println( " public void loadAllRelations("+modelName+" model) throws DAOException {" ); for ( int k=0; k< tableConfig.getRelations().size(); k++ ) { RelationConfig relationConfig = (RelationConfig)tableConfig.getRelations().get( k ); stream.println( " this.loadRelation"+upFirst( relationConfig.getName() )+"( model );" ); } stream.println( ); stream.println( " }" ); for ( int k=0; k< tableConfig.getRelations().size(); k++ ) { RelationConfig relationConfig = (RelationConfig)tableConfig.getRelations().get( k ); stream.println( " public void loadRelation"+upFirst( relationConfig.getName() )+"( "+modelName+" model ) throws DAOException {" ); String checkField = ""; Iterator fieldIt = relationConfig.getFieldList().iterator(); while ( fieldIt.hasNext() ) { String field = (String)fieldIt.next(); checkField+= " && model.get"+methodName( field )+"() != null "; } stream.println( " if ( model != null "+checkField+" ) {" ); String relDAOType = generalProps.getProperty( "package.dao" )+"."+relationConfig.getTable()+"DAO"; String relDAOVar = lowFirst( relationConfig.getName()+"DAO" ); String relModelType = generalProps.getProperty( "package.model" )+"."+relationConfig.getTable()+"Model"; String relModelVar = lowFirst( relationConfig.getName() ); if ( relationConfig.isTypeOneToMany() ) { relModelType = "java.util.List"; } stream.println( " "+relDAOType+" "+relDAOVar+" = this.getModuleDaoFactory().get"+relationConfig.getTable()+"DAO();" ); stream.println( " "+relModelType+" "+relModelVar+" = "+relDAOVar+".outRelation"+upFirst( relModelVar )+"( model );" ); stream.println( " model.set"+upFirst( relModelVar )+"("+relModelVar+");" ); stream.println( " }" ); stream.println( " }" ); } stream.println( ); // fine relazioni IN // inizio relazioni OUT for ( int k=0; k< tableConfig.getRelationsOut().size(); k++ ) { RelationConfig relationConfig = (RelationConfig)tableConfig.getRelationsOut().get( k ); String modelArgType = generalProps.getProperty( "package.model" )+"."+upFirst( relationConfig.getTableOut() )+"Model"; String retType = modelName; String retNew = "new "+modelName+"()"; if ( relationConfig.isTypeOneToMany() ) { retType = "java.util.List"; retNew = "this.newList()"; } stream.println( " public "+retType+" outRelation"+upFirst( relationConfig.getName() )+"( "+modelArgType+" model ) throws DAOException {" ); stream.println( " "+retType+" result = "+retNew+";" ); stream.println( " FieldList fl = this.newFieldList();" ); String sql = " \"SELECT v.* FROM ( "; if ( relationConfig.getSql() == null ) { sql+= "\"+this.queryView+\""; } else { sql+= relationConfig.getSql(); } sql+= " ) v WHERE 1=1 "; for ( int i=0; i<relationConfig.getFieldList().size(); i++ ) { String currentField = (String)relationConfig.getFieldList().get( i ); String currentFieldOut = (String)relationConfig.getFieldOutList().get( i ); stream.println( " fl.addField( model.get"+methodName( 
currentField )+"() );" ); sql+= " AND v."+currentFieldOut+" = ? "; } if ( relationConfig.getOrderBy() != null ) { sql+= " ORDER BY v."+relationConfig.getOrderBy()+" "; } sql+= "\""; stream.println( " String sql = "+sql+";" ); if ( relationConfig.isTypeOneToOne() ) { stream.println( " result = this.loadOne( sql, fl );" ); } else if ( relationConfig.isTypeOneToMany() ) { stream.println( " this.loadAll( result, sql, fl );" ); } if ( relationConfig.isCascade() ) { stream.println( " this.loadAllRelations( result );" ); } stream.println( " return result;" ); stream.println( " }" ); stream.println( ); } // fine relazioni OUT // loads List loadList = tableConfig.getLoadList(); Iterator loadIt = loadList.iterator(); while ( loadIt.hasNext() ) { LoadConfig loadConfig = (LoadConfig)loadIt.next(); LogFacade.getLog().debug( "DAOCoder.generate() : LoadConfig name : "+loadConfig.getName() ); LogFacade.getLog().debug( "DAOCoder.generate() : LoadConfig type : "+loadConfig.getType() ); LogFacade.getLog().debug( "DAOCoder.generate() : LoadConfig class : "+loadConfig.getClass() ); LogFacade.getLog().debug( "DAOCoder.generate() : LoadConfig fieldList : "+loadConfig.getFieldList() ); if ( loadConfig.isTypeAll() ) { stream.print( " public "+resultList+" load"+upFirst( loadConfig.getType() )+loadConfig.getName()+"( " ); for ( int k=0; k<loadConfig.getFieldList().size(); k++ ) { FieldConfig current = (FieldConfig)tableConfig.getFieldConfig( (String)loadConfig.getFieldList().get( k ) ); if ( k!=0 ) { stream.print( " , " ); } stream.print( current.getJavaFieldType()+" "+current.getFieldName() ); } stream.println( " ) throws DAOException { " ); stream.println( " "+resultList+" result = "+newList+";" ); stream.println( " String sql = \"SELECT v.* FROM (\"+this.queryView+\") v WHERE "+whereUtil( loadConfig.getFieldList(), tableConfig )+"\";" ); if ( tableConfig.getOrderBy() != null ) { stream.println( " sql+=\" ORDER BY "+tableConfig.getOrderBy()+" \";" ); } stream.println( " FieldList fl = this.newFieldList();" ); for ( int k=0; k<loadConfig.getFieldList().size(); k++ ) { stream.println( " fl.addField("+tableConfig.getFieldConfig( (String)loadConfig.getFieldList().get( k ) ).getFieldName()+");" ); } stream.println( " this.loadAll"+loadAppend+"( result, sql, fl );" ); if ( loadConfig.isRelations() ) { stream.println( " this.loadAllRelations( result );" ); } stream.println( " return result;" ); stream.println( " }" ); // load all result stream.print( " public LoadResult loadResult"+upFirst( loadConfig.getType() )+loadConfig.getName()+"( " ); for ( int k=0; k<loadConfig.getFieldList().size(); k++ ) { FieldConfig current = (FieldConfig)tableConfig.getFieldConfig( (String)loadConfig.getFieldList().get( k ) ); if ( k!=0 ) { stream.print( " , " ); } stream.print( current.getJavaFieldType()+" "+current.getFieldName() ); } stream.println( " ) throws DAOException { " ); stream.println( " String sql = \"SELECT v.* FROM (\"+this.queryView+\") v WHERE "+whereUtil( loadConfig.getFieldList(), tableConfig )+"\";" ); if ( tableConfig.getOrderBy() != null ) { stream.println( " sql+=\" ORDER BY "+tableConfig.getOrderBy()+" \";" ); } stream.println( " FieldList fl = this.newFieldList();" ); for ( int k=0; k<loadConfig.getFieldList().size(); k++ ) { stream.println( " fl.addField("+tableConfig.getFieldConfig( (String)loadConfig.getFieldList().get( k ) ).getFieldName()+");" ); } stream.println( " LoadResult result = this.loadAllResult( sql, fl, RSE_MAIN );" ); stream.println( " return result;" ); stream.println( " }" ); // load all paged 
result // stream.print( " public PagedResult loadPaged"+upFirst( loadConfig.getType() )+loadConfig.getName()+"( int perPage, int loadPage, " ); // for ( int k=0; k<loadConfig.getFieldList().size(); k++ ) { // FieldConfig current = (FieldConfig)tableConfig.getFieldConfig( (String)loadConfig.getFieldList().get( k ) ); // if ( k!=0 ) { // stream.print( " , " ); // } // stream.print( current.getJavaFieldType()+" "+current.getFieldName() ); // } // stream.println( " ) throws DAOException { " ); // stream.println( " FieldList fl = this.newFieldList();" ); // for ( int k=0; k<loadConfig.getFieldList().size(); k++ ) { // FieldConfig current = (FieldConfig)tableConfig.getFieldConfig( (String)loadConfig.getFieldList().get( k ) ); // stream.println( " fl.addField( "+current.getFieldName()+");" ); // } // stream.println( " return this.loadAllPaged( sql, fl, RSE_MAIN, new PageInfoDB( loadPage, perPage ) );" ); // stream.println( " }" ); } else if ( loadConfig.isTypeOne() ) { stream.print( " public "+modelName+" load"+upFirst( loadConfig.getType() )+loadConfig.getName()+"( " ); for ( int k=0; k<loadConfig.getFieldList().size(); k++ ) { FieldConfig current = (FieldConfig)tableConfig.getFieldConfig( (String)loadConfig.getFieldList().get( k ) ); if ( k!=0 ) { stream.print( " , " ); } LogFacade.getLog().debug( "DAOCoder.generate() : FieldConfig current : "+current ); stream.print( current.getJavaFieldType()+" "+current.getFieldName() ); } stream.println( " ) throws DAOException { " ); stream.println( " "+modelName+" result = null;" ); stream.println( " String sql = \"SELECT v.* FROM (\"+this.queryView+\") v WHERE "+whereUtil( loadConfig.getFieldList(), tableConfig )+"\";" ); stream.println( " FieldList fl = this.newFieldList();" ); for ( int k=0; k<loadConfig.getFieldList().size(); k++ ) { stream.println( " fl.addField("+tableConfig.getFieldConfig( (String)loadConfig.getFieldList().get( k ) ).getFieldName()+");" ); } stream.println( " result = this.loadOne( sql, fl );" ); if ( loadConfig.isRelations() ) { stream.println( " this.loadAllRelations( result );" ); } stream.println( " return result;" ); stream.println( " }" ); } } // loads stream.println( "}" ); } }
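/*
 * A minimal caller sketch for the generator above, assuming the DGConfig and TableConfig
 * instances come from the daogen configuration layer (their construction is not shown in this
 * file); the target file name and the sketch class itself are illustrative. Only the
 * generate(PrintStream, DGConfig, TableConfig) entry point defined above is used.
 */
package org.morozko.java.mod.daogen.gen.coder;

import java.io.FileOutputStream;
import java.io.PrintStream;

import org.morozko.java.mod.daogen.gen.config.DGConfig;
import org.morozko.java.mod.daogen.gen.config.TableConfig;

public class LegacyDAOCoderUsageSketch {

	// Writes the generated <TableName>DAOHelper source for one table to the given file.
	public static void writeHelper( DGConfig dgConfig, TableConfig tableConfig, String targetFile ) throws Exception {
		PrintStream out = new PrintStream( new FileOutputStream( targetFile ), true, "UTF-8" );
		try {
			LegacyDAOCoder.generate( out, dgConfig, tableConfig );
		} finally {
			out.close();
		}
	}
}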
package org.cipres.treebase.service.study; import java.util.ArrayList; import java.util.Collection; import java.util.HashSet; import java.util.Set; import org.cipres.treebase.domain.DomainHome; import org.cipres.treebase.domain.admin.User; import org.cipres.treebase.domain.admin.UserHome; import org.cipres.treebase.domain.admin.UserRole.TBPermission; import org.cipres.treebase.domain.study.Analysis; import org.cipres.treebase.domain.study.AnalysisHome; import org.cipres.treebase.domain.study.AnalysisService; import org.cipres.treebase.domain.study.AnalysisStepService; import org.cipres.treebase.domain.study.Study; import org.cipres.treebase.domain.study.Submission; import org.cipres.treebase.service.AbstractServiceImpl; /** * AnalsysisServiceImpl.java * * Created on Jun 6, 2006 * * @author lcchan * */ public class AnalysisServiceImpl extends AbstractServiceImpl implements AnalysisService { private AnalysisHome mAnalysisHome; private UserHome mUserHome; private AnalysisStepService mAnalysisStepService; /** * Return the mAnalysisStepService field. * * @return AnalysisStepService mmAnalysisStepService */ private AnalysisStepService getAnalysisStepService() { return mAnalysisStepService; } /** * Set the mAnalysisStepService field. */ public void setAnalysisStepService(AnalysisStepService pNewAnalysisStepService) { mAnalysisStepService = pNewAnalysisStepService; } /** * Return the AnalysisHome field. * * @return AnalysisHome mAnalysisHome */ private AnalysisHome getAnalysisHome() { return mAnalysisHome; } /** * Set the AnalysisHome field. */ public void setAnalysisHome(AnalysisHome pNewAnalysisHome) { mAnalysisHome = pNewAnalysisHome; } /** * * @see org.cipres.treebase.service.AbstractServiceImpl#getDomainHome() */ @Override protected DomainHome getDomainHome() { return getAnalysisHome(); } /** * * @see org.cipres.treebase.domain.study.AnalysisService#deleteAnalysis(org.cipres.treebase.domain.study.Analysis) */ public boolean deleteAnalysis(Analysis pAnalysis) { if (pAnalysis == null) { return false; } // Need to make sure the object belongs to the current session: // Analysis currentAnalysis = update(pAnalysis); Analysis currentAnalysis = pAnalysis; // manage bi-directional relationships: Study study = currentAnalysis.getStudy(); if (study != null) { study.removeAnalysis(currentAnalysis); } // Cascade delete: // * analysis step: getAnalysisStepService().deleteAnalysisSteps(currentAnalysis.getAnalysisStepsReadOnly()); getAnalysisHome().deletePersist(currentAnalysis); return true; } /** * * @see org.cipres.treebase.domain.study.AnalysisService#deleteAnalyses(java.util.Collection) */ public void deleteAnalyses(Collection<Analysis> pAnalyses) { if (pAnalyses == null || pAnalyses.isEmpty()) { return; } // make a copy to avoid the potential concurrent modification. 
Set<Analysis> copy = new HashSet<Analysis>(pAnalyses); for (Analysis analysis : copy) { deleteAnalysis(analysis); } } /** * * @see org.cipres.treebase.domain.study.AnalysisService#findByID(java.lang.Long) */ public Analysis findByID(Long pAnalysisID) { if (pAnalysisID == null) { return null; } return getAnalysisHome().findPersistedObjectByID(Analysis.class, pAnalysisID); } /** * * @see org.cipres.treebase.domain.study.AnalysisService#getPermission(java.lang.String, * java.lang.Long) */ public TBPermission getPermission(String pUsername, Long pAnalysisId) { Analysis a = findByID(pAnalysisId); // Long submissionID = null; Submission sub = null; if (a != null) { sub = a.getStudy().getSubmission(); } if (sub == null) { return TBPermission.NONE; } User user = getUserHome().findByUserName(pUsername); return sub.getPermission(user); } /** * * @return */ private UserHome getUserHome() { return mUserHome; } /** * Set the user home. * * @param pUserHome */ public void setUserHome(UserHome pUserHome) { mUserHome = pUserHome; } /** * * @see org.cipres.treebase.domain.study.AnalysisService#deleteAnalyses(org.cipres.treebase.domain.study.Study) */ public void deleteAnalyses(Study pStudy) { if (pStudy == null) { return; } // make a copy to avoid the potential concurrent modification. Collection<Analysis> copy = new ArrayList<Analysis>(pStudy.getAnalyses()); // Need to make sure the object belongs to the current session: // Analysis currentAnalysis = update(pAnalysis); for (Analysis analysis : copy) { // Cascade delete: // * analysis step: getAnalysisStepService().deleteAnalysisSteps(analysis.getAnalysisStepsReadOnly()); } // manage bi-directional relationships: pStudy.getAnalyses().clear(); getAnalysisHome().deleteAll(copy); } @Override public Class defaultResultClass() { return Analysis.class; } }
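/*
 * A minimal wiring sketch for the service above. In the running application the home and
 * service collaborators are injected by the Spring container; here they are passed in
 * explicitly, and it is assumed that AnalysisServiceImpl can be instantiated directly (its
 * AbstractServiceImpl superclass is not shown in this file). Only setters and methods declared
 * above are used; the sketch class is illustrative.
 */
package org.cipres.treebase.service.study;

import org.cipres.treebase.domain.admin.UserHome;
import org.cipres.treebase.domain.admin.UserRole.TBPermission;
import org.cipres.treebase.domain.study.AnalysisHome;
import org.cipres.treebase.domain.study.AnalysisStepService;

public class AnalysisServiceWiringSketch {

	/**
	 * Wires an AnalysisServiceImpl by hand and returns the named user's permission on an analysis.
	 */
	public static TBPermission permissionFor(
		AnalysisHome pAnalysisHome,
		UserHome pUserHome,
		AnalysisStepService pAnalysisStepService,
		String pUsername,
		Long pAnalysisId) {

		AnalysisServiceImpl service = new AnalysisServiceImpl();
		service.setAnalysisHome(pAnalysisHome);
		service.setUserHome(pUserHome);
		service.setAnalysisStepService(pAnalysisStepService);

		// Resolves the analysis, walks to its study's submission and returns the permission.
		return service.getPermission(pUsername, pAnalysisId);
	}
}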
/* * Muhimbi PDF * Convert, Merge, Watermark, Secure and OCR files. * * OpenAPI spec version: 9.15 * * * NOTE: This class is auto generated by the swagger code generator program. * https://github.com/swagger-api/swagger-codegen.git * Do not edit the class manually. */ package com.muhimbi.online.client.api; import com.muhimbi.online.client.ApiCallback; import com.muhimbi.online.client.ApiClient; import com.muhimbi.online.client.ApiException; import com.muhimbi.online.client.ApiResponse; import com.muhimbi.online.client.Configuration; import com.muhimbi.online.client.Pair; import com.muhimbi.online.client.ProgressRequestBody; import com.muhimbi.online.client.ProgressResponseBody; import com.google.gson.reflect.TypeToken; import java.io.IOException; import com.muhimbi.online.client.model.ConvertData; import com.muhimbi.online.client.model.ConvertHtmlData; import com.muhimbi.online.client.model.OperationResponse; import java.lang.reflect.Type; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; public class ConvertApi { private ApiClient apiClient; public ConvertApi() { this(Configuration.getDefaultApiClient()); } public ConvertApi(ApiClient apiClient) { this.apiClient = apiClient; } public ApiClient getApiClient() { return apiClient; } public void setApiClient(ApiClient apiClient) { this.apiClient = apiClient; } /* Build call for convert */ private com.squareup.okhttp.Call convertCall(ConvertData inputData, final ProgressResponseBody.ProgressListener progressListener, final ProgressRequestBody.ProgressRequestListener progressRequestListener) throws ApiException { Object localVarPostBody = inputData; // create path and map variables String localVarPath = "/v1/operations/convert".replaceAll("\\{format\\}","json"); List<Pair> localVarQueryParams = new ArrayList<Pair>(); Map<String, String> localVarHeaderParams = new HashMap<String, String>(); Map<String, Object> localVarFormParams = new HashMap<String, Object>(); final String[] localVarAccepts = { "application/json" }; final String localVarAccept = apiClient.selectHeaderAccept(localVarAccepts); if (localVarAccept != null) localVarHeaderParams.put("Accept", localVarAccept); final String[] localVarContentTypes = { "application/json" }; final String localVarContentType = apiClient.selectHeaderContentType(localVarContentTypes); localVarHeaderParams.put("Content-Type", localVarContentType); if(progressListener != null) { apiClient.getHttpClient().networkInterceptors().add(new com.squareup.okhttp.Interceptor() { @Override public com.squareup.okhttp.Response intercept(com.squareup.okhttp.Interceptor.Chain chain) throws IOException { com.squareup.okhttp.Response originalResponse = chain.proceed(chain.request()); return originalResponse.newBuilder() .body(new ProgressResponseBody(originalResponse.body(), progressListener)) .build(); } }); } String[] localVarAuthNames = new String[] { "oauth2_auth", "api_key" }; return apiClient.buildCall(localVarPath, "POST", localVarQueryParams, localVarPostBody, localVarHeaderParams, localVarFormParams, localVarAuthNames, progressRequestListener); } @SuppressWarnings("rawtypes") private com.squareup.okhttp.Call convertValidateBeforeCall(ConvertData inputData, final ProgressResponseBody.ProgressListener progressListener, final ProgressRequestBody.ProgressRequestListener progressRequestListener) throws ApiException { // verify the required parameter 'inputData' is set if (inputData == null) { throw new ApiException("Missing the required parameter 'inputData' when calling 
convert(Async)"); } com.squareup.okhttp.Call call = convertCall(inputData, progressListener, progressRequestListener); return call; } /** * Convert document * Convert a file to PDF or other format. * @param inputData (required) * @return OperationResponse * @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body */ public OperationResponse convert(ConvertData inputData) throws ApiException { ApiResponse<OperationResponse> resp = convertWithHttpInfo(inputData); return resp.getData(); } /** * Convert document * Convert a file to PDF or other format. * @param inputData (required) * @return ApiResponse&lt;OperationResponse&gt; * @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body */ public ApiResponse<OperationResponse> convertWithHttpInfo(ConvertData inputData) throws ApiException { com.squareup.okhttp.Call call = convertValidateBeforeCall(inputData, null, null); Type localVarReturnType = new TypeToken<OperationResponse>(){}.getType(); return apiClient.execute(call, localVarReturnType); } /** * Convert document (asynchronously) * Convert a file to PDF or other format. * @param inputData (required) * @param callback The callback to be executed when the API call finishes * @return The request call * @throws ApiException If fail to process the API call, e.g. serializing the request body object */ public com.squareup.okhttp.Call convertAsync(ConvertData inputData, final ApiCallback<OperationResponse> callback) throws ApiException { ProgressResponseBody.ProgressListener progressListener = null; ProgressRequestBody.ProgressRequestListener progressRequestListener = null; if (callback != null) { progressListener = new ProgressResponseBody.ProgressListener() { @Override public void update(long bytesRead, long contentLength, boolean done) { callback.onDownloadProgress(bytesRead, contentLength, done); } }; progressRequestListener = new ProgressRequestBody.ProgressRequestListener() { @Override public void onRequestProgress(long bytesWritten, long contentLength, boolean done) { callback.onUploadProgress(bytesWritten, contentLength, done); } }; } com.squareup.okhttp.Call call = convertValidateBeforeCall(inputData, progressListener, progressRequestListener); Type localVarReturnType = new TypeToken<OperationResponse>(){}.getType(); apiClient.executeAsync(call, localVarReturnType, callback); return call; } /* Build call for convertHtml */ private com.squareup.okhttp.Call convertHtmlCall(ConvertHtmlData inputData, final ProgressResponseBody.ProgressListener progressListener, final ProgressRequestBody.ProgressRequestListener progressRequestListener) throws ApiException { Object localVarPostBody = inputData; // create path and map variables String localVarPath = "/v1/operations/convert_html".replaceAll("\\{format\\}","json"); List<Pair> localVarQueryParams = new ArrayList<Pair>(); Map<String, String> localVarHeaderParams = new HashMap<String, String>(); Map<String, Object> localVarFormParams = new HashMap<String, Object>(); final String[] localVarAccepts = { "application/json" }; final String localVarAccept = apiClient.selectHeaderAccept(localVarAccepts); if (localVarAccept != null) localVarHeaderParams.put("Accept", localVarAccept); final String[] localVarContentTypes = { "application/json" }; final String localVarContentType = apiClient.selectHeaderContentType(localVarContentTypes); localVarHeaderParams.put("Content-Type", localVarContentType); if(progressListener != null) { 
apiClient.getHttpClient().networkInterceptors().add(new com.squareup.okhttp.Interceptor() { @Override public com.squareup.okhttp.Response intercept(com.squareup.okhttp.Interceptor.Chain chain) throws IOException { com.squareup.okhttp.Response originalResponse = chain.proceed(chain.request()); return originalResponse.newBuilder() .body(new ProgressResponseBody(originalResponse.body(), progressListener)) .build(); } }); } String[] localVarAuthNames = new String[] { "oauth2_auth", "api_key" }; return apiClient.buildCall(localVarPath, "POST", localVarQueryParams, localVarPostBody, localVarHeaderParams, localVarFormParams, localVarAuthNames, progressRequestListener); } @SuppressWarnings("rawtypes") private com.squareup.okhttp.Call convertHtmlValidateBeforeCall(ConvertHtmlData inputData, final ProgressResponseBody.ProgressListener progressListener, final ProgressRequestBody.ProgressRequestListener progressRequestListener) throws ApiException { // verify the required parameter 'inputData' is set if (inputData == null) { throw new ApiException("Missing the required parameter 'inputData' when calling convertHtml(Async)"); } com.squareup.okhttp.Call call = convertHtmlCall(inputData, progressListener, progressRequestListener); return call; } /** * Convert HTML to PDF * Convert URLs or HTML to PDF. * @param inputData (required) * @return OperationResponse * @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body */ public OperationResponse convertHtml(ConvertHtmlData inputData) throws ApiException { ApiResponse<OperationResponse> resp = convertHtmlWithHttpInfo(inputData); return resp.getData(); } /** * Convert HTML to PDF * Convert URLs or HTML to PDF. * @param inputData (required) * @return ApiResponse&lt;OperationResponse&gt; * @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body */ public ApiResponse<OperationResponse> convertHtmlWithHttpInfo(ConvertHtmlData inputData) throws ApiException { com.squareup.okhttp.Call call = convertHtmlValidateBeforeCall(inputData, null, null); Type localVarReturnType = new TypeToken<OperationResponse>(){}.getType(); return apiClient.execute(call, localVarReturnType); } /** * Convert HTML to PDF (asynchronously) * Convert URLs or HTML to PDF. * @param inputData (required) * @param callback The callback to be executed when the API call finishes * @return The request call * @throws ApiException If fail to process the API call, e.g. 
serializing the request body object */ public com.squareup.okhttp.Call convertHtmlAsync(ConvertHtmlData inputData, final ApiCallback<OperationResponse> callback) throws ApiException { ProgressResponseBody.ProgressListener progressListener = null; ProgressRequestBody.ProgressRequestListener progressRequestListener = null; if (callback != null) { progressListener = new ProgressResponseBody.ProgressListener() { @Override public void update(long bytesRead, long contentLength, boolean done) { callback.onDownloadProgress(bytesRead, contentLength, done); } }; progressRequestListener = new ProgressRequestBody.ProgressRequestListener() { @Override public void onRequestProgress(long bytesWritten, long contentLength, boolean done) { callback.onUploadProgress(bytesWritten, contentLength, done); } }; } com.squareup.okhttp.Call call = convertHtmlValidateBeforeCall(inputData, progressListener, progressRequestListener); Type localVarReturnType = new TypeToken<OperationResponse>(){}.getType(); apiClient.executeAsync(call, localVarReturnType, callback); return call; } }
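/*
 * Usage sketch (not part of the generated client): the ConvertApi above exposes synchronous
 * (convert / convertWithHttpInfo) and asynchronous (convertAsync) entry points. The minimal
 * example below uses only types shown in this file (ApiClient, Configuration, ConvertApi,
 * ConvertData, OperationResponse, ApiException). How ConvertData is populated (source file,
 * output format, etc.) depends on that model class, which is not shown here, so the bare
 * no-arg construction is an assumption and a real call would need the model fields filled in.
 */
import com.muhimbi.online.client.ApiClient;
import com.muhimbi.online.client.ApiException;
import com.muhimbi.online.client.Configuration;
import com.muhimbi.online.client.api.ConvertApi;
import com.muhimbi.online.client.model.ConvertData;
import com.muhimbi.online.client.model.OperationResponse;

public class ConvertApiUsageSketch {
    public static void main(String[] args) {
        // The default ApiClient is used unless one is passed to the ConvertApi constructor.
        ApiClient client = Configuration.getDefaultApiClient();
        ConvertApi api = new ConvertApi(client);

        // Placeholder input: populating ConvertData is model-specific and not shown in this
        // file; an empty instance would be rejected by server-side validation.
        ConvertData input = new ConvertData();
        try {
            OperationResponse response = api.convert(input);
            System.out.println("Conversion finished: " + response);
        } catch (ApiException e) {
            // Thrown on server errors or response deserialization failures, per the Javadoc above.
            System.err.println("Convert call failed: " + e.getMessage());
        }
    }
}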
/* * Copyright 2000-2016 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jetbrains.plugins.groovy.refactoring.introduce.parameter; import com.intellij.openapi.application.WriteAction; import com.intellij.openapi.editor.impl.DocumentMarkupModel; import com.intellij.openapi.editor.markup.EffectType; import com.intellij.openapi.editor.markup.HighlighterTargetArea; import com.intellij.openapi.editor.markup.MarkupModel; import com.intellij.openapi.editor.markup.TextAttributes; import com.intellij.openapi.util.TextRange; import com.intellij.psi.PsiElement; import com.intellij.psi.PsiMethod; import com.intellij.psi.PsiParameter; import com.intellij.psi.PsiType; import com.intellij.refactoring.IntroduceParameterRefactoring; import com.intellij.refactoring.introduce.inplace.OccurrencesChooser; import com.intellij.ui.JBColor; import com.intellij.ui.components.JBCheckBox; import com.intellij.usageView.UsageInfo; import com.intellij.util.ArrayUtil; import gnu.trove.TIntArrayList; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.jetbrains.plugins.groovy.lang.psi.api.statements.GrParametersOwner; import org.jetbrains.plugins.groovy.lang.psi.api.statements.GrVariable; import org.jetbrains.plugins.groovy.lang.psi.api.statements.expressions.GrExpression; import org.jetbrains.plugins.groovy.refactoring.introduce.GrAbstractInplaceIntroducer; import org.jetbrains.plugins.groovy.refactoring.introduce.GrIntroduceContext; import org.jetbrains.plugins.groovy.refactoring.introduce.GrIntroduceHandlerBase; import javax.swing.*; import javax.swing.border.EmptyBorder; import java.awt.*; import java.util.ArrayList; import java.util.LinkedHashSet; import java.util.List; public class GrInplaceParameterIntroducer extends GrAbstractInplaceIntroducer<GrIntroduceParameterSettings> { private final IntroduceParameterInfo myInfo; private final TIntArrayList myParametersToRemove; private JBCheckBox myDelegateCB; private final LinkedHashSet<String> mySuggestedNames; public GrInplaceParameterIntroducer(IntroduceParameterInfo info, GrIntroduceContext context, OccurrencesChooser.ReplaceChoice choice) { super(GrIntroduceParameterHandler.REFACTORING_NAME, choice, context); myInfo = info; GrVariable localVar = GrIntroduceHandlerBase.resolveLocalVar(context); mySuggestedNames = GroovyIntroduceParameterUtil.suggestNames(localVar, context.getExpression(), context.getStringPart(), info.getToReplaceIn(), context.getProject()); myParametersToRemove = new TIntArrayList(GroovyIntroduceParameterUtil.findParametersToRemove(info).getValues()); } @Override protected String getActionName() { return GrIntroduceParameterHandler.REFACTORING_NAME; } @NotNull @Override protected String[] suggestNames(boolean replaceAll, @Nullable GrVariable variable) { return ArrayUtil.toStringArray(mySuggestedNames); } @Override protected JComponent getComponent() { JPanel previewPanel = new JPanel(new BorderLayout()); previewPanel.add(getPreviewEditor().getComponent(), BorderLayout.CENTER); 
previewPanel.setBorder(new EmptyBorder(2, 2, 6, 2)); myDelegateCB = new JBCheckBox("Delegate via overloading method"); myDelegateCB.setMnemonic('l'); myDelegateCB.setFocusable(false); JPanel panel = new JPanel(new BorderLayout()); panel.add(previewPanel, BorderLayout.CENTER); panel.add(myDelegateCB, BorderLayout.SOUTH); return panel; } @Override protected void saveSettings(@NotNull GrVariable variable) { } @Override protected void updateTitle(@Nullable GrVariable variable) { if (variable == null) return; updateTitle(variable, variable.getName()); } @Override protected void updateTitle(@Nullable GrVariable variable, String value) { if (getPreviewEditor() == null || variable == null) return; final PsiElement declarationScope = ((PsiParameter)variable).getDeclarationScope(); if (declarationScope instanceof PsiMethod) { final PsiMethod psiMethod = (PsiMethod)declarationScope; final StringBuilder buf = new StringBuilder(); buf.append(psiMethod.getName()).append(" ("); boolean first = true; final List<TextRange> ranges2Remove = new ArrayList<>(); TextRange addedRange = null; int i = 0; for (PsiParameter parameter : psiMethod.getParameterList().getParameters()) { if (first) { first = false; } else { buf.append(", "); } int startOffset = buf.length(); /*if (myMustBeFinal || myPanel.isGenerateFinal()) { buf.append("final "); }*/ buf.append(parameter.getType().getPresentableText()).append(" ").append(variable == parameter ? value : parameter.getName()); int endOffset = buf.length(); if (variable == parameter) { addedRange = new TextRange(startOffset, endOffset); } else if (myParametersToRemove.contains(i)) { ranges2Remove.add(new TextRange(startOffset, endOffset)); } i++; } assert addedRange != null; buf.append(")"); setPreviewText(buf.toString()); final MarkupModel markupModel = DocumentMarkupModel.forDocument(getPreviewEditor().getDocument(), myProject, true); markupModel.removeAllHighlighters(); for (TextRange textRange : ranges2Remove) { markupModel.addRangeHighlighter(textRange.getStartOffset(), textRange.getEndOffset(), 0, getTextAttributesForRemoval(), HighlighterTargetArea.EXACT_RANGE); } markupModel.addRangeHighlighter(addedRange.getStartOffset(), addedRange.getEndOffset(), 0, getTextAttributesForAdd(), HighlighterTargetArea.EXACT_RANGE); //revalidate(); } } private static TextAttributes getTextAttributesForAdd() { final TextAttributes textAttributes = new TextAttributes(); textAttributes.setEffectType(EffectType.ROUNDED_BOX); textAttributes.setEffectColor(JBColor.RED); return textAttributes; } private static TextAttributes getTextAttributesForRemoval() { final TextAttributes textAttributes = new TextAttributes(); textAttributes.setEffectType(EffectType.STRIKEOUT); textAttributes.setEffectColor(JBColor.BLACK); return textAttributes; } @Override protected GrVariable runRefactoring(GrIntroduceContext context, GrIntroduceParameterSettings settings, boolean processUsages) { GrExpressionWrapper wrapper = createExpressionWrapper(context); if (processUsages) { GrIntroduceExpressionSettingsImpl patchedSettings = new GrIntroduceExpressionSettingsImpl(settings, settings.getName(), settings.declareFinal(), settings.parametersToRemove(), settings.generateDelegate(), settings.replaceFieldsWithGetters(), context.getExpression(), context.getVar(), settings.getSelectedType(), context.getVar() != null || settings.replaceAllOccurrences(), context.getVar() != null, settings.isForceReturn()); GrIntroduceParameterProcessor processor = new GrIntroduceParameterProcessor(patchedSettings, wrapper); processor.run(); }
else { WriteAction.run(() -> new GrIntroduceParameterProcessor(settings, wrapper).performRefactoring(UsageInfo.EMPTY_ARRAY)); } GrParametersOwner owner = settings.getToReplaceIn(); return ArrayUtil.getLastElement(owner.getParameters()); } @NotNull private static GrExpressionWrapper createExpressionWrapper(@NotNull GrIntroduceContext context) { GrExpression expression = context.getExpression(); GrVariable var = context.getVar(); assert expression != null || var != null ; GrExpression initializer = expression != null ? expression : var.getInitializerGroovy(); return new GrExpressionWrapper(initializer); } @Nullable @Override protected GrIntroduceParameterSettings getInitialSettingsForInplace(@NotNull GrIntroduceContext context, @NotNull OccurrencesChooser.ReplaceChoice choice, String[] names) { GrExpression expression = context.getExpression(); GrVariable var = context.getVar(); PsiType type = var != null ? var.getDeclaredType() : expression != null ? expression.getType() : null; return new GrIntroduceExpressionSettingsImpl(myInfo, names[0], false, new TIntArrayList(), false, IntroduceParameterRefactoring.REPLACE_FIELDS_WITH_GETTERS_NONE, expression, var, type, false, false, false); } @Override protected GrIntroduceParameterSettings getSettings() { return new GrIntroduceExpressionSettingsImpl(myInfo, getInputName(), false, myParametersToRemove, myDelegateCB.isSelected(), IntroduceParameterRefactoring.REPLACE_FIELDS_WITH_GETTERS_NONE, null, null, getSelectedType(), isReplaceAllOccurrences(), false, false); } }
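/*
 * Illustrative sketch only: the updateTitle() loop above rebuilds the method signature preview
 * and records which character spans to highlight (the parameter being introduced) or strike out
 * (parameters slated for removal). The standalone class below re-states that range bookkeeping
 * with plain JDK types, detached from the IntelliJ PSI and markup APIs; all names and the
 * int[]-based "parameters to remove" input are assumptions made for the example.
 */
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

class ParameterPreviewSketch {
    private static boolean contains(int[] xs, int x) {
        for (int v : xs) if (v == x) return true;
        return false;
    }

    static String buildPreview(String methodName, String[] paramTypes, String[] paramNames,
                               int editedIndex, String editedName, int[] toRemove,
                               List<int[]> removalRanges, int[] editedRange) {
        StringBuilder buf = new StringBuilder(methodName).append(" (");
        boolean first = true;
        for (int i = 0; i < paramNames.length; i++) {
            if (first) { first = false; } else { buf.append(", "); }
            int start = buf.length();
            String name = (i == editedIndex) ? editedName : paramNames[i];
            buf.append(paramTypes[i]).append(" ").append(name);
            int end = buf.length();
            if (i == editedIndex) {
                editedRange[0] = start; editedRange[1] = end;   // span highlighted as "added"
            } else if (contains(toRemove, i)) {
                removalRanges.add(new int[]{start, end});        // span highlighted for removal
            }
        }
        return buf.append(")").toString();
    }

    public static void main(String[] args) {
        List<int[]> removals = new ArrayList<>();
        int[] edited = new int[2];
        String preview = buildPreview("process", new String[]{"String", "int", "boolean"},
                new String[]{"path", "count", "force"}, 1, "limit", new int[]{2}, removals, edited);
        System.out.println(preview + "  edited=" + Arrays.toString(edited)
                + " removals=" + removals.size());
    }
}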
package com.ysu.zyw.tc.api.dao.po; import java.util.ArrayList; import java.util.Date; import java.util.List; public class TcShopExample { protected String orderByClause; protected boolean distinct; protected List<Criteria> oredCriteria; protected Integer startLine; protected Integer pageSize; public TcShopExample() { oredCriteria = new ArrayList<Criteria>(); } public void setOrderByClause(String orderByClause) { this.orderByClause = orderByClause; } public String getOrderByClause() { return orderByClause; } public void setDistinct(boolean distinct) { this.distinct = distinct; } public boolean isDistinct() { return distinct; } public List<Criteria> getOredCriteria() { return oredCriteria; } public void or(Criteria criteria) { oredCriteria.add(criteria); } public Criteria or() { Criteria criteria = createCriteriaInternal(); oredCriteria.add(criteria); return criteria; } public Criteria createCriteria() { Criteria criteria = createCriteriaInternal(); if (oredCriteria.size() == 0) { oredCriteria.add(criteria); } return criteria; } protected Criteria createCriteriaInternal() { Criteria criteria = new Criteria(); return criteria; } public void clear() { oredCriteria.clear(); orderByClause = null; distinct = false; } public void setStartLine(Integer startLine) { this.startLine = startLine; } public Integer getStartLine() { return startLine; } public void setPageSize(Integer pageSize) { this.pageSize = pageSize; } public Integer getPageSize() { return pageSize; } protected abstract static class GeneratedCriteria { protected List<Criterion> criteria; protected GeneratedCriteria() { super(); criteria = new ArrayList<Criterion>(); } public boolean isValid() { return criteria.size() > 0; } public List<Criterion> getAllCriteria() { return criteria; } public List<Criterion> getCriteria() { return criteria; } protected void addCriterion(String condition) { if (condition == null) { throw new RuntimeException("Value for condition cannot be null"); } criteria.add(new Criterion(condition)); } protected void addCriterion(String condition, Object value, String property) { if (value == null) { throw new RuntimeException("Value for " + property + " cannot be null"); } criteria.add(new Criterion(condition, value)); } protected void addCriterion(String condition, Object value1, Object value2, String property) { if (value1 == null || value2 == null) { throw new RuntimeException("Between values for " + property + " cannot be null"); } criteria.add(new Criterion(condition, value1, value2)); } public Criteria andIdIsNull() { addCriterion("id is null"); return (Criteria) this; } public Criteria andIdIsNotNull() { addCriterion("id is not null"); return (Criteria) this; } public Criteria andIdEqualTo(String value) { addCriterion("id =", value, "id"); return (Criteria) this; } public Criteria andIdNotEqualTo(String value) { addCriterion("id <>", value, "id"); return (Criteria) this; } public Criteria andIdGreaterThan(String value) { addCriterion("id >", value, "id"); return (Criteria) this; } public Criteria andIdGreaterThanOrEqualTo(String value) { addCriterion("id >=", value, "id"); return (Criteria) this; } public Criteria andIdLessThan(String value) { addCriterion("id <", value, "id"); return (Criteria) this; } public Criteria andIdLessThanOrEqualTo(String value) { addCriterion("id <=", value, "id"); return (Criteria) this; } public Criteria andIdLike(String value) { addCriterion("id like", value, "id"); return (Criteria) this; } public Criteria andIdNotLike(String value) { addCriterion("id not like", value, "id"); 
return (Criteria) this; } public Criteria andIdIn(List<String> values) { addCriterion("id in", values, "id"); return (Criteria) this; } public Criteria andIdNotIn(List<String> values) { addCriterion("id not in", values, "id"); return (Criteria) this; } public Criteria andIdBetween(String value1, String value2) { addCriterion("id between", value1, value2, "id"); return (Criteria) this; } public Criteria andIdNotBetween(String value1, String value2) { addCriterion("id not between", value1, value2, "id"); return (Criteria) this; } public Criteria andNameIsNull() { addCriterion("name is null"); return (Criteria) this; } public Criteria andNameIsNotNull() { addCriterion("name is not null"); return (Criteria) this; } public Criteria andNameEqualTo(String value) { addCriterion("name =", value, "name"); return (Criteria) this; } public Criteria andNameNotEqualTo(String value) { addCriterion("name <>", value, "name"); return (Criteria) this; } public Criteria andNameGreaterThan(String value) { addCriterion("name >", value, "name"); return (Criteria) this; } public Criteria andNameGreaterThanOrEqualTo(String value) { addCriterion("name >=", value, "name"); return (Criteria) this; } public Criteria andNameLessThan(String value) { addCriterion("name <", value, "name"); return (Criteria) this; } public Criteria andNameLessThanOrEqualTo(String value) { addCriterion("name <=", value, "name"); return (Criteria) this; } public Criteria andNameLike(String value) { addCriterion("name like", value, "name"); return (Criteria) this; } public Criteria andNameNotLike(String value) { addCriterion("name not like", value, "name"); return (Criteria) this; } public Criteria andNameIn(List<String> values) { addCriterion("name in", values, "name"); return (Criteria) this; } public Criteria andNameNotIn(List<String> values) { addCriterion("name not in", values, "name"); return (Criteria) this; } public Criteria andNameBetween(String value1, String value2) { addCriterion("name between", value1, value2, "name"); return (Criteria) this; } public Criteria andNameNotBetween(String value1, String value2) { addCriterion("name not between", value1, value2, "name"); return (Criteria) this; } public Criteria andLocationIsNull() { addCriterion("location is null"); return (Criteria) this; } public Criteria andLocationIsNotNull() { addCriterion("location is not null"); return (Criteria) this; } public Criteria andLocationEqualTo(String value) { addCriterion("location =", value, "location"); return (Criteria) this; } public Criteria andLocationNotEqualTo(String value) { addCriterion("location <>", value, "location"); return (Criteria) this; } public Criteria andLocationGreaterThan(String value) { addCriterion("location >", value, "location"); return (Criteria) this; } public Criteria andLocationGreaterThanOrEqualTo(String value) { addCriterion("location >=", value, "location"); return (Criteria) this; } public Criteria andLocationLessThan(String value) { addCriterion("location <", value, "location"); return (Criteria) this; } public Criteria andLocationLessThanOrEqualTo(String value) { addCriterion("location <=", value, "location"); return (Criteria) this; } public Criteria andLocationLike(String value) { addCriterion("location like", value, "location"); return (Criteria) this; } public Criteria andLocationNotLike(String value) { addCriterion("location not like", value, "location"); return (Criteria) this; } public Criteria andLocationIn(List<String> values) { addCriterion("location in", values, "location"); return (Criteria) this; } 
public Criteria andLocationNotIn(List<String> values) { addCriterion("location not in", values, "location"); return (Criteria) this; } public Criteria andLocationBetween(String value1, String value2) { addCriterion("location between", value1, value2, "location"); return (Criteria) this; } public Criteria andLocationNotBetween(String value1, String value2) { addCriterion("location not between", value1, value2, "location"); return (Criteria) this; } public Criteria andDescribingRateIsNull() { addCriterion("describing_rate is null"); return (Criteria) this; } public Criteria andDescribingRateIsNotNull() { addCriterion("describing_rate is not null"); return (Criteria) this; } public Criteria andDescribingRateEqualTo(Integer value) { addCriterion("describing_rate =", value, "describingRate"); return (Criteria) this; } public Criteria andDescribingRateNotEqualTo(Integer value) { addCriterion("describing_rate <>", value, "describingRate"); return (Criteria) this; } public Criteria andDescribingRateGreaterThan(Integer value) { addCriterion("describing_rate >", value, "describingRate"); return (Criteria) this; } public Criteria andDescribingRateGreaterThanOrEqualTo(Integer value) { addCriterion("describing_rate >=", value, "describingRate"); return (Criteria) this; } public Criteria andDescribingRateLessThan(Integer value) { addCriterion("describing_rate <", value, "describingRate"); return (Criteria) this; } public Criteria andDescribingRateLessThanOrEqualTo(Integer value) { addCriterion("describing_rate <=", value, "describingRate"); return (Criteria) this; } public Criteria andDescribingRateIn(List<Integer> values) { addCriterion("describing_rate in", values, "describingRate"); return (Criteria) this; } public Criteria andDescribingRateNotIn(List<Integer> values) { addCriterion("describing_rate not in", values, "describingRate"); return (Criteria) this; } public Criteria andDescribingRateBetween(Integer value1, Integer value2) { addCriterion("describing_rate between", value1, value2, "describingRate"); return (Criteria) this; } public Criteria andDescribingRateNotBetween(Integer value1, Integer value2) { addCriterion("describing_rate not between", value1, value2, "describingRate"); return (Criteria) this; } public Criteria andServiceRateIsNull() { addCriterion("service_rate is null"); return (Criteria) this; } public Criteria andServiceRateIsNotNull() { addCriterion("service_rate is not null"); return (Criteria) this; } public Criteria andServiceRateEqualTo(Integer value) { addCriterion("service_rate =", value, "serviceRate"); return (Criteria) this; } public Criteria andServiceRateNotEqualTo(Integer value) { addCriterion("service_rate <>", value, "serviceRate"); return (Criteria) this; } public Criteria andServiceRateGreaterThan(Integer value) { addCriterion("service_rate >", value, "serviceRate"); return (Criteria) this; } public Criteria andServiceRateGreaterThanOrEqualTo(Integer value) { addCriterion("service_rate >=", value, "serviceRate"); return (Criteria) this; } public Criteria andServiceRateLessThan(Integer value) { addCriterion("service_rate <", value, "serviceRate"); return (Criteria) this; } public Criteria andServiceRateLessThanOrEqualTo(Integer value) { addCriterion("service_rate <=", value, "serviceRate"); return (Criteria) this; } public Criteria andServiceRateIn(List<Integer> values) { addCriterion("service_rate in", values, "serviceRate"); return (Criteria) this; } public Criteria andServiceRateNotIn(List<Integer> values) { addCriterion("service_rate not in", values, 
"serviceRate"); return (Criteria) this; } public Criteria andServiceRateBetween(Integer value1, Integer value2) { addCriterion("service_rate between", value1, value2, "serviceRate"); return (Criteria) this; } public Criteria andServiceRateNotBetween(Integer value1, Integer value2) { addCriterion("service_rate not between", value1, value2, "serviceRate"); return (Criteria) this; } public Criteria andDeliveryRateIsNull() { addCriterion("delivery_rate is null"); return (Criteria) this; } public Criteria andDeliveryRateIsNotNull() { addCriterion("delivery_rate is not null"); return (Criteria) this; } public Criteria andDeliveryRateEqualTo(Integer value) { addCriterion("delivery_rate =", value, "deliveryRate"); return (Criteria) this; } public Criteria andDeliveryRateNotEqualTo(Integer value) { addCriterion("delivery_rate <>", value, "deliveryRate"); return (Criteria) this; } public Criteria andDeliveryRateGreaterThan(Integer value) { addCriterion("delivery_rate >", value, "deliveryRate"); return (Criteria) this; } public Criteria andDeliveryRateGreaterThanOrEqualTo(Integer value) { addCriterion("delivery_rate >=", value, "deliveryRate"); return (Criteria) this; } public Criteria andDeliveryRateLessThan(Integer value) { addCriterion("delivery_rate <", value, "deliveryRate"); return (Criteria) this; } public Criteria andDeliveryRateLessThanOrEqualTo(Integer value) { addCriterion("delivery_rate <=", value, "deliveryRate"); return (Criteria) this; } public Criteria andDeliveryRateIn(List<Integer> values) { addCriterion("delivery_rate in", values, "deliveryRate"); return (Criteria) this; } public Criteria andDeliveryRateNotIn(List<Integer> values) { addCriterion("delivery_rate not in", values, "deliveryRate"); return (Criteria) this; } public Criteria andDeliveryRateBetween(Integer value1, Integer value2) { addCriterion("delivery_rate between", value1, value2, "deliveryRate"); return (Criteria) this; } public Criteria andDeliveryRateNotBetween(Integer value1, Integer value2) { addCriterion("delivery_rate not between", value1, value2, "deliveryRate"); return (Criteria) this; } public Criteria andComprehensiveRateIsNull() { addCriterion("comprehensive_rate is null"); return (Criteria) this; } public Criteria andComprehensiveRateIsNotNull() { addCriterion("comprehensive_rate is not null"); return (Criteria) this; } public Criteria andComprehensiveRateEqualTo(Integer value) { addCriterion("comprehensive_rate =", value, "comprehensiveRate"); return (Criteria) this; } public Criteria andComprehensiveRateNotEqualTo(Integer value) { addCriterion("comprehensive_rate <>", value, "comprehensiveRate"); return (Criteria) this; } public Criteria andComprehensiveRateGreaterThan(Integer value) { addCriterion("comprehensive_rate >", value, "comprehensiveRate"); return (Criteria) this; } public Criteria andComprehensiveRateGreaterThanOrEqualTo(Integer value) { addCriterion("comprehensive_rate >=", value, "comprehensiveRate"); return (Criteria) this; } public Criteria andComprehensiveRateLessThan(Integer value) { addCriterion("comprehensive_rate <", value, "comprehensiveRate"); return (Criteria) this; } public Criteria andComprehensiveRateLessThanOrEqualTo(Integer value) { addCriterion("comprehensive_rate <=", value, "comprehensiveRate"); return (Criteria) this; } public Criteria andComprehensiveRateIn(List<Integer> values) { addCriterion("comprehensive_rate in", values, "comprehensiveRate"); return (Criteria) this; } public Criteria andComprehensiveRateNotIn(List<Integer> values) { addCriterion("comprehensive_rate 
not in", values, "comprehensiveRate"); return (Criteria) this; } public Criteria andComprehensiveRateBetween(Integer value1, Integer value2) { addCriterion("comprehensive_rate between", value1, value2, "comprehensiveRate"); return (Criteria) this; } public Criteria andComprehensiveRateNotBetween(Integer value1, Integer value2) { addCriterion("comprehensive_rate not between", value1, value2, "comprehensiveRate"); return (Criteria) this; } public Criteria andSupportCodIsNull() { addCriterion("support_cod is null"); return (Criteria) this; } public Criteria andSupportCodIsNotNull() { addCriterion("support_cod is not null"); return (Criteria) this; } public Criteria andSupportCodEqualTo(Boolean value) { addCriterion("support_cod =", value, "supportCod"); return (Criteria) this; } public Criteria andSupportCodNotEqualTo(Boolean value) { addCriterion("support_cod <>", value, "supportCod"); return (Criteria) this; } public Criteria andSupportCodGreaterThan(Boolean value) { addCriterion("support_cod >", value, "supportCod"); return (Criteria) this; } public Criteria andSupportCodGreaterThanOrEqualTo(Boolean value) { addCriterion("support_cod >=", value, "supportCod"); return (Criteria) this; } public Criteria andSupportCodLessThan(Boolean value) { addCriterion("support_cod <", value, "supportCod"); return (Criteria) this; } public Criteria andSupportCodLessThanOrEqualTo(Boolean value) { addCriterion("support_cod <=", value, "supportCod"); return (Criteria) this; } public Criteria andSupportCodLike(Boolean value) { addCriterion("support_cod like", value, "supportCod"); return (Criteria) this; } public Criteria andSupportCodNotLike(Boolean value) { addCriterion("support_cod not like", value, "supportCod"); return (Criteria) this; } public Criteria andSupportCodIn(List<Boolean> values) { addCriterion("support_cod in", values, "supportCod"); return (Criteria) this; } public Criteria andSupportCodNotIn(List<Boolean> values) { addCriterion("support_cod not in", values, "supportCod"); return (Criteria) this; } public Criteria andSupportCodBetween(Boolean value1, Boolean value2) { addCriterion("support_cod between", value1, value2, "supportCod"); return (Criteria) this; } public Criteria andSupportCodNotBetween(Boolean value1, Boolean value2) { addCriterion("support_cod not between", value1, value2, "supportCod"); return (Criteria) this; } public Criteria andSupportOnlinePayIsNull() { addCriterion("support_online_pay is null"); return (Criteria) this; } public Criteria andSupportOnlinePayIsNotNull() { addCriterion("support_online_pay is not null"); return (Criteria) this; } public Criteria andSupportOnlinePayEqualTo(Boolean value) { addCriterion("support_online_pay =", value, "supportOnlinePay"); return (Criteria) this; } public Criteria andSupportOnlinePayNotEqualTo(Boolean value) { addCriterion("support_online_pay <>", value, "supportOnlinePay"); return (Criteria) this; } public Criteria andSupportOnlinePayGreaterThan(Boolean value) { addCriterion("support_online_pay >", value, "supportOnlinePay"); return (Criteria) this; } public Criteria andSupportOnlinePayGreaterThanOrEqualTo(Boolean value) { addCriterion("support_online_pay >=", value, "supportOnlinePay"); return (Criteria) this; } public Criteria andSupportOnlinePayLessThan(Boolean value) { addCriterion("support_online_pay <", value, "supportOnlinePay"); return (Criteria) this; } public Criteria andSupportOnlinePayLessThanOrEqualTo(Boolean value) { addCriterion("support_online_pay <=", value, "supportOnlinePay"); return (Criteria) this; } public 
Criteria andSupportOnlinePayLike(Boolean value) { addCriterion("support_online_pay like", value, "supportOnlinePay"); return (Criteria) this; } public Criteria andSupportOnlinePayNotLike(Boolean value) { addCriterion("support_online_pay not like", value, "supportOnlinePay"); return (Criteria) this; } public Criteria andSupportOnlinePayIn(List<Boolean> values) { addCriterion("support_online_pay in", values, "supportOnlinePay"); return (Criteria) this; } public Criteria andSupportOnlinePayNotIn(List<Boolean> values) { addCriterion("support_online_pay not in", values, "supportOnlinePay"); return (Criteria) this; } public Criteria andSupportOnlinePayBetween(Boolean value1, Boolean value2) { addCriterion("support_online_pay between", value1, value2, "supportOnlinePay"); return (Criteria) this; } public Criteria andSupportOnlinePayNotBetween(Boolean value1, Boolean value2) { addCriterion("support_online_pay not between", value1, value2, "supportOnlinePay"); return (Criteria) this; } public Criteria andLockedIsNull() { addCriterion("locked is null"); return (Criteria) this; } public Criteria andLockedIsNotNull() { addCriterion("locked is not null"); return (Criteria) this; } public Criteria andLockedEqualTo(Boolean value) { addCriterion("locked =", value, "locked"); return (Criteria) this; } public Criteria andLockedNotEqualTo(Boolean value) { addCriterion("locked <>", value, "locked"); return (Criteria) this; } public Criteria andLockedGreaterThan(Boolean value) { addCriterion("locked >", value, "locked"); return (Criteria) this; } public Criteria andLockedGreaterThanOrEqualTo(Boolean value) { addCriterion("locked >=", value, "locked"); return (Criteria) this; } public Criteria andLockedLessThan(Boolean value) { addCriterion("locked <", value, "locked"); return (Criteria) this; } public Criteria andLockedLessThanOrEqualTo(Boolean value) { addCriterion("locked <=", value, "locked"); return (Criteria) this; } public Criteria andLockedLike(Boolean value) { addCriterion("locked like", value, "locked"); return (Criteria) this; } public Criteria andLockedNotLike(Boolean value) { addCriterion("locked not like", value, "locked"); return (Criteria) this; } public Criteria andLockedIn(List<Boolean> values) { addCriterion("locked in", values, "locked"); return (Criteria) this; } public Criteria andLockedNotIn(List<Boolean> values) { addCriterion("locked not in", values, "locked"); return (Criteria) this; } public Criteria andLockedBetween(Boolean value1, Boolean value2) { addCriterion("locked between", value1, value2, "locked"); return (Criteria) this; } public Criteria andLockedNotBetween(Boolean value1, Boolean value2) { addCriterion("locked not between", value1, value2, "locked"); return (Criteria) this; } public Criteria andUpdatedPersonIsNull() { addCriterion("updated_person is null"); return (Criteria) this; } public Criteria andUpdatedPersonIsNotNull() { addCriterion("updated_person is not null"); return (Criteria) this; } public Criteria andUpdatedPersonEqualTo(String value) { addCriterion("updated_person =", value, "updatedPerson"); return (Criteria) this; } public Criteria andUpdatedPersonNotEqualTo(String value) { addCriterion("updated_person <>", value, "updatedPerson"); return (Criteria) this; } public Criteria andUpdatedPersonGreaterThan(String value) { addCriterion("updated_person >", value, "updatedPerson"); return (Criteria) this; } public Criteria andUpdatedPersonGreaterThanOrEqualTo(String value) { addCriterion("updated_person >=", value, "updatedPerson"); return (Criteria) this; } 
public Criteria andUpdatedPersonLessThan(String value) { addCriterion("updated_person <", value, "updatedPerson"); return (Criteria) this; } public Criteria andUpdatedPersonLessThanOrEqualTo(String value) { addCriterion("updated_person <=", value, "updatedPerson"); return (Criteria) this; } public Criteria andUpdatedPersonLike(String value) { addCriterion("updated_person like", value, "updatedPerson"); return (Criteria) this; } public Criteria andUpdatedPersonNotLike(String value) { addCriterion("updated_person not like", value, "updatedPerson"); return (Criteria) this; } public Criteria andUpdatedPersonIn(List<String> values) { addCriterion("updated_person in", values, "updatedPerson"); return (Criteria) this; } public Criteria andUpdatedPersonNotIn(List<String> values) { addCriterion("updated_person not in", values, "updatedPerson"); return (Criteria) this; } public Criteria andUpdatedPersonBetween(String value1, String value2) { addCriterion("updated_person between", value1, value2, "updatedPerson"); return (Criteria) this; } public Criteria andUpdatedPersonNotBetween(String value1, String value2) { addCriterion("updated_person not between", value1, value2, "updatedPerson"); return (Criteria) this; } public Criteria andUpdatedTimestampIsNull() { addCriterion("updated_timestamp is null"); return (Criteria) this; } public Criteria andUpdatedTimestampIsNotNull() { addCriterion("updated_timestamp is not null"); return (Criteria) this; } public Criteria andUpdatedTimestampEqualTo(Date value) { addCriterion("updated_timestamp =", value, "updatedTimestamp"); return (Criteria) this; } public Criteria andUpdatedTimestampNotEqualTo(Date value) { addCriterion("updated_timestamp <>", value, "updatedTimestamp"); return (Criteria) this; } public Criteria andUpdatedTimestampGreaterThan(Date value) { addCriterion("updated_timestamp >", value, "updatedTimestamp"); return (Criteria) this; } public Criteria andUpdatedTimestampGreaterThanOrEqualTo(Date value) { addCriterion("updated_timestamp >=", value, "updatedTimestamp"); return (Criteria) this; } public Criteria andUpdatedTimestampLessThan(Date value) { addCriterion("updated_timestamp <", value, "updatedTimestamp"); return (Criteria) this; } public Criteria andUpdatedTimestampLessThanOrEqualTo(Date value) { addCriterion("updated_timestamp <=", value, "updatedTimestamp"); return (Criteria) this; } public Criteria andUpdatedTimestampIn(List<Date> values) { addCriterion("updated_timestamp in", values, "updatedTimestamp"); return (Criteria) this; } public Criteria andUpdatedTimestampNotIn(List<Date> values) { addCriterion("updated_timestamp not in", values, "updatedTimestamp"); return (Criteria) this; } public Criteria andUpdatedTimestampBetween(Date value1, Date value2) { addCriterion("updated_timestamp between", value1, value2, "updatedTimestamp"); return (Criteria) this; } public Criteria andUpdatedTimestampNotBetween(Date value1, Date value2) { addCriterion("updated_timestamp not between", value1, value2, "updatedTimestamp"); return (Criteria) this; } public Criteria andCreatedPersonIsNull() { addCriterion("created_person is null"); return (Criteria) this; } public Criteria andCreatedPersonIsNotNull() { addCriterion("created_person is not null"); return (Criteria) this; } public Criteria andCreatedPersonEqualTo(String value) { addCriterion("created_person =", value, "createdPerson"); return (Criteria) this; } public Criteria andCreatedPersonNotEqualTo(String value) { addCriterion("created_person <>", value, "createdPerson"); return (Criteria) this; } 
public Criteria andCreatedPersonGreaterThan(String value) { addCriterion("created_person >", value, "createdPerson"); return (Criteria) this; } public Criteria andCreatedPersonGreaterThanOrEqualTo(String value) { addCriterion("created_person >=", value, "createdPerson"); return (Criteria) this; } public Criteria andCreatedPersonLessThan(String value) { addCriterion("created_person <", value, "createdPerson"); return (Criteria) this; } public Criteria andCreatedPersonLessThanOrEqualTo(String value) { addCriterion("created_person <=", value, "createdPerson"); return (Criteria) this; } public Criteria andCreatedPersonLike(String value) { addCriterion("created_person like", value, "createdPerson"); return (Criteria) this; } public Criteria andCreatedPersonNotLike(String value) { addCriterion("created_person not like", value, "createdPerson"); return (Criteria) this; } public Criteria andCreatedPersonIn(List<String> values) { addCriterion("created_person in", values, "createdPerson"); return (Criteria) this; } public Criteria andCreatedPersonNotIn(List<String> values) { addCriterion("created_person not in", values, "createdPerson"); return (Criteria) this; } public Criteria andCreatedPersonBetween(String value1, String value2) { addCriterion("created_person between", value1, value2, "createdPerson"); return (Criteria) this; } public Criteria andCreatedPersonNotBetween(String value1, String value2) { addCriterion("created_person not between", value1, value2, "createdPerson"); return (Criteria) this; } public Criteria andCreatedTimestampIsNull() { addCriterion("created_timestamp is null"); return (Criteria) this; } public Criteria andCreatedTimestampIsNotNull() { addCriterion("created_timestamp is not null"); return (Criteria) this; } public Criteria andCreatedTimestampEqualTo(Date value) { addCriterion("created_timestamp =", value, "createdTimestamp"); return (Criteria) this; } public Criteria andCreatedTimestampNotEqualTo(Date value) { addCriterion("created_timestamp <>", value, "createdTimestamp"); return (Criteria) this; } public Criteria andCreatedTimestampGreaterThan(Date value) { addCriterion("created_timestamp >", value, "createdTimestamp"); return (Criteria) this; } public Criteria andCreatedTimestampGreaterThanOrEqualTo(Date value) { addCriterion("created_timestamp >=", value, "createdTimestamp"); return (Criteria) this; } public Criteria andCreatedTimestampLessThan(Date value) { addCriterion("created_timestamp <", value, "createdTimestamp"); return (Criteria) this; } public Criteria andCreatedTimestampLessThanOrEqualTo(Date value) { addCriterion("created_timestamp <=", value, "createdTimestamp"); return (Criteria) this; } public Criteria andCreatedTimestampIn(List<Date> values) { addCriterion("created_timestamp in", values, "createdTimestamp"); return (Criteria) this; } public Criteria andCreatedTimestampNotIn(List<Date> values) { addCriterion("created_timestamp not in", values, "createdTimestamp"); return (Criteria) this; } public Criteria andCreatedTimestampBetween(Date value1, Date value2) { addCriterion("created_timestamp between", value1, value2, "createdTimestamp"); return (Criteria) this; } public Criteria andCreatedTimestampNotBetween(Date value1, Date value2) { addCriterion("created_timestamp not between", value1, value2, "createdTimestamp"); return (Criteria) this; } } public static class Criteria extends GeneratedCriteria { protected Criteria() { super(); } } public static class Criterion { private String condition; private Object value; private Object secondValue; private 
boolean noValue; private boolean singleValue; private boolean betweenValue; private boolean listValue; private String typeHandler; public String getCondition() { return condition; } public Object getValue() { return value; } public Object getSecondValue() { return secondValue; } public boolean isNoValue() { return noValue; } public boolean isSingleValue() { return singleValue; } public boolean isBetweenValue() { return betweenValue; } public boolean isListValue() { return listValue; } public String getTypeHandler() { return typeHandler; } protected Criterion(String condition) { super(); this.condition = condition; this.typeHandler = null; this.noValue = true; } protected Criterion(String condition, Object value, String typeHandler) { super(); this.condition = condition; this.value = value; this.typeHandler = typeHandler; if (value instanceof List<?>) { this.listValue = true; } else { this.singleValue = true; } } protected Criterion(String condition, Object value) { this(condition, value, null); } protected Criterion(String condition, Object value, Object secondValue, String typeHandler) { super(); this.condition = condition; this.value = value; this.secondValue = secondValue; this.typeHandler = typeHandler; this.betweenValue = true; } protected Criterion(String condition, Object value, Object secondValue) { this(condition, value, secondValue, null); } } }
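/*
 * Usage sketch (not part of the generated class): TcShopExample follows the MyBatis Generator
 * "example" pattern, where each createCriteria()/or() call opens an AND-ed criteria group and
 * the groups are OR-ed together. The example below uses only methods defined above; the mapper
 * that would eventually consume it (e.g. a selectByExample method) is not part of this file and
 * is mentioned purely as an assumption.
 */
import java.util.List;
import com.ysu.zyw.tc.api.dao.po.TcShopExample;

class TcShopExampleUsageSketch {
    public static void main(String[] args) {
        TcShopExample example = new TcShopExample();

        // First criteria group: unlocked shops whose name matches a pattern.
        example.createCriteria()
               .andLockedEqualTo(false)
               .andNameLike("%coffee%");

        // OR-ed second group: shops with a high comprehensive rating.
        example.or()
               .andComprehensiveRateGreaterThanOrEqualTo(90);

        // Ordering plus the generator-specific pagination fields of this class.
        example.setOrderByClause("created_timestamp desc");
        example.setStartLine(0);
        example.setPageSize(20);

        // Normally the example would be handed to a MyBatis mapper
        // (e.g. tcShopMapper.selectByExample(example) - assumed, not shown here).
        List<TcShopExample.Criteria> groups = example.getOredCriteria();
        System.out.println("criteria groups: " + groups.size()
                + ", conditions in first group: " + groups.get(0).getAllCriteria().size());
    }
}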
/* The contents of this file are subject to the license and copyright terms * detailed in the license directory at the root of the source tree (also * available online at http://fedora-commons.org/license/). */ package org.fcrepo.server.security.servletfilters; import java.util.Enumeration; import javax.servlet.Filter; import javax.servlet.FilterChain; import javax.servlet.FilterConfig; import javax.servlet.ServletException; import javax.servlet.ServletRequest; import javax.servlet.ServletResponse; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * @author Bill Niebel */ public class FilterSetup extends Base implements Filter { private static final Logger logger = LoggerFactory.getLogger(FilterSetup.class); protected static final String NOT_SET = "NOT SET"; protected String FILTER_NAME = NOT_SET; protected boolean inited = false; public static final String getFilterNameAbbrev(String filterName) { LoggerFactory.getLogger(FilterSetup.class).debug(">>>>>>>>>>>>>>>>>>" + filterName); String rc = filterName; if ("XmlUserfileFilter".equals(filterName)) { rc = "X"; } else if ("PubcookieFilter".equals(filterName)) { rc = "P"; } else if ("LdapFilter".equals(filterName)) { rc = "L"; } else if ("LdapFilterForAttributes".equals(filterName)) { rc = "A"; } else if ("LdapFilterForGroups".equals(filterName)) { rc = "G"; } return rc; } public void init(FilterConfig filterConfig) { String method = "init() "; if (logger.isDebugEnabled()) { logger.debug(enter(method)); } inited = false; initErrors = false; if (filterConfig != null) { FILTER_NAME = filterConfig.getFilterName(); if (FILTER_NAME == null || "".equals(FILTER_NAME)) { if (logger.isErrorEnabled()) { logger.error(format(method, "FILTER_NAME not set")); } } else { if (logger.isDebugEnabled()) { logger.debug(format(method, null, "FILTER_NAME", FILTER_NAME)); } Enumeration enumer = filterConfig.getInitParameterNames(); while (enumer.hasMoreElements()) { String key = (String) enumer.nextElement(); String value = filterConfig.getInitParameter(key); initThisSubclass(key, value); } inited = true; } } if (logger.isDebugEnabled()) { logger.debug(exit(method)); } } public void destroy() { String method = "destroy()"; if (logger.isDebugEnabled()) { logger.debug(enter(method)); } if (logger.isDebugEnabled()) { logger.debug(exit(method)); } } @Override protected void initThisSubclass(String key, String value) { logger.debug("AF.iTS"); String method = "initThisSubclass() "; if (logger.isDebugEnabled()) { logger.debug(enter(method)); } super.initThisSubclass(key, value); if (logger.isDebugEnabled()) { logger.debug(exit(method)); } } public ExtendedHttpServletRequest wrap(HttpServletRequest httpServletRequest) throws Exception { String method = "wrap() "; if (logger.isDebugEnabled()) { logger.debug(enter(method)); } ExtendedHttpServletRequestWrapper wrap = new ExtendedHttpServletRequestWrapper(httpServletRequest); if (logger.isDebugEnabled()) { logger.debug(exit(method)); } return wrap; } public boolean doThisSubclass(ExtendedHttpServletRequest extendedHttpServletRequest, HttpServletResponse response) throws Throwable { String method = "doThisSubclass() "; if (logger.isDebugEnabled()) { logger.debug(enter(method)); } String test = null; test = "init"; if (!inited || initErrors) { if (logger.isErrorEnabled()) { logger.error("inited==" + inited); } if (logger.isErrorEnabled()) { logger.error("initErrors==" + initErrors); } String msg = fail(method, test); if 
(logger.isErrorEnabled()) { logger.error(msg); } throw new Exception(msg); } if (logger.isDebugEnabled()) { logger.debug(pass(method, test)); } test = "HttpServletRequest"; if (!(extendedHttpServletRequest instanceof HttpServletRequest)) { String msg = fail(method, test); if (logger.isErrorEnabled()) { logger.error(msg); } throw new Exception(msg); } if (logger.isDebugEnabled()) { logger.debug(pass(method, test)); } if (logger.isDebugEnabled()) { logger.debug(exit(method)); } return false; // i.e., don't signal to terminate servlet filter chain } public void doFilter(ServletRequest servletRequest, ServletResponse response, FilterChain chain) throws ServletException { String method = "doFilter() "; if (logger.isDebugEnabled()) { logger.debug(enter(method)); } if (logger.isDebugEnabled()) { logger.debug(format(method, "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX")); } if (logger.isDebugEnabled()) { logger.debug(format(method, "FILTER_NAME", FILTER_NAME)); } String test = null; boolean terminateServletFilterChain = false; ExtendedHttpServletRequest extendedHttpServletRequest = null; try { //only one filter should wrap if (servletRequest instanceof ExtendedHttpServletRequest) { logger.debug(format(method, "using existing request...")); extendedHttpServletRequest = (ExtendedHttpServletRequest) servletRequest; } else { if (logger.isDebugEnabled()) { logger.debug(format(method, "wrapping request...")); } extendedHttpServletRequest = wrap((HttpServletRequest) servletRequest); } test = "HttpServletResponse"; if (!(response instanceof HttpServletResponse)) { String msg = fail(method, test); if (logger.isErrorEnabled()) { logger.error(msg); } throw new Exception(msg); } if (logger.isDebugEnabled()) { logger.debug(pass(method, test)); } terminateServletFilterChain = doThisSubclass(extendedHttpServletRequest, (HttpServletResponse) response); } catch (Throwable th) { logger.error("Error processing filter", th); //current filter should not break the filter chain -- go ahead, regardless of internal failure } try { if (logger.isDebugEnabled()) { logger.debug(format(method, "before next doFilter()")); logger.debug(format(method, null, "extendedHttpServletRequest") + extendedHttpServletRequest); logger.debug(format(method, "extendedHttpServletRequest", extendedHttpServletRequest.getClass() .getName())); logger.debug(format(method, null, "response" + response)); } if (terminateServletFilterChain) { logger.debug(format(method, "terminating servlet filter chain")); } else { chain.doFilter(extendedHttpServletRequest, response); } if (logger.isDebugEnabled()) { logger.debug("back from next doFilter()"); } } catch (ServletException e) { throw e; } catch (Throwable th) { logger.error("Can't do next doFilter()", th); } finally { if (logger.isDebugEnabled()) { logger.debug(exit(method)); } } } }
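/*
 * Usage sketch (not part of the Fedora source): FilterSetup is designed to be subclassed.
 * init() feeds each web.xml init-param through initThisSubclass(), and doFilter() delegates the
 * per-request decision to doThisSubclass(), whose boolean return terminates the filter chain
 * when true. The subclass below is a minimal illustration under those assumptions; the filter
 * name, the "required-header" init-param, and the availability of getHeader() on
 * ExtendedHttpServletRequest (via its HttpServletRequest wrapper) are assumptions, not
 * behaviour documented in this file.
 */
import javax.servlet.http.HttpServletResponse;
import org.fcrepo.server.security.servletfilters.ExtendedHttpServletRequest;
import org.fcrepo.server.security.servletfilters.FilterSetup;

public class RequireHeaderFilter extends FilterSetup {

    private String requiredHeader = "X-Example-Token"; // illustrative default, overridable via init-param

    @Override
    protected void initThisSubclass(String key, String value) {
        // Each init-param arrives here; unknown keys fall through to the base class.
        if ("required-header".equals(key)) {
            requiredHeader = value;
        } else {
            super.initThisSubclass(key, value);
        }
    }

    @Override
    public boolean doThisSubclass(ExtendedHttpServletRequest request,
                                  HttpServletResponse response) throws Throwable {
        // Run the base class init/instanceof sanity checks first.
        super.doThisSubclass(request, response);
        // getHeader() is assumed to be exposed by the request wrapper.
        if (request.getHeader(requiredHeader) == null) {
            response.sendError(HttpServletResponse.SC_FORBIDDEN, "missing " + requiredHeader);
            return true;   // true signals doFilter() to terminate the servlet filter chain
        }
        return false;      // false lets the chain continue
    }
}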
/** * $Id$ * $URL$ * TemplateParseUtil.java - entity-broker - Apr 10, 2008 9:57:29 AM - azeckoski ************************************************************************** * Copyright (c) 2008, 2009 The Sakai Foundation * * Licensed under the Educational Community License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ECL-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sakaiproject.entitybroker.util; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.UUID; import java.util.Map.Entry; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.sakaiproject.entitybroker.EntityView; import org.sakaiproject.entitybroker.entityprovider.extension.Formats; /** * Utility class to handle the URL template parsing (entity template parsing) * * @author Aaron Zeckoski (aaron@caret.cam.ac.uk) */ public class TemplateParseUtil { public static final char SEPARATOR = EntityView.SEPARATOR; public static final char PERIOD = EntityView.PERIOD; public static final String BRACES = "[\\{\\}]"; /** * The entity prefix marker (Example value: "myprefix") */ public static final String PREFIX = EntityView.PREFIX; /** * The entity ID marker (Example value: "123") */ public static final String ID = EntityView.ID; /** * The entity extension (format) marker (Example value: "xml") */ public static final String EXTENSION = "extension"; /** * The extension with a period in front marker (Example value: ".xml") */ public static final String DOT_EXTENSION = "dot-extension"; /** * The value in the query string (without a leading ?), '' if non available (Example value: "auto=true") */ public static final String QUERY_STRING = "query-string"; /** * The value in the query string (with a leading ?), '' if non available (Example value: "?auto=true") */ public static final String QUESTION_QUERY_STRING = "question-query-string"; public static final String PREFIX_VARIABLE = "{"+PREFIX+"}"; public static final String TEMPLATE_PREFIX = SEPARATOR + PREFIX_VARIABLE; public static final String DIRECT_PREFIX = EntityView.DIRECT_PREFIX; public static final String DIRECT_PREFIX_SLASH = DIRECT_PREFIX + SEPARATOR; /** * Defines the valid chars for a replacement variable */ public static final String VALID_VAR_CHARS = "[\\p{L}}\\p{N}\\\\(\\\\)\\+\\*\\.\\-_=,:;!~@% ]"; /** * Defines the valid chars for a parser input (e.g. entity reference) */ public static final String VALID_INPUT_CHARS = "[\\p{L}\\p{N}\\\\\\(\\\\)\\+\\*\\.\\-_=,:;!~@% "+SEPARATOR+"]"; /** * Defines the valid chars for a template */ public static final String VALID_TEMPLATE_CHARS = "[\\p{L}\\p{N}\\\\\\(\\\\)\\+\\*\\.\\-_=,:;&!~@%"+SEPARATOR+"\\{\\}]"; /** * Defines the valid template chars for an outgoing template (allows ?) 
*/ public static final String VALID_TEMPLATE_CHARS_OUTGOING = "[\\p{L}\\p{N}\\\\\\(\\\\)\\+\\*\\.\\-_=,:;&!~@%"+SEPARATOR+"\\{\\}\\?]"; /** * Defines the parse template for the "list" operation, * return a list of all records, * typically /{prefix} */ public static final String TEMPLATE_LIST = EntityView.VIEW_LIST; /** * Defines the parse template for the "show" operation, * access a record OR POST operations related to a record, * typically /{prefix}/{id} */ public static final String TEMPLATE_SHOW = EntityView.VIEW_SHOW; /** * Defines the parse template for the "new" operation, * return a form for creating a new record, * typically /{prefix}/new */ public static final String TEMPLATE_NEW = EntityView.VIEW_NEW; /** * Defines the parse template for the "edit" operation, * access the data to modify a record, * typically /{prefix}/{id}/edit */ public static final String TEMPLATE_EDIT = EntityView.VIEW_EDIT; /** * Defines the parse template for the "delete" operation, * access the data to remove a record, * typically /{prefix}/{id}/delete */ public static final String TEMPLATE_DELETE = EntityView.VIEW_DELETE; /** * Defines the order that parse templates will be processed in and * the set of parse template types (keys) which must be defined, * the first one to match will be used when parsing in a path */ public static final String[] PARSE_TEMPLATE_KEYS = { TEMPLATE_EDIT, TEMPLATE_DELETE, TEMPLATE_NEW, TEMPLATE_SHOW, TEMPLATE_LIST }; /** * Stores the preloaded default templates */ public static final List<Template> defaultTemplates; /** * Stores the preloaded processed default templates */ public static final List<PreProcessedTemplate> defaultPreprocessedTemplates; /** * Contains a set of all the common extensions */ public static Set<String> commonExtensions = new HashSet<String>(20); // static initializer static { defaultTemplates = new ArrayList<Template>(); // this load order should match the array above defaultTemplates.add( new Template(TEMPLATE_EDIT, TEMPLATE_PREFIX + SEPARATOR + "{"+ID+"}" + SEPARATOR + "edit") ); defaultTemplates.add( new Template(TEMPLATE_DELETE, TEMPLATE_PREFIX + SEPARATOR + "{"+ID+"}" + SEPARATOR + "delete") ); defaultTemplates.add( new Template(TEMPLATE_NEW, TEMPLATE_PREFIX + SEPARATOR + "new") ); defaultTemplates.add( new Template(TEMPLATE_SHOW, TEMPLATE_PREFIX + SEPARATOR + "{"+ID+"}") ); defaultTemplates.add( new Template(TEMPLATE_LIST, TEMPLATE_PREFIX) ); defaultPreprocessedTemplates = preprocessTemplates(defaultTemplates); // populate the list of common extensions Collections.addAll(commonExtensions, Formats.JSON_EXTENSIONS); Collections.addAll(commonExtensions, Formats.XML_EXTENSIONS); Collections.addAll(commonExtensions, Formats.HTML_EXTENSIONS); Collections.addAll(commonExtensions, Formats.FORM_EXTENSIONS); Collections.addAll(commonExtensions, Formats.JSONP_EXTENSIONS); Collections.addAll(commonExtensions, Formats.ATOM_EXTENSIONS); Collections.addAll(commonExtensions, Formats.RSS_EXTENSIONS); // also image extensions and other common ones commonExtensions.add("png"); commonExtensions.add("jpg"); commonExtensions.add("gif"); commonExtensions.add("jpeg"); commonExtensions.add("csv"); } /** * Check if a templateKey is valid, if not then throws {@link IllegalArgumentException} * @param templateKey a key from the set of template keys {@link #PARSE_TEMPLATE_KEYS} */ public static void validateTemplateKey(String templateKey) { boolean found = false; for (int i = 0; i < PARSE_TEMPLATE_KEYS.length; i++) { if (PARSE_TEMPLATE_KEYS[i].equals(templateKey)) { found = 
true; break; } } if (! found) { throw new IllegalArgumentException("Invalid parse template key: " + templateKey); } } /** * Get a default template for a specific template key * @param templateKey a key from the set of template keys {@link #PARSE_TEMPLATE_KEYS} * @return the template * @throws IllegalArgumentException if the template key is invalid */ public static String getDefaultTemplate(String templateKey) { String template = null; for (Template t : defaultTemplates) { if (t.templateKey.equals(templateKey)) { template = t.template; } } if (template == null) { throw new IllegalArgumentException("No default template available for this key: " + templateKey); } return template; } /** * Validate a template, if invalid then an exception is thrown * @param template a parse template */ public static void validateTemplate(String template) { if (template == null || "".equals(template)) { throw new IllegalArgumentException("Template cannot be null or empty string"); } else if (template.charAt(0) != SEPARATOR) { throw new IllegalArgumentException("Template ("+template+") must start with " + SEPARATOR); } else if (template.charAt(template.length()-1) == SEPARATOR) { throw new IllegalArgumentException("Template ("+template+") cannot end with " + SEPARATOR); } else if (! template.startsWith(TEMPLATE_PREFIX)) { throw new IllegalArgumentException("Template ("+template+") must start with: " + TEMPLATE_PREFIX + " :: that is SEPARATOR + \"{\"+PREFIX+\"}\""); } else if (template.indexOf("}{") != -1) { throw new IllegalArgumentException("Template ("+template+") replacement variables ({var}) " + "cannot be next to each other, " + "there must be something between each template variable"); } else if (template.indexOf("{}") != -1) { throw new IllegalArgumentException("Template ("+template+") replacement variables ({var}) " + "cannot be empty ({}), there must be a value between them"); } else if (! template.matches(VALID_TEMPLATE_CHARS+"+")) { // take out {} and check if the template uses valid chars throw new IllegalArgumentException("Template ("+template+") can only contain the following (not counting []): " + VALID_TEMPLATE_CHARS); } } /** * Validates an outgoing template to make sure it is valid * @param template an outgoing template, * if starts with / then it will be used as is and redirected to, * otherwise it will have the direct URL prefixed and will be forwarded * @return the template which should be completely valid */ public static String validateOutgoingTemplate(String template) { String validTemplate = template; if (template == null || "".equals(template)) { throw new IllegalArgumentException("Template cannot be null or empty string"); } else if (template.indexOf("{}") != -1) { throw new IllegalArgumentException("Template ("+template+") replacement variables ({var}) " + "cannot be empty ({}), there must be a value between them"); } else if (! 
template.matches(VALID_TEMPLATE_CHARS_OUTGOING+"+")) { // take out {} and check if the template uses valid chars throw new IllegalArgumentException("Template ("+template+") can only contain the following (not counting []): " + VALID_TEMPLATE_CHARS_OUTGOING); } if (template.startsWith(PREFIX_VARIABLE)) { validTemplate = DIRECT_PREFIX + SEPARATOR + template; } else if (template.startsWith(TEMPLATE_PREFIX)) { validTemplate = DIRECT_PREFIX + template; } else if (SEPARATOR != template.charAt(0)) { // assume the user wants /direct/ added to the URL validTemplate = DIRECT_PREFIX + SEPARATOR + template; } return validTemplate; } /** * Takes a template and replaces the segment keys with the segment values, * keys should not have {} around them yet as these will be added around each key * in the segments map * * @param template a parse template with {variables} in it * @param segments a map of all possible segment keys and values, * unused keys will be ignored * @return the template with replacement values filled in * @throws IllegalArgumentException if all template variables cannot be replaced or template is empty/null */ public static String mergeTemplate(String template, Map<String, String> segments) { if (template == null || "".equals(template) || segments == null) { throw new IllegalArgumentException("Cannot operate on null template/segments, template must not be empty"); } int vars = 0; char[] chars = template.toCharArray(); for (int i = 0; i < chars.length; i++) { if (chars[i] == '{') { vars++; } } String reference = template; int replacements = 0; for (Entry<String, String> es : segments.entrySet()) { String keyBraces = "{"+es.getKey()+"}"; if (reference.contains(keyBraces)) { reference = reference.replace(keyBraces, es.getValue()); replacements++; } } if (replacements != vars) { throw new IllegalArgumentException("Failed merge, could not replace all variables ("+vars +") in the template, only replaced " + replacements); } return reference; } /** * Find the extension from a string and return the string without the extension and the extension, * an extension is a period (".") followed by any number of non-periods, * the original input is returned as the 0th item in the array <br/> * returned array contains 3 strings: <br/> * 0 = the original input string <br/> * 1 = the string without the extension or the original if it has none <br/> * 2 = the extension OR null if there is no extension <br/> * * @param input any string * @return an array with the string without the extension or the original if it has none in position 1 * and the extension in the position 2 (or null if no extension), position 0 holds the original input string */ public static String[] findExtension(String input) { // regex pattern: ".*(\\.[^.]+|$)" String stripped = input; String extension = null; if (input != null) { int extensionLoc = input.lastIndexOf(PERIOD, input.length()); if (extensionLoc == 0) { // starts with a period so no extension, do nothing } else { int sepLoc = input.lastIndexOf(SEPARATOR, input.length()); if (extensionLoc > 0 && sepLoc < extensionLoc) { stripped = input.substring(0, extensionLoc); if ( (input.length() - 1) > extensionLoc) { extension = input.substring(extensionLoc + 1); // we only consider it an extension if we recognize the type if (!commonExtensions.contains(extension)) { stripped = input; extension = null; } } } } } return new String[] {input, stripped, extension}; } /** * Parse a string and attempt to match it to a template and then * return the match information along with all the parsed 
out keys and values<br/> * * @param input a string which we want to attempt to match to one of the templates * @param preprocessed the analyzed templates to attempt to match in the order they should attempt the match, * can be a single template or multiples, use {@link #preprocessTemplates(List)} to create this * (recommend caching the preprocessed templates to avoid reprocessing them over and over) * @return a the processed template analysis object OR null if no matches */ public static ProcessedTemplate parseTemplate(String input, List<PreProcessedTemplate> preprocessed) { if (preprocessed == null) { preprocessed = defaultPreprocessedTemplates; } if (input == null || "".equals(input)) { throw new IllegalArgumentException("input cannot be null or empty"); } if (! input.matches(VALID_INPUT_CHARS+"+")) { throw new IllegalArgumentException("input must consist of the following chars only (not counting []): " + VALID_INPUT_CHARS); } ProcessedTemplate analysis = null; Map<String, String> segments = new HashMap<String, String>(); // strip off the extension if there is one String[] ext = findExtension(input); input = ext[1]; String extension = ext[2]; // try to get matches for (PreProcessedTemplate ppt : preprocessed) { segments.clear(); String regex = ppt.regex + "(?:/"+VALID_INPUT_CHARS+"+|$)"; // match extras if there are any (greedy match) Pattern p = Pattern.compile(regex); Matcher m = p.matcher(input); if ( m.matches() ) { if ( m.groupCount() == ppt.variableNames.size() ) { for (int j = 0; j < m.groupCount(); j++) { String subseq = m.group(j+1); // ignore first group, it is the whole pattern if (subseq != null) { segments.put(ppt.variableNames.get(j), subseq); } } // fill in the analysis object analysis = new ProcessedTemplate(ppt.templateKey, ppt.template, regex, new ArrayList<String>(ppt.variableNames), new HashMap<String, String>(segments), extension); break; } } } if (analysis == null) { // no matches so should we die? 
} return analysis; } /** * Process the templates before attempting to match them, * this is here so we can reduce the load of reprocessing the same templates over and over * @param templates the templates to attempt to preprocess, can be a single template or multiples * @return the list of preprocessed templates (in the same order as input) */ public static List<PreProcessedTemplate> preprocessTemplates(List<Template> templates) { if (templates == null) { templates = defaultTemplates; } List<PreProcessedTemplate> analyzedTemplates = new ArrayList<PreProcessedTemplate>(); for (Template t : templates) { analyzedTemplates.add( preprocessTemplate(t) ); } return analyzedTemplates; } /** * Process a template into a preprocessed template which can be cached * @param t the template * @return the preprocessed template */ public static PreProcessedTemplate preprocessTemplate(Template t) { if (t.incoming) { TemplateParseUtil.validateTemplate(t.template); } else { t.template = TemplateParseUtil.validateOutgoingTemplate(t.template); } List<String> vars = new ArrayList<String>(); StringBuilder regex = new StringBuilder(); String[] parts = t.template.split(BRACES); for (int j = 0; j < parts.length; j++) { String part = parts[j]; if (j % 2 == 0) { // even-index parts are the literal text between the {var} markers // check for regex chars and escape them regex.append(part.replace("?", "\\?").replace(".", "\\.").replace("*", "\\*").replace("+", "\\+") .replace("-", "\\-").replace("(", "\\\\(").replace(")", "\\\\)")); } else { // odd-index parts are the replacement variable names vars.add(part); regex.append("("); regex.append(VALID_VAR_CHARS); regex.append("+)"); } } return new PreProcessedTemplate(t.templateKey, t.template, regex.toString(), new ArrayList<String>(vars)); } /** * Represents a parseable template (which is basically a key and the template string), * the array which defines the set of template keys is {@link #PARSE_TEMPLATE_KEYS}<br/> * Rules for parse templates: * 1) "{","}", and {@link #SEPARATOR} are special characters and must be used as indicated only * 2) Must begin with a {@link #SEPARATOR}, must not end with a {@link #SEPARATOR} * 3) Must begin with "/{prefix}" (use the {@link #SEPARATOR} and {@link #PREFIX} constants) * 4) each {var} can only be used once in a template * 5) {var} can never touch each other (i.e /{var1}{var2}/{id} is invalid) * 6) each {var} can only have the chars from {@link TemplateParseUtil#VALID_VAR_CHARS} * 7) parse templates can only have the chars from {@link TemplateParseUtil#VALID_TEMPLATE_CHARS} * 8) Empty braces ({}) cannot appear in the template * * @author Aaron Zeckoski (aaron@caret.cam.ac.uk) */ public static class Template { /** * the template key, from the set of template keys {@link #PARSE_TEMPLATE_KEYS}, * or make one up for your own templates, should be unique for this set of templates */ public String templateKey; /** * the template itself */ public String template; /** * indicates the template is an incoming template if true, outgoing template if false */ public boolean incoming = true; /** * Used to create a template for loading, defaults to an incoming template * @param templateKey template identifier, from the set of template keys {@link #PARSE_TEMPLATE_KEYS}, * must be unique for this set of templates * @param template the parseable template */ public Template(String templateKey, String template) { if (templateKey == null || "".equals(templateKey)) { templateKey = UUID.randomUUID().toString(); } this.templateKey = templateKey;
this.template = template; } /** * Used to create a template for loading * @param templateKey template identifier, from the set of template keys {@link #PARSE_TEMPLATE_KEYS}, * must be unique for this set of templates * @param template the parseable template * @param incoming if true then this is an incoming template, otherwise it is an outgoing one */ public Template(String templateKey, String template, boolean incoming) { this(templateKey, template); this.incoming = incoming; } @Override public boolean equals(Object obj) { if (null == obj) return false; if (!(obj instanceof Template)) return false; else { Template castObj = (Template) obj; if (null == this.templateKey || null == castObj.templateKey) return false; else return ( this.templateKey.equals(castObj.templateKey) ); } } @Override public int hashCode() { if (null == this.templateKey) return super.hashCode(); String hashStr = this.getClass().getName() + ":" + this.templateKey.hashCode(); return hashStr.hashCode(); } } /** * Contains the data for templates, * each template must have a template key and the template itself * * @author Aaron Zeckoski (aaron@caret.cam.ac.uk) */ public static class PreProcessedTemplate extends Template { /** * The regular expression to match this template exactly */ public String regex; /** * The list of variable names found in this template */ public List<String> variableNames; protected PreProcessedTemplate(String templateKey, String template, String regex, List<String> variableNames) { super(templateKey, template); this.regex = regex; this.variableNames = variableNames; } } /** * Contains the processed template with the values from the processed input string * that was determined to be related to this template * * @author Aaron Zeckoski (aaron@caret.cam.ac.uk) */ public static class ProcessedTemplate extends PreProcessedTemplate { /** * The list of segment values (variableName -> matched value), * this will be filled in by the {@link TemplateParseUtil#parseTemplate(String, Map)} method * and will be null otherwise */ public Map<String, String> segmentValues; /** * The extension found while processing the input string, * null if none could be found */ public String extension; public ProcessedTemplate(String templateKey, String template, String regex, List<String> variableNames, Map<String, String> segmentValues, String extension) { super(templateKey, template, regex, variableNames); this.segmentValues = segmentValues; this.extension = extension; } } }
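/*
 * Illustrative usage sketch (not part of the original TemplateParseUtil source above).
 * It exercises the default templates defined in the class: parsing an incoming path
 * into a template key plus segment values, and merging segment values back into a
 * template. The entity prefix "myentity", the id "123" and the segment key names
 * "prefix"/"id" are assumptions made for this example only.
 */
class TemplateParseUtilUsageSketch {
    public static void main(String[] args) {
        // Parse an incoming path; passing null uses the preprocessed default templates.
        TemplateParseUtil.ProcessedTemplate parsed =
                TemplateParseUtil.parseTemplate("/myentity/123/edit.xml", null);
        if (parsed != null) {
            // Expected (under the default templates): the "edit" template key,
            // segment values for the prefix and id, and extension "xml".
            System.out.println(parsed.templateKey);
            System.out.println(parsed.segmentValues);
            System.out.println(parsed.extension);
        }

        // Merge segment values back into a template; every {var} must be replaced.
        java.util.Map<String, String> segments = new java.util.HashMap<String, String>();
        segments.put("prefix", "myentity");
        segments.put("id", "123");
        // Expected result: "/myentity/123/edit"
        System.out.println(TemplateParseUtil.mergeTemplate("/{prefix}/{id}/edit", segments));
    }
}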
package com.github.sarxos.webcam; import static java.awt.RenderingHints.KEY_ANTIALIASING; import static java.awt.RenderingHints.VALUE_ANTIALIAS_OFF; import static java.awt.RenderingHints.VALUE_ANTIALIAS_ON; import java.awt.BasicStroke; import java.awt.Color; import java.awt.Dimension; import java.awt.FontMetrics; import java.awt.Graphics; import java.awt.Graphics2D; import java.awt.image.BufferedImage; import java.beans.PropertyChangeEvent; import java.beans.PropertyChangeListener; import java.util.Locale; import java.util.ResourceBundle; import java.util.concurrent.Executors; import java.util.concurrent.RejectedExecutionException; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.ThreadFactory; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import javax.swing.JPanel; import javax.swing.SwingUtilities; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Simple implementation of JPanel allowing users to render pictures taken with * webcam. * * @author Bartosz Firyn (SarXos) */ public class WebcamPanel extends JPanel implements WebcamListener, PropertyChangeListener { /** * This enum controls how the image will be drawn within the panel bounds. * * @author Sylwia Kauczor */ public static enum DrawMode { /** * Do not resize image - paint it as it is. The image will go out of the * panel bounds if the panel is smaller than the image size. */ NONE, /** * Will resize the image to the panel bounds. This mode does not preserve the * image scale, so the final image may be distorted. */ FILL, /** * Will fit the image into the panel bounds. This will resize the image and * keep both the x and y scale factors. */ FIT, } /** * Interface of the painter used to draw image in panel. * * @author Bartosz Firyn (SarXos) */ public static interface Painter { /** * Paints panel without image. * * @param panel the webcam panel to paint on * @param g2 the graphics 2D object used for drawing */ void paintPanel(WebcamPanel panel, Graphics2D g2); /** * Paints webcam image in panel. * * @param panel the webcam panel to paint on * @param image the image obtained from the webcam * @param g2 the graphics 2D object used for drawing */ void paintImage(WebcamPanel panel, BufferedImage image, Graphics2D g2); } /** * Default painter used to draw image in panel. * * @author Bartosz Firyn (SarXos) * @author Sylwia Kauczor */ public class DefaultPainter implements Painter { private String name = null; @Override public void paintPanel(WebcamPanel owner, Graphics2D g2) { assert owner != null; assert g2 != null; Object antialiasing = g2.getRenderingHint(KEY_ANTIALIASING); g2.setRenderingHint(KEY_ANTIALIASING, isAntialiasingEnabled() ?
VALUE_ANTIALIAS_ON : VALUE_ANTIALIAS_OFF); g2.setBackground(Color.BLACK); g2.fillRect(0, 0, getWidth(), getHeight()); int cx = (getWidth() - 70) / 2; int cy = (getHeight() - 40) / 2; g2.setStroke(new BasicStroke(2)); g2.setColor(Color.LIGHT_GRAY); g2.fillRoundRect(cx, cy, 70, 40, 10, 10); g2.setColor(Color.WHITE); g2.fillOval(cx + 5, cy + 5, 30, 30); g2.setColor(Color.LIGHT_GRAY); g2.fillOval(cx + 10, cy + 10, 20, 20); g2.setColor(Color.WHITE); g2.fillOval(cx + 12, cy + 12, 16, 16); g2.fillRoundRect(cx + 50, cy + 5, 15, 10, 5, 5); g2.fillRect(cx + 63, cy + 25, 7, 2); g2.fillRect(cx + 63, cy + 28, 7, 2); g2.fillRect(cx + 63, cy + 31, 7, 2); g2.setColor(Color.DARK_GRAY); g2.setStroke(new BasicStroke(3)); g2.drawLine(0, 0, getWidth(), getHeight()); g2.drawLine(0, getHeight(), getWidth(), 0); String str = null; final String strInitDevice = rb.getString("INITIALIZING_DEVICE"); final String strNoImage = rb.getString("NO_IMAGE"); final String strDeviceError = rb.getString("DEVICE_ERROR"); if (!errored) { str = starting ? strInitDevice : strNoImage; } else { str = strDeviceError; } FontMetrics metrics = g2.getFontMetrics(getFont()); int w = metrics.stringWidth(str); int h = metrics.getHeight(); int x = (getWidth() - w) / 2; int y = cy - h; g2.setFont(getFont()); g2.setColor(Color.WHITE); g2.drawString(str, x, y); if (name == null) { name = webcam.getName(); } str = name; w = metrics.stringWidth(str); h = metrics.getHeight(); g2.drawString(str, (getWidth() - w) / 2, cy - 2 * h); g2.setRenderingHint(KEY_ANTIALIASING, antialiasing); } @Override public void paintImage(WebcamPanel owner, BufferedImage image, Graphics2D g2) { assert owner != null; assert image != null; assert g2 != null; Object antialiasing = g2.getRenderingHint(KEY_ANTIALIASING); g2.setRenderingHint(KEY_ANTIALIASING, isAntialiasingEnabled() ? 
VALUE_ANTIALIAS_ON : VALUE_ANTIALIAS_OFF); int pw = getWidth(); int ph = getHeight(); int iw = image.getWidth(); int ih = image.getHeight(); g2.setBackground(Color.BLACK); g2.fillRect(0, 0, pw, ph); switch (drawMode) { case NONE: g2.drawImage(image, 0, 0, null); break; case FILL: g2.drawImage(image, 0, 0, pw, ph, null); break; case FIT: double s = Math.max((double) iw / pw, (double) ih / ph); double niw = iw / s; double nih = ih / s; double dx = (pw - niw) / 2; double dy = (ph - nih) / 2; g2.drawImage(image, (int) dx, (int) dy, (int) niw, (int) nih, null); break; default: g2.setRenderingHint(KEY_ANTIALIASING, antialiasing); throw new RuntimeException("Mode " + drawMode + " not supported"); } if (isFPSDisplayed()) { String str = String.format("FPS: %.1f", webcam.getFPS()); int x = 5; int y = ph - 5; g2.setFont(getFont()); g2.setColor(Color.BLACK); g2.drawString(str, x + 1, y + 1); g2.setColor(Color.WHITE); g2.drawString(str, x, y); } if (isImageSizeDisplayed()) { String res = String.format("%d\u2A2F%d px", iw, ih); FontMetrics metrics = g2.getFontMetrics(getFont()); int sw = metrics.stringWidth(res); int x = pw - sw - 5; int y = ph - 5; g2.setFont(getFont()); g2.setColor(Color.BLACK); g2.drawString(res, x + 1, y + 1); g2.setColor(Color.WHITE); g2.drawString(res, x, y); } g2.setRenderingHint(KEY_ANTIALIASING, antialiasing); } } private static final class PanelThreadFactory implements ThreadFactory { private static final AtomicInteger number = new AtomicInteger(0); @Override public Thread newThread(Runnable r) { Thread t = new Thread(r, String.format("webcam-panel-scheduled-executor-%d", number.incrementAndGet())); t.setUncaughtExceptionHandler(WebcamExceptionHandler.getInstance()); t.setDaemon(true); return t; } } /** * This runnable will do nothing more than repaint panel. */ private static final class SwingRepainter implements Runnable { private WebcamPanel panel = null; public SwingRepainter(WebcamPanel panel) { this.panel = panel; } @Override public void run() { panel.repaint(); } } /** * S/N used by Java to serialize beans. */ private static final long serialVersionUID = 1L; /** * Logger. */ private static final Logger LOG = LoggerFactory.getLogger(WebcamPanel.class); /** * Minimum FPS frequency. */ public static final double MIN_FREQUENCY = 0.016; // 1 frame per minute /** * Maximum FPS frequency. */ private static final double MAX_FREQUENCY = 50; // 50 frames per second /** * Thread factory used by execution service. */ private static final ThreadFactory THREAD_FACTORY = new PanelThreadFactory(); /** * This runnable will do nothing more than repaint panel. */ private final Runnable repaint = new SwingRepainter(this); /** * Scheduled executor acting as timer. */ private ScheduledExecutorService executor = null; /** * Image updater reads images from camera and force panel to be repainted. * * @author Bartosz Firyn (SarXos) */ private class ImageUpdater implements Runnable { /** * Repaint scheduler schedule panel updates. * * @author Bartosz Firyn (sarxos) */ private class RepaintScheduler extends Thread { /** * Repaint scheduler schedule panel updates. 
*/ public RepaintScheduler() { setUncaughtExceptionHandler(WebcamExceptionHandler.getInstance()); setName(String.format("repaint-scheduler-%s", webcam.getName())); setDaemon(true); } @Override public void run() { // do nothing when not running if (!running.get()) { return; } repaintPanel(); // loop when starting, to wait for images while (starting) { try { Thread.sleep(50); } catch (InterruptedException e) { throw new RuntimeException(e); } } // schedule update when webcam is open, otherwise schedule // second scheduler execution try { if (webcam.isOpen()) { // FPS limit means that panel rendering frequency is // limited // to the specific value and panel will not be rendered // more // often then specific value // TODO: rename FPS value in panel to rendering // frequency if (isFPSLimited()) { executor.scheduleAtFixedRate(updater, 0, (long) (1000 / frequency), TimeUnit.MILLISECONDS); } else { executor.scheduleWithFixedDelay(updater, 100, 1, TimeUnit.MILLISECONDS); } } else { executor.schedule(this, 500, TimeUnit.MILLISECONDS); } } catch (RejectedExecutionException e) { // executor has been shut down, which means that someone // stopped panel / webcam device before it was actually // completely started (it was in "starting" timeframe) LOG.warn("Executor rejected paint update"); LOG.debug("Executor rejected paint update because of", e); return; } } } /** * Update scheduler thread. */ private Thread scheduler = null; /** * Is repainter running? */ private AtomicBoolean running = new AtomicBoolean(false); /** * Start repainter. Can be invoked many times, but only first call will * take effect. */ public void start() { if (running.compareAndSet(false, true)) { executor = Executors.newScheduledThreadPool(1, THREAD_FACTORY); scheduler = new RepaintScheduler(); scheduler.start(); } } /** * Stop repainter. Can be invoked many times, but only first call will * take effect. * * @throws InterruptedException */ public void stop() throws InterruptedException { if (running.compareAndSet(true, false)) { executor.shutdown(); executor.awaitTermination(5000, TimeUnit.MILLISECONDS); scheduler.join(); } } @Override public void run() { try { update(); } catch (Throwable t) { errored = true; WebcamExceptionHandler.handle(t); } } /** * Perform single panel area update (repaint newly obtained image). */ private void update() { // do nothing when updater not running, when webcam is closed, or // panel repainting is paused if (!running.get() || !webcam.isOpen() || paused) { return; } // get new image from webcam BufferedImage tmp = webcam.getImage(); boolean repaint = true; if (tmp != null) { // ignore repaint if image is the same as before if (image == tmp) { repaint = false; } errored = false; image = tmp; } if (repaint) { repaintPanel(); } } } /** * Resource bundle. */ private ResourceBundle rb = null; /** * The mode of how the image will be resized to fit into panel bounds. * Default is {@link DrawMode#FIT} * * @see DrawMode */ private DrawMode drawMode = DrawMode.FIT; /** * Frames requesting frequency. */ private double frequency = 5; // FPS /** * Is frames requesting frequency limited? If true, images will be fetched * in configured time intervals. If false, images will be fetched as fast as * camera can serve them. */ private boolean frequencyLimit = false; /** * Display FPS. */ private boolean frequencyDisplayed = false; /** * Display image size. */ private boolean imageSizeDisplayed = false; /** * Is antialiasing enabled (true by default). 
*/ private boolean antialiasingEnabled = true; /** * Webcam object used to fetch images. */ private final Webcam webcam; /** * Repainter is used to fetch images from camera and force panel repaint * when image is ready. */ private final ImageUpdater updater; /** * Image currently being displayed. */ private BufferedImage image = null; /** * Webcam is currently starting. */ private volatile boolean starting = false; /** * Painting is paused. */ private volatile boolean paused = false; /** * Is there any problem with webcam? */ private volatile boolean errored = false; /** * Webcam has been started. */ private final AtomicBoolean started = new AtomicBoolean(false); /** * Default painter. */ private final Painter defaultPainter = new DefaultPainter(); /** * Painter used to draw image in panel. * * @see #setPainter(Painter) * @see #getPainter() */ private Painter painter = defaultPainter; /** * Preferred panel size. */ private Dimension defaultSize = null; /** * Creates webcam panel and automatically starts webcam. * * @param webcam the webcam to be used to fetch images */ public WebcamPanel(Webcam webcam) { this(webcam, true); } /** * Creates new webcam panel which displays image from camera in your * Swing application. * * @param webcam the webcam to be used to fetch images * @param start true if webcam shall be automatically started */ public WebcamPanel(Webcam webcam, boolean start) { this(webcam, null, start); } /** * Creates new webcam panel which displays image from camera in your * Swing application. If panel size argument is null, then image size will * be used. If you would like to fill panel area with image even if its size * is different, then you can use {@link WebcamPanel#setFillArea(boolean)} * method to configure this. * * @param webcam the webcam to be used to fetch images * @param size the size of panel * @param start true if webcam shall be automatically started * @see WebcamPanel#setFillArea(boolean) */ public WebcamPanel(Webcam webcam, Dimension size, boolean start) { if (webcam == null) { throw new IllegalArgumentException(String.format("Webcam argument in %s constructor cannot be null!", getClass().getSimpleName())); } this.defaultSize = size; this.webcam = webcam; this.updater = new ImageUpdater(); this.rb = WebcamUtils.loadRB(WebcamPanel.class, getLocale()); setDoubleBuffered(true); addPropertyChangeListener("locale", this); if (size == null) { Dimension r = webcam.getViewSize(); if (r == null) { r = webcam.getViewSizes()[0]; } setPreferredSize(r); } else { setPreferredSize(size); } if (start) { start(); } } /** * Set new painter. Painter is a class which paints the image visible in the panel. * * @param painter the painter object to be set */ public void setPainter(Painter painter) { this.painter = painter; } /** * Get painter used to draw image in webcam panel. * * @return Painter object */ public Painter getPainter() { return painter; } @Override protected void paintComponent(Graphics g) { Graphics2D g2 = (Graphics2D) g; if (image == null) { painter.paintPanel(this, g2); } else { painter.paintImage(this, image, g2); } } /** * Open webcam and start rendering. */ public void start() { if (!started.compareAndSet(false, true)) { return; } webcam.addWebcamListener(this); LOG.debug("Starting panel rendering and trying to open attached webcam"); updater.start(); starting = true; try { if (!webcam.isOpen()) { errored = !webcam.open(); } } catch (WebcamException e) { errored = true; throw e; } finally { starting = false; repaintPanel(); } } /** * Stop rendering and close webcam.
*/ public void stop() { if (!started.compareAndSet(true, false)) { return; } webcam.removeWebcamListener(this); LOG.debug("Stopping panel rendering and closing attached webcam"); try { updater.stop(); } catch (InterruptedException e) { throw new RuntimeException(e); } image = null; try { if (webcam.isOpen()) { errored = !webcam.close(); } } catch (WebcamException e) { errored = true; throw e; } finally { repaintPanel(); } } /** * Repaint panel in Swing asynchronous manner. */ private void repaintPanel() { SwingUtilities.invokeLater(repaint); } /** * Pause rendering. */ public void pause() { if (paused) { return; } LOG.debug("Pausing panel rendering"); paused = true; } /** * Resume rendering. */ public void resume() { if (!paused) { return; } LOG.debug("Resuming panel rendering"); paused = false; } /** * Is frequency limit enabled? * * @return True or false */ public boolean isFPSLimited() { return frequencyLimit; } /** * Enable or disable frequency limit. Frequency limit should be used for * <b>all IP cameras working in pull mode</b> (to save number of HTTP * requests). If true, images will be fetched in configured time intervals. * If false, images will be fetched as fast as camera can serve them. * * @param frequencyLimit true to limit the fetching frequency, false otherwise */ public void setFPSLimited(boolean frequencyLimit) { this.frequencyLimit = frequencyLimit; } /** * Get rendering frequency in FPS (equivalent to Hz). * * @return Rendering frequency */ public double getFPSLimit() { return frequency; } /** * Set rendering frequency (in Hz or FPS). Minimum frequency is 0.016 (1 * frame per minute) and maximum is 50 (50 frames per second). * * @param fps the frequency */ public void setFPSLimit(double fps) { if (fps > MAX_FREQUENCY) { fps = MAX_FREQUENCY; } if (fps < MIN_FREQUENCY) { fps = MIN_FREQUENCY; } this.frequency = fps; } public boolean isFPSDisplayed() { return frequencyDisplayed; } public void setFPSDisplayed(boolean displayed) { this.frequencyDisplayed = displayed; } public boolean isImageSizeDisplayed() { return imageSizeDisplayed; } public void setImageSizeDisplayed(boolean imageSizeDisplayed) { this.imageSizeDisplayed = imageSizeDisplayed; } /** * Turn on/off antialiasing. * * @param antialiasing true to enable, false to disable antialiasing */ public void setAntialiasingEnabled(boolean antialiasing) { this.antialiasingEnabled = antialiasing; } /** * @return True if antialiasing is enabled, false otherwise */ public boolean isAntialiasingEnabled() { return antialiasingEnabled; } /** * Is webcam panel repainting starting. * * @return True if panel is starting */ public boolean isStarting() { return starting; } /** * Is webcam panel repainting started. * * @return True if panel repainting has been started */ public boolean isStarted() { return started.get(); } public boolean isFitArea() { return drawMode == DrawMode.FIT; } /** * This method will change the mode of panel area painting so the image will * be resized and will keep scale factor to fit into drawable panel bounds. * When set to false, the mode will be reset to {@link DrawMode#NONE} so * image will be drawn as it is. * * @param fitArea the fit area mode enabled or disabled */ public void setFitArea(boolean fitArea) { this.drawMode = fitArea ? DrawMode.FIT : DrawMode.NONE; } /** * Image will be resized to fill panel area if true. If false then image * will be rendered as it was obtained from webcam instance. * * @param fillArea shall image be resized to fill panel area */ public void setFillArea(boolean fillArea) { this.drawMode = fillArea ?
DrawMode.FILL : DrawMode.NONE; } /** * Get value of fill area setting. Image will be resized to fill panel area * if true. If false then image will be rendered as it was obtained from * webcam instance. * * @return True if image is being resized, false otherwise */ public boolean isFillArea() { return drawMode == DrawMode.FILL; } /** * Get default painter used to draw panel. * * @return Default painter */ public Painter getDefaultPainter() { return defaultPainter; } @Override public void propertyChange(PropertyChangeEvent evt) { Locale lc = (Locale) evt.getNewValue(); if (lc != null) { rb = WebcamUtils.loadRB(WebcamPanel.class, lc); } } @Override public void webcamOpen(WebcamEvent we) { // if default size has not been provided, then use the one from webcam // device (this will be current webcam resolution) if (defaultSize == null) { setPreferredSize(webcam.getViewSize()); } } @Override public void webcamClosed(WebcamEvent we) { stop(); } @Override public void webcamDisposed(WebcamEvent we) { stop(); } @Override public void webcamImageObtained(WebcamEvent we) { // do nothing } }
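/*
 * Illustrative usage sketch (not part of the original WebcamPanel source above).
 * It shows the typical way the panel is embedded in a Swing window. It assumes the
 * library's Webcam class exposes a static getDefault() accessor returning the first
 * detected camera; the frame title is made up for the example.
 */
class WebcamPanelUsageSketch {
    public static void main(String[] args) {
        Webcam webcam = Webcam.getDefault();          // first detected camera (assumed accessor)
        WebcamPanel panel = new WebcamPanel(webcam);  // single-arg ctor auto-starts the webcam
        panel.setFPSDisplayed(true);                  // overlay the current FPS in the corner
        panel.setFitArea(true);                       // DrawMode.FIT - keep image aspect ratio

        javax.swing.JFrame frame = new javax.swing.JFrame("Webcam preview");
        frame.add(panel);
        frame.setDefaultCloseOperation(javax.swing.JFrame.EXIT_ON_CLOSE);
        frame.pack();
        frame.setVisible(true);
    }
}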
package ca.uhn.fhir.model; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.util.TestUtil; import org.hl7.fhir.dstu3.model.IdType; import org.hl7.fhir.dstu3.model.Patient; import org.hl7.fhir.dstu3.model.Reference; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; import java.math.BigDecimal; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.assertTrue; public class IdTypeDstu3Test { private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(IdTypeDstu3Test.class); private static FhirContext ourCtx; private Patient parseAndEncode(Patient patient) { String encoded = ourCtx.newXmlParser().encodeResourceToString(patient); ourLog.info("\n" + encoded); return ourCtx.newXmlParser().parseResource(Patient.class, encoded); } @Test public void testBaseUrlFoo1() { IdType id = new IdType("http://my.org/foo"); assertEquals("http://my.org/foo", id.getValueAsString()); assertEquals(null, id.getIdPart()); assertEquals("foo", id.toUnqualified().getValueAsString()); assertEquals("foo", id.toUnqualifiedVersionless().getValueAsString()); assertEquals(null, id.getVersionIdPart()); assertEquals("foo", id.getResourceType()); assertEquals("http://my.org", id.getBaseUrl()); assertEquals("Patient", id.withResourceType("Patient").getValue()); assertEquals("http://foo/Patient", id.withServerBase("http://foo", "Patient").getValue()); assertEquals("http://my.org/foo//_history/2", id.withVersion("2").getValue()); } @Test public void testBaseUrlFoo2() { IdType id = new IdType("http://my.org/a/b/c/foo"); assertEquals("http://my.org/a/b/c/foo", id.getValueAsString()); assertEquals("foo", id.getIdPart()); assertEquals("c/foo", id.toUnqualified().getValueAsString()); assertEquals("c/foo", id.toUnqualifiedVersionless().getValueAsString()); assertEquals(null, id.getVersionIdPart()); assertEquals("c", id.getResourceType()); assertEquals("http://my.org/a/b", id.getBaseUrl()); assertEquals("Patient/foo", id.withResourceType("Patient").getValue()); assertEquals("http://foo/Patient/foo", id.withServerBase("http://foo", "Patient").getValue()); assertEquals("http://my.org/a/b/c/foo/_history/2", id.withVersion("2").getValue()); } @Test public void testBigDecimalIds() { IdType id = new IdType(new BigDecimal("123")); assertEquals(id.getIdPartAsBigDecimal(), new BigDecimal("123")); } /** * See #67 */ @Test public void testComplicatedLocal() { IdType id = new IdType("#Patient/cid:Patient-72/_history/1"); assertTrue(id.isLocal()); assertEquals(null, id.getBaseUrl()); assertNull(id.getResourceType()); assertNull(id.getVersionIdPart()); assertEquals("#Patient/cid:Patient-72/_history/1", id.getIdPart()); IdType id2 = new IdType("#Patient/cid:Patient-72/_history/1"); assertEquals(id, id2); id2 = id2.toUnqualified(); assertTrue(id2.isLocal()); assertNull(id2.getBaseUrl()); assertNull(id2.getResourceType()); assertNull(id2.getVersionIdPart()); assertEquals("#Patient/cid:Patient-72/_history/1", id2.getIdPart()); } @Test public void testConstructorsWithNullArguments() { IdType id = new IdType(null, null, null); assertEquals(null, id.getValue()); } @Test public void testDetectLocal() { IdType id; id = new IdType("#123"); assertEquals("#123", id.getValue()); assertTrue(id.isLocal()); id = new IdType("#Medication/499059CE-CDD4-48BC-9014-528A35D15CED/_history/1"); 
assertEquals("#Medication/499059CE-CDD4-48BC-9014-528A35D15CED/_history/1", id.getValue()); assertTrue(id.isLocal()); id = new IdType("http://example.com/Patient/33#123"); assertEquals("http://example.com/Patient/33#123", id.getValue()); assertFalse(id.isLocal()); } @Test public void testDetectLocalBase() { assertEquals("urn:uuid:180f219f-97a8-486d-99d9-ed631fe4fc57", new IdType("urn:uuid:180f219f-97a8-486d-99d9-ed631fe4fc57").getValue()); assertEquals(null, new IdType("urn:uuid:180f219f-97a8-486d-99d9-ed631fe4fc57").getBaseUrl()); assertEquals("urn:uuid:180f219f-97a8-486d-99d9-ed631fe4fc57", new IdType("urn:uuid:180f219f-97a8-486d-99d9-ed631fe4fc57").getIdPart()); assertEquals("cid:180f219f-97a8-486d-99d9-ed631fe4fc57", new IdType("cid:180f219f-97a8-486d-99d9-ed631fe4fc57").getValue()); assertEquals(null, new IdType("cid:180f219f-97a8-486d-99d9-ed631fe4fc57").getBaseUrl()); assertEquals("cid:180f219f-97a8-486d-99d9-ed631fe4fc57", new IdType("cid:180f219f-97a8-486d-99d9-ed631fe4fc57").getIdPart()); assertEquals("#180f219f-97a8-486d-99d9-ed631fe4fc57", new IdType("#180f219f-97a8-486d-99d9-ed631fe4fc57").getValue()); assertEquals(null, new IdType("#180f219f-97a8-486d-99d9-ed631fe4fc57").getBaseUrl()); assertEquals("#180f219f-97a8-486d-99d9-ed631fe4fc57", new IdType("#180f219f-97a8-486d-99d9-ed631fe4fc57").getIdPart()); } @Test public void testDetermineBase() { IdType rr; rr = new IdType("http://foo/fhir/Organization/123"); assertEquals("http://foo/fhir", rr.getBaseUrl()); rr = new IdType("http://foo/fhir/Organization/123/_history/123"); assertEquals("http://foo/fhir", rr.getBaseUrl()); rr = new IdType("Organization/123/_history/123"); assertEquals(null, rr.getBaseUrl()); } @Test public void testLocal() { IdType id = new IdType("#foo"); assertEquals("#foo", id.getValueAsString()); assertEquals("#foo", id.getIdPart()); assertEquals("#foo", id.toUnqualified().getValueAsString()); assertEquals("#foo", id.toUnqualifiedVersionless().getValueAsString()); assertEquals(null, id.getVersionIdPart()); assertEquals(null, id.getResourceType()); assertEquals(null, id.getBaseUrl()); assertEquals("#foo", id.withResourceType("Patient").getValue()); assertEquals("#foo", id.withServerBase("http://foo", "Patient").getValue()); assertEquals("#foo", id.withVersion("2").getValue()); } @Test public void testNormal() { IdType id = new IdType("foo"); assertEquals("foo", id.getValueAsString()); assertEquals("foo", id.getIdPart()); assertEquals("foo", id.toUnqualified().getValueAsString()); assertEquals("foo", id.toUnqualifiedVersionless().getValueAsString()); assertEquals(null, id.getVersionIdPart()); assertEquals(null, id.getResourceType()); assertEquals(null, id.getBaseUrl()); assertEquals("Patient/foo", id.withResourceType("Patient").getValue()); assertEquals("http://foo/Patient/foo", id.withServerBase("http://foo", "Patient").getValue()); assertEquals("foo/_history/2", id.withVersion("2").getValue()); } @Test public void testOid() { IdType id = new IdType("urn:oid:1.2.3.4"); assertEquals("urn:oid:1.2.3.4", id.getValueAsString()); assertEquals("urn:oid:1.2.3.4", id.getIdPart()); assertEquals("urn:oid:1.2.3.4", id.toUnqualified().getValueAsString()); assertEquals("urn:oid:1.2.3.4", id.toUnqualifiedVersionless().getValueAsString()); assertEquals(null, id.getVersionIdPart()); assertEquals(null, id.getResourceType()); assertEquals(null, id.getBaseUrl()); assertEquals("urn:oid:1.2.3.4", id.withResourceType("Patient").getValue()); assertEquals("urn:oid:1.2.3.4", id.withServerBase("http://foo", "Patient").getValue()); 
assertEquals("urn:oid:1.2.3.4", id.withVersion("2").getValue()); } @Test public void testParseValueAbsolute() { Patient patient = new Patient(); IdType rr = new IdType(); rr.setValue("http://foo/fhir/Organization/123"); patient.setManagingOrganization(new Reference(rr)); Patient actual = parseAndEncode(patient); Reference ref = actual.getManagingOrganization(); assertEquals("Organization", ref.getReferenceElement().getResourceType()); assertEquals("123", ref.getReferenceElement().getIdPart()); } @Test public void testParseValueAbsoluteWithVersion() { Patient patient = new Patient(); IdType rr = new IdType(); rr.setValue("http://foo/fhir/Organization/123/_history/999"); patient.setManagingOrganization(new Reference(rr)); Patient actual = parseAndEncode(patient); Reference ref = actual.getManagingOrganization(); assertEquals("Organization", ref.getReferenceElement().getResourceType()); assertEquals("123", ref.getReferenceElement().getIdPart()); assertEquals(null, ref.getReferenceElement().getVersionIdPart()); } @Test public void testParseValueMissingType1() { Patient patient = new Patient(); IdType rr = new IdType(); rr.setValue("/123"); patient.setManagingOrganization(new Reference(rr)); Patient actual = parseAndEncode(patient); Reference ref = actual.getManagingOrganization(); assertEquals(null, ref.getReferenceElement().getResourceType()); assertEquals("123", ref.getReferenceElement().getIdPart()); } @Test public void testParseValueMissingType2() { Patient patient = new Patient(); IdType rr = new IdType(); rr.setValue("123"); patient.setManagingOrganization(new Reference(rr)); Patient actual = parseAndEncode(patient); Reference ref = actual.getManagingOrganization(); assertEquals(null, ref.getReferenceElement().getResourceType()); assertEquals("123", ref.getReferenceElement().getIdPart()); } @Test public void testParseValueRelative1() { Patient patient = new Patient(); IdType rr = new IdType(); rr.setValue("Organization/123"); patient.setManagingOrganization(new Reference(rr)); Patient actual = parseAndEncode(patient); Reference ref = actual.getManagingOrganization(); assertEquals("Organization", ref.getReferenceElement().getResourceType()); assertEquals("123", ref.getReferenceElement().getIdPart()); } @Test public void testParseValueRelative2() { Patient patient = new Patient(); IdType rr = new IdType(); rr.setValue("/Organization/123"); patient.setManagingOrganization(new Reference(rr)); Patient actual = parseAndEncode(patient); Reference ref = actual.getManagingOrganization(); assertEquals("Organization", ref.getReferenceElement().getResourceType()); assertEquals("123", ref.getReferenceElement().getIdPart()); } @Test public void testParseValueWithVersion() { Patient patient = new Patient(); IdType rr = new IdType(); rr.setValue("/123/_history/999"); patient.setManagingOrganization(new Reference(rr)); Patient actual = parseAndEncode(patient); Reference ref = actual.getManagingOrganization(); assertEquals(null, ref.getReferenceElement().getResourceType()); assertEquals("123", ref.getReferenceElement().getIdPart()); assertEquals(null, ref.getReferenceElement().getVersionIdPart()); } @Test public void testUuid() { IdType id = new IdType("urn:uuid:1234-5678"); assertEquals("urn:uuid:1234-5678", id.getValueAsString()); assertEquals("urn:uuid:1234-5678", id.getIdPart()); assertEquals("urn:uuid:1234-5678", id.toUnqualified().getValueAsString()); assertEquals("urn:uuid:1234-5678", id.toUnqualifiedVersionless().getValueAsString()); assertEquals(null, id.getVersionIdPart()); assertEquals(null, 
id.getResourceType()); assertEquals(null, id.getBaseUrl()); assertEquals("urn:uuid:1234-5678", id.withResourceType("Patient").getValue()); assertEquals("urn:uuid:1234-5678", id.withServerBase("http://foo", "Patient").getValue()); assertEquals("urn:uuid:1234-5678", id.withVersion("2").getValue()); } @Test public void testViewMethods() { IdType i = new IdType("http://foo/fhir/Organization/123/_history/999"); assertEquals("Organization/123/_history/999", i.toUnqualified().getValue()); assertEquals("http://foo/fhir/Organization/123", i.toVersionless().getValue()); assertEquals("Organization/123", i.toUnqualifiedVersionless().getValue()); } @Test public void testWithVersionNull() { assertEquals("Patient/123", new IdType("Patient/123/_history/2").withVersion("").getValue()); } @AfterAll public static void afterClassClearContext() { TestUtil.clearAllStaticFieldsForUnitTest(); } @BeforeAll public static void beforeClass() { ourCtx = FhirContext.forDstu3(); } }
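/*
 * Illustrative sketch (not part of the original test class above) summarizing the
 * IdType decomposition the tests assert: a fully qualified, versioned id is split
 * into base URL, resource type, id part and version. The sample URL is made up.
 */
class IdTypeDstu3UsageSketch {
    public static void main(String[] args) {
        org.hl7.fhir.dstu3.model.IdType id =
                new org.hl7.fhir.dstu3.model.IdType("http://foo/fhir/Patient/123/_history/2");
        System.out.println(id.getBaseUrl());                          // expected: http://foo/fhir
        System.out.println(id.getResourceType());                     // expected: Patient
        System.out.println(id.getIdPart());                           // expected: 123
        System.out.println(id.getVersionIdPart());                    // expected: 2
        System.out.println(id.toUnqualifiedVersionless().getValue()); // expected: Patient/123
    }
}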
/* * The MIT License (MIT) * * Copyright (c) 2014-2019 Yegor Bugayenko * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included * in all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package org.takes.facets.auth; import java.io.IOException; import java.io.UnsupportedEncodingException; import java.net.HttpURLConnection; import java.net.URLDecoder; import java.net.URLEncoder; import java.util.HashMap; import java.util.Iterator; import java.util.Map; import java.util.logging.Level; import java.util.regex.Pattern; import javax.xml.bind.DatatypeConverter; import lombok.EqualsAndHashCode; import org.cactoos.text.TextOf; import org.cactoos.text.Trimmed; import org.cactoos.text.UncheckedText; import org.takes.Request; import org.takes.Response; import org.takes.facets.flash.RsFlash; import org.takes.facets.forward.RsForward; import org.takes.misc.Opt; import org.takes.rq.RqHeaders; import org.takes.rq.RqHref; import org.takes.rs.RsWithHeader; /** * Pass that checks the user according RFC-2617. * * <p>The class is immutable and thread-safe. * * @since 0.20 * @checkstyle ClassDataAbstractionCouplingCheck (100 lines) * @todo #863:30min Continue removing nulls from the code base, there are still * some places that use it and can be replaced with better code constructs. */ @EqualsAndHashCode @SuppressWarnings("PMD.TooManyMethods") public final class PsBasic implements Pass { /** * Pattern for basic authorization name. */ private static final Pattern AUTH = Pattern.compile("Basic"); /** * Entry to validate user information. */ private final PsBasic.Entry entry; /** * Realm. */ private final String realm; /** * Ctor. 
* @param rlm Realm * @param basic Entry */ public PsBasic(final String rlm, final PsBasic.Entry basic) { this.realm = rlm; this.entry = basic; } @Override public Opt<Identity> enter(final Request request) throws IOException { final Iterator<String> headers = new RqHeaders.Smart( new RqHeaders.Base(request) ).header("authorization").iterator(); if (!headers.hasNext()) { throw new RsForward( new RsWithHeader( String.format( "WWW-Authenticate: Basic realm=\"%s\"", this.realm ) ), HttpURLConnection.HTTP_UNAUTHORIZED, new RqHref.Base(request).href() ); } final String decoded = new UncheckedText( new Trimmed( new TextOf( DatatypeConverter.parseBase64Binary( PsBasic.AUTH.split(headers.next())[1] ) ) ) ).asString(); final String user = decoded.split(":")[0]; final Opt<Identity> identity = this.entry.enter( user, decoded.substring(user.length() + 1) ); if (!identity.has()) { throw new RsForward( new RsWithHeader( new RsFlash("access denied", Level.WARNING), String.format( "WWW-Authenticate: Basic realm=\"%s\"", this.realm ) ), HttpURLConnection.HTTP_UNAUTHORIZED, new RqHref.Base(request).href() ); } return identity; } @Override public Response exit(final Response response, final Identity identity) { return response; } /** * Entry interface that is used to check if the received information is * valid. * * @since 0.20 */ public interface Entry { /** * Check if it is a valid user. * @param user User * @param pwd Password * @return Identity. */ Opt<Identity> enter(String user, String pwd); } /** * Fake implementation of {@link PsBasic.Entry}. * * <p>The class is immutable and thread-safe. * * @since 0.20 */ public static final class Fake implements PsBasic.Entry { /** * Should we authenticate a user? */ private final boolean condition; /** * Ctor. * @param cond Condition */ public Fake(final boolean cond) { this.condition = cond; } @Override public Opt<Identity> enter(final String usr, final String pwd) { final Opt<Identity> user; if (this.condition) { user = new Opt.Single<>( new Identity.Simple( String.format("urn:basic:%s", usr) ) ); } else { user = new Opt.Empty<>(); } return user; } } /** * Empty check. * * @since 0.20 */ public static final class Empty implements PsBasic.Entry { @Override public Opt<Identity> enter(final String user, final String pwd) { return new Opt.Empty<>(); } } /** * Default entry. * * @since 0.22 */ public static final class Default implements PsBasic.Entry { /** * How keys in * {@link org.takes.facets.auth.PsBasic.Default#usernames} are * formatted. */ private static final String KEY_FORMAT = "%s %s"; /** * Encoding for URLEncoder#encode. */ private static final String ENCODING = "UTF-8"; /** * Map from login/password pairs to URNs. */ private final Map<String, String> usernames; /** * Public ctor. * @param users Strings with user's login, password and URN with * space characters as separators. Each of login, password and urn * are URL-encoded substrings. For example, * {@code "mike my%20password urn:jcabi-users:michael"}. */ public Default(final String...
users) { this.usernames = Default.converted(users); } @Override public Opt<Identity> enter(final String user, final String pwd) { final Opt<String> urn = this.urn(user, pwd); final Opt<Identity> identity; if (urn.has()) { try { identity = new Opt.Single<>( new Identity.Simple( URLDecoder.decode( urn.get(), PsBasic.Default.ENCODING ) ) ); } catch (final UnsupportedEncodingException ex) { throw new IllegalStateException(ex); } } else { identity = new Opt.Empty<>(); } return identity; } /** * Converts Strings with user's login, password and URN to Map. * @param users Strings with user's login, password and URN with * space characters as separators. Each of login, password and urn * are URL-encoded substrings. For example, * {@code "mike my%20password urn:jcabi-users:michael"}. * @return Map from login/password pairs to URNs. */ private static Map<String, String> converted(final String... users) { final Map<String, String> result = new HashMap<>(users.length); for (final String user : users) { final String unified = user.replace("%20", "+"); PsBasic.Default.validateUser(unified); result.put( PsBasic.Default.key(unified), unified.substring(unified.lastIndexOf(' ') + 1) ); } return result; } /** * Returns an URN corresponding to a login-password pair. * @param user Login. * @param pwd Password. * @return Opt with URN or empty if there is no such login-password * pair. */ private Opt<String> urn(final String user, final String pwd) { final String urn; try { urn = this.usernames.get( String.format( PsBasic.Default.KEY_FORMAT, URLEncoder.encode( user, PsBasic.Default.ENCODING ), URLEncoder.encode( pwd, PsBasic.Default.ENCODING ) ) ); } catch (final UnsupportedEncodingException ex) { throw new IllegalStateException(ex); } final Opt<String> opt; if (urn == null) { opt = new Opt.Empty<>(); } else { opt = new Opt.Single<>(urn); } return opt; } /** * Creates a key for * {@link org.takes.facets.auth.PsBasic.Default#usernames} map. * @param unified User string made of 3 urlencoded substrings * separated with non-urlencoded space characters. * @return Login and password parts with <pre>%20</pre> replaced with * <pre>+</pre>. */ private static String key(final String unified) { return String.format( PsBasic.Default.KEY_FORMAT, unified.substring(0, unified.indexOf(' ')), unified.substring( unified.indexOf(' ') + 1, unified.lastIndexOf(' ') ) ); } /** * Checks if a unified user string is correctly formatted. * @param unified String with urlencoded user login, password and urn * separated with spaces. */ private static void validateUser(final String unified) { final boolean amount = PsBasic.Default.countSpaces(unified) != 2; final boolean nearby = unified.indexOf(' ') + 1 == unified.lastIndexOf(' '); if (amount || nearby) { throw new IllegalArgumentException( String.format( "One of users was incorrectly formatted: %s", unified ) ); } } /** * Counts spaces in a string. * @param txt Any string. * @return Amount of spaces in string. */ private static int countSpaces(final String txt) { int spaces = 0; for (final char character : txt.toCharArray()) { if (character == ' ') { spaces += 1; } } return spaces; } } }
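/*
 * Illustrative wiring sketch (not part of the original PsBasic source above). It uses
 * the PsBasic.Default entry with the user-string format documented in its Javadoc:
 * login, password and URN, each URL-encoded, separated by single spaces. The realm
 * name and the credentials below are made up for the example.
 */
final class PsBasicUsageSketch {
    private PsBasicUsageSketch() {
    }
    public static void main(final String[] args) {
        final Pass pass = new PsBasic(
            "my-realm",
            new PsBasic.Default(
                "mike my%20password urn:basic:michael",
                "anna secret urn:basic:anna"
            )
        );
        // pass.enter(request) returns an Opt<Identity> when the request carries a valid
        // "Authorization: Basic ..." header, and throws RsForward (401) otherwise.
        System.out.println(pass);
    }
}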
/* * Licensed to GraphHopper and Peter Karich under one or more contributor * license agreements. See the NOTICE file distributed with this work for * additional information regarding copyright ownership. * * GraphHopper licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except in * compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.graphhopper.storage; import java.io.File; import java.io.IOException; import java.io.RandomAccessFile; import java.nio.ByteOrder; import java.util.Arrays; /** * This is an in-memory data structure based on an integer array. With the possibility to be stored * on flush(). * <p/> * @author Peter Karich */ class RAMIntDataAccess extends AbstractDataAccess { private int[][] segments = new int[0][]; private boolean closed = false; private boolean store; private transient int segmentSizeIntsPower; RAMIntDataAccess( String name, String location, boolean store, ByteOrder order ) { super(name, location, order); this.store = store; } /** * @param store true if in-memory data should be saved when calling flush */ public RAMIntDataAccess setStore( boolean store ) { this.store = store; return this; } @Override public boolean isStoring() { return store; } @Override public DataAccess copyTo( DataAccess da ) { if (da instanceof RAMIntDataAccess) { copyHeader(da); RAMIntDataAccess rda = (RAMIntDataAccess) da; // TODO PERFORMANCE we could reuse rda segments! rda.segments = new int[segments.length][]; for (int i = 0; i < segments.length; i++) { int[] area = segments[i]; rda.segments[i] = Arrays.copyOf(area, area.length); } rda.setSegmentSize(segmentSizeInBytes); // leave id, store and close unchanged return da; } else { return super.copyTo(da); } } @Override public RAMIntDataAccess create( long bytes ) { if (segments.length > 0) { throw new IllegalThreadStateException("already created"); } // initialize transient values setSegmentSize(segmentSizeInBytes); ensureCapacity(Math.max(10 * 4, bytes)); return this; } @Override public boolean ensureCapacity(long bytes) { if (bytes < 0) throw new IllegalArgumentException("new capacity has to be strictly positive"); long cap = getCapacity(); long todoBytes = bytes - cap; if (todoBytes <= 0) return false; int segmentsToCreate = (int) (todoBytes / segmentSizeInBytes); if (todoBytes % segmentSizeInBytes != 0) segmentsToCreate++; try { int[][] newSegs = Arrays.copyOf(segments, segments.length + segmentsToCreate); for (int i = segments.length; i < newSegs.length; i++) { newSegs[i] = new int[1 << segmentSizeIntsPower]; } segments = newSegs; return true; } catch (OutOfMemoryError err) { throw new OutOfMemoryError(err.getMessage() + " - problem when allocating new memory. 
Old capacity: " + cap + ", new bytes:" + todoBytes + ", segmentSizeIntsPower:" + segmentSizeIntsPower + ", new segments:" + segmentsToCreate + ", existing:" + segments.length); } } @Override public boolean loadExisting() { if (segments.length > 0) throw new IllegalStateException("already initialized"); if (isClosed()) throw new IllegalStateException("already closed"); if (!store) return false; File file = new File(getFullName()); if (!file.exists() || file.length() == 0) { return false; } try { RandomAccessFile raFile = new RandomAccessFile(getFullName(), "r"); try { long byteCount = readHeader(raFile) - HEADER_OFFSET; if (byteCount < 0) { return false; } byte[] bytes = new byte[segmentSizeInBytes]; raFile.seek(HEADER_OFFSET); // raFile.readInt() <- too slow int segmentCount = (int) (byteCount / segmentSizeInBytes); if (byteCount % segmentSizeInBytes != 0) segmentCount++; segments = new int[segmentCount][]; for (int s = 0; s < segmentCount; s++) { int read = raFile.read(bytes) / 4; int area[] = new int[read]; for (int j = 0; j < read; j++) { area[j] = bitUtil.toInt(bytes, j * 4); } segments[s] = area; } return true; } finally { raFile.close(); } } catch (IOException ex) { throw new RuntimeException("Problem while loading " + getFullName(), ex); } } @Override public void flush() { if (closed) { throw new IllegalStateException("already closed"); } if (!store) { return; } try { RandomAccessFile raFile = new RandomAccessFile(getFullName(), "rw"); try { long len = getCapacity(); writeHeader(raFile, len, segmentSizeInBytes); raFile.seek(HEADER_OFFSET); // raFile.writeInt() <- too slow, so copy into byte array for (int s = 0; s < segments.length; s++) { int area[] = segments[s]; int intLen = area.length; byte[] byteArea = new byte[intLen * 4]; for (int i = 0; i < intLen; i++) { bitUtil.fromInt(byteArea, area[i], i * 4); } raFile.write(byteArea); } } finally { raFile.close(); } } catch (Exception ex) { throw new RuntimeException("Couldn't store integers to " + toString(), ex); } } @Override public final void setInt( long bytePos, int value ) { assert segmentSizeIntsPower > 0 : "call create or loadExisting before usage!"; bytePos >>>= 2; int bufferIndex = (int) (bytePos >>> segmentSizeIntsPower); int index = (int) (bytePos & indexDivisor); segments[bufferIndex][index] = value; } @Override public final int getInt( long bytePos ) { assert segmentSizeIntsPower > 0 : "call create or loadExisting before usage!"; bytePos >>>= 2; int bufferIndex = (int) (bytePos >>> segmentSizeIntsPower); int index = (int) (bytePos & indexDivisor); return segments[bufferIndex][index]; } @Override public final void setShort( long bytePos, short value ) { assert segmentSizeIntsPower > 0 : "call create or loadExisting before usage!"; if (bytePos % 4 != 0 && bytePos % 4 != 2) throw new IllegalMonitorStateException("bytePos of wrong multiple for RAMInt " + bytePos); long tmpIndex = bytePos >>> 1; int bufferIndex = (int) (tmpIndex >>> segmentSizeIntsPower); int index = (int) (tmpIndex & indexDivisor); if (tmpIndex * 2 == bytePos) segments[bufferIndex][index] = value; else segments[bufferIndex][index] = value << 16; } @Override public final short getShort( long bytePos ) { assert segmentSizeIntsPower > 0 : "call create or loadExisting before usage!"; if (bytePos % 4 != 0 && bytePos % 4 != 2) throw new IllegalMonitorStateException("bytePos of wrong multiple for RAMInt " + bytePos); long tmpIndex = bytePos >>> 1; int bufferIndex = (int) (tmpIndex >>> segmentSizeIntsPower); int index = (int) (tmpIndex & indexDivisor); if (tmpIndex 
* 2 == bytePos) return (short) segments[bufferIndex][index]; else return (short) (segments[bufferIndex][index] >> 16); } @Override public void getBytes( long bytePos, byte[] values, int length ) { throw new UnsupportedOperationException(toString() + " does not support byte based access. Use RAMDataAccess instead"); } @Override public void setBytes( long bytePos, byte[] values, int length ) { throw new UnsupportedOperationException(toString() + " does not support byte based access. Use RAMDataAccess instead"); } @Override public void close() { super.close(); segments = new int[0][]; closed = true; } @Override public long getCapacity() { return (long) getSegments() * segmentSizeInBytes; } @Override public int getSegments() { return segments.length; } @Override public DataAccess setSegmentSize( int bytes ) { super.setSegmentSize(bytes); segmentSizeIntsPower = (int) (Math.log(segmentSizeInBytes / 4) / Math.log(2)); indexDivisor = segmentSizeInBytes / 4 - 1; return this; } @Override public void trimTo( long capacity ) { if (capacity < segmentSizeInBytes) { capacity = segmentSizeInBytes; } int remainingSegments = (int) (capacity / segmentSizeInBytes); if (capacity % segmentSizeInBytes != 0) { remainingSegments++; } segments = Arrays.copyOf(segments, remainingSegments); } boolean releaseSegment( int segNumber ) { segments[segNumber] = null; return true; } @Override public void rename( String newName ) { if (!checkBeforeRename(newName)) { return; } if (store) { super.rename(newName); } // in every case set the name name = newName; } @Override protected boolean isIntBased() { return true; } @Override public DAType getType() { if (isStoring()) return DAType.RAM_INT_STORE; return DAType.RAM_INT; } }
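// Illustrative sketch (not part of the original source): the int-backed DataAccess above addresses
// values by byte position and derives a segment index plus an offset inside that segment with the
// same shift/mask arithmetic used in setInt/getInt. The class and values below are hypothetical and
// only demonstrate that arithmetic in isolation with plain Java.
class SegmentedIntIndexDemo {
    public static void main(String[] args) {
        int segmentSizeInBytes = 1 << 10;                           // 1 KB segments, as an example
        int segmentSizeIntsPower = (int) (Math.log(segmentSizeInBytes / 4) / Math.log(2));
        int indexDivisor = segmentSizeInBytes / 4 - 1;              // mask for the offset within a segment

        long bytePos = 4100;                                        // some 4-byte aligned position
        long intPos = bytePos >>> 2;                                // byte position -> int position
        int bufferIndex = (int) (intPos >>> segmentSizeIntsPower);  // which int[] segment
        int index = (int) (intPos & indexDivisor);                  // offset inside that segment

        // With 1 KB segments (256 ints each), byte 4100 is int 1025 -> segment 4, offset 1.
        System.out.println("segment=" + bufferIndex + " offset=" + index);
    }
}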
package com.booking.replication.applier.hbase.schema; import com.booking.replication.applier.hbase.HBaseApplier; import com.booking.replication.applier.hbase.StorageConfig; import com.booking.replication.augmenter.model.schema.SchemaAtPositionCache; import com.booking.replication.augmenter.model.schema.SchemaSnapshot; import com.booking.replication.augmenter.model.schema.SchemaTransitionSequence; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ser.impl.SimpleBeanPropertyFilter; import com.fasterxml.jackson.databind.ser.impl.SimpleFilterProvider; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.TableExistsException; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.ConnectionFactory; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.io.compress.Compression; import org.apache.hadoop.hbase.util.Bytes; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import java.io.IOException; import java.util.Collections; import java.util.HashSet; import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; /** * Created by bdevetak on 27/11/15. */ public class HBaseSchemaManager { private static final Logger LOG = LogManager.getLogger(HBaseSchemaManager.class); private final Configuration hbaseConfig; private final StorageConfig storageConfig; private Connection connection; private final Map<String, Integer> seenHBaseTables = new ConcurrentHashMap<>(); private final Map<String, Object> configuration; // Mirrored tables private static final int MIRRORED_TABLE_DEFAULT_REGIONS = 16; private static final int MIRRORED_TABLE_NUMBER_OF_VERSIONS = 1000; // schema history table private static final int SCHEMA_HISTORY_TABLE_NR_VERSIONS = 1; private final boolean DRY_RUN; private static boolean USE_SNAPPY; private static final byte[] CF = Bytes.toBytes("d"); private static final ObjectMapper MAPPER = new ObjectMapper(); { Set<String> includeInColumns = new HashSet<>(); Collections.addAll(includeInColumns, "name", "columnType", "key", "valueDefault", "collation", "nullable"); SimpleFilterProvider filterProvider = new SimpleFilterProvider(); filterProvider.addFilter("column", SimpleBeanPropertyFilter.filterOutAllExcept(includeInColumns)); MAPPER.setFilterProvider(filterProvider); } public HBaseSchemaManager(Map<String, Object> configuration) { DRY_RUN = (boolean) configuration.get(HBaseApplier.Configuration.DRYRUN); USE_SNAPPY = (boolean) configuration.get(HBaseApplier.Configuration.HBASE_USE_SNAPPY); this.configuration = configuration; this.storageConfig = StorageConfig.build(configuration); this.hbaseConfig = storageConfig.getConfig(); if (!DRY_RUN) { try { connection = ConnectionFactory.createConnection(storageConfig.getConfig()); LOG.info("HBaseSchemaManager successfully established connection to HBase."); } catch (IOException e) { LOG.error("HBaseSchemaManager could not connect to HBase", e); } } } public synchronized void createHBaseTableIfNotExists(String hbaseTableName) throws IOException { if (!DRY_RUN) { hbaseTableName = hbaseTableName.toLowerCase(); try ( Admin admin = connection.getAdmin()) { if (seenHBaseTables.containsKey(hbaseTableName)) { return; } if (connection == null) { connection 
= ConnectionFactory.createConnection(storageConfig.getConfig()); } TableName tableName; String namespace = (String) configuration.get(HBaseApplier.Configuration.TARGET_NAMESPACE); if (namespace.isEmpty()) { tableName = TableName.valueOf(hbaseTableName); } else { tableName = TableName.valueOf(namespace, hbaseTableName); } if (admin.tableExists(tableName)) { LOG.warn("Table " + tableName + " exists in HBase, but not in schema cache. Probably a case of a table that was dropped and than created again"); seenHBaseTables.put(hbaseTableName, 1); } else { HTableDescriptor tableDescriptor = new HTableDescriptor(tableName); HColumnDescriptor cd = new HColumnDescriptor("d"); if (USE_SNAPPY) { cd.setCompressionType(Compression.Algorithm.SNAPPY); } cd.setMaxVersions(MIRRORED_TABLE_NUMBER_OF_VERSIONS); tableDescriptor.addFamily(cd); tableDescriptor.setCompactionEnabled(true); admin.createTable(tableDescriptor); seenHBaseTables.put(hbaseTableName, 1); LOG.warn("Created hbase table " + hbaseTableName); } } catch (IOException e) { throw new IOException("Failed to create table in HBase", e); } } } public void writeSchemaSnapshot(SchemaSnapshot schemaSnapshot, Map<String, Object> configuration) throws IOException, SchemaTransitionException { // get sql_statement String ddl = schemaSnapshot.getSchemaTransitionSequence().getDdl(); if (ddl == null) { throw new SchemaTransitionException("DDL can not be null"); } // get pre/post schemas SchemaAtPositionCache schemaSnapshotBefore = schemaSnapshot.getSchemaBefore(); SchemaAtPositionCache schemaSnapshotAfter = schemaSnapshot.getSchemaAfter(); Map<String, String> createStatementsBefore = schemaSnapshot.getSchemaBefore().getCreateTableStatements(); Map<String, String> createStatementsAfter = schemaSnapshot.getSchemaAfter().getCreateTableStatements(); SchemaTransitionSequence schemaTransitionSequence = schemaSnapshot.getSchemaTransitionSequence(); // json-ify // TODO: add unit test that makes sure that snapshot format is compatible with HBaseSnapshotter String jsonSchemaSnapshotBefore = MAPPER.writeValueAsString(schemaSnapshotBefore); String jsonSchemaSnapshotAfter = MAPPER.writeValueAsString(schemaSnapshotAfter); String jsonSchemaTransitionSequence = MAPPER.writeValueAsString(schemaTransitionSequence); String jsonCreateStatementsBefore = MAPPER.writeValueAsString(createStatementsBefore); String jsonCreateStatementsAfter = MAPPER.writeValueAsString(createStatementsAfter); // get event timestamp Long eventTimestamp = schemaSnapshot.getSchemaTransitionSequence().getSchemaTransitionTimestamp(); String hbaseTableName = HBaseTableNameMapper.getSchemaSnapshotHistoryHBaseTableName(configuration); String hbaseRowKey = eventTimestamp.toString(); if ((boolean)configuration.get(HBaseApplier.Configuration.INITIAL_SNAPSHOT_MODE)) { // in initial-snapshot mode timestamp is overridden by 0 so all create statements // fall under the same timestamp. This is ok since there should be only one schema // snapshot for the initial-snapshot. However, having key=0 is not good, so replace // it with: hbaseRowKey = "initial-snapshot"; } try ( Admin admin = connection.getAdmin() ) { if (connection == null) { connection = ConnectionFactory.createConnection(storageConfig.getConfig()); } TableName tableName = TableName.valueOf(hbaseTableName); if (!admin.tableExists(tableName)) { synchronized (HBaseSchemaManager.class) { if (!admin.tableExists(tableName)) { LOG.info("table " + hbaseTableName + " does not exist in HBase. 
Creating..."); HTableDescriptor tableDescriptor = new HTableDescriptor(tableName); HColumnDescriptor cd = new HColumnDescriptor("d"); cd.setMaxVersions(SCHEMA_HISTORY_TABLE_NR_VERSIONS); tableDescriptor.addFamily(cd); tableDescriptor.setCompactionEnabled(true); admin.createTable(tableDescriptor); } else { LOG.info("Table " + hbaseTableName + " already exists in HBase. Probably a case of other thread created it."); } } } else { LOG.info("Table " + hbaseTableName + " already exists in HBase. Probably a case of replaying the binlog."); } Put put = new Put(Bytes.toBytes(hbaseRowKey)); String ddlColumnName = "ddl"; put.addColumn( CF, Bytes.toBytes(ddlColumnName), eventTimestamp, Bytes.toBytes(ddl) ); String schemaTransitionSequenceColumnName = "schemaTransitionSequence"; put.addColumn( CF, Bytes.toBytes(schemaTransitionSequenceColumnName), eventTimestamp, Bytes.toBytes(jsonSchemaTransitionSequence) ); String schemaSnapshotPreColumnName = "schemaPreChange"; put.addColumn( CF, Bytes.toBytes(schemaSnapshotPreColumnName), eventTimestamp, Bytes.toBytes(jsonSchemaSnapshotBefore) ); String schemaSnapshotPostColumnName = "schemaPostChange"; put.addColumn( CF, Bytes.toBytes(schemaSnapshotPostColumnName), eventTimestamp, Bytes.toBytes(jsonSchemaSnapshotAfter) ); String preChangeCreateStatementsColumn = "createsPreChange"; put.addColumn( CF, Bytes.toBytes(preChangeCreateStatementsColumn), eventTimestamp, Bytes.toBytes(jsonCreateStatementsBefore) ); String postChangeCreateStatementsColumn = "createsPostChange"; put.addColumn( CF, Bytes.toBytes(postChangeCreateStatementsColumn), eventTimestamp, Bytes.toBytes(jsonCreateStatementsAfter) ); Table hbaseTable = connection.getTable(tableName); hbaseTable.put(put); hbaseTable.close(); } catch (TableExistsException tee) { LOG.warn("trying to create hbase table that already exists", tee); } catch (IOException ioe) { throw new SchemaTransitionException("Failed to store schemaChangePointSnapshot in HBase.", ioe); } } }
/* * Copyright 2009 FatWire Corporation. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.fatwire.gst.foundation.test; import java.io.OutputStream; import java.security.Principal; import java.util.Collection; import java.util.Enumeration; import java.util.Map; import java.util.Vector; import COM.FutureTense.Cache.Satellite; import COM.FutureTense.ContentServer.PageData; import COM.FutureTense.Interfaces.FTVAL; import COM.FutureTense.Interfaces.FTValList; import COM.FutureTense.Interfaces.ICS; import COM.FutureTense.Interfaces.IJSPObject; import COM.FutureTense.Interfaces.IList; import COM.FutureTense.Interfaces.IMIMENotifier; import COM.FutureTense.Interfaces.IProperties; import COM.FutureTense.Interfaces.ISearchEngine; import COM.FutureTense.Interfaces.IServlet; import COM.FutureTense.Interfaces.ISyncHash; import COM.FutureTense.Interfaces.IURLDefinition; import COM.FutureTense.Interfaces.PastramiEngine; import COM.FutureTense.Util.ftErrors; import COM.FutureTense.XML.Template.Seed; import com.fatwire.cs.core.db.PreparedStmt; import com.fatwire.cs.core.db.StatementParam; import com.fatwire.cs.core.uri.Definition; /** * @deprecated - moved to new namespace * @see "tools.gsf.test.MockICS" */ @SuppressWarnings("deprecation") public class MockICS implements ICS { public boolean AppEvent(String arg0, String arg1, String arg2, FTValList arg3) { return false; } public boolean BlobServer(FTValList arg0, IMIMENotifier arg1, OutputStream arg2) { return false; } public boolean CallElement(String arg0, FTValList arg1) { return false; } public IList CallSQL(String arg0, String arg1, int arg2, boolean arg3, StringBuffer arg4) { return null; } public String getSQL(String s) { throw new UnsupportedOperationException("NYI"); } public IList CallSQL(String arg0, String arg1, int arg2, boolean arg3, boolean arg4, StringBuffer arg5) { return null; } public IList CatalogDef(String arg0, String arg1, StringBuffer arg2) { return null; } public boolean CatalogManager(FTValList arg0) { return false; } public boolean CatalogManager(FTValList arg0, Object arg1) { return false; } public void ClearErrno() { } public boolean CommitBatchedCommands(Object arg0) { return false; } public boolean CopyList(String arg0, String arg1) { return false; } public boolean DeleteSynchronizedHash(String arg0) { return false; } public boolean DestroyEvent(String arg0) { return false; } public void DisableCache() { } public boolean DisableEvent(String arg0) { return false; } public void DisableFragmentCache() { } public boolean EmailEvent(String arg0, String arg1, String arg2, String arg3) { return false; } public boolean EmailEvent(String arg0, String arg1, String arg2, String arg3, String arg4, String arg5, FTValList arg6) { return false; } public boolean EnableEvent(String arg0) { return false; } public boolean FlushCatalog(String arg0) { return false; } public void FlushStream() { } public byte[] GetBin(String arg0) { return null; } public int GetCatalogType(String arg0) { return 0; } public FTVAL GetCgi(String 
arg0) { return null; } public int GetCounter(String arg0) throws Exception { return 0; } public int GetErrno() { return 0; } public IList GetList(String arg0) { return null; } public IList GetList(String arg0, boolean arg1) { return null; } public Object GetObj(String arg0) { return null; } public String GetProperty(String arg0) { return null; } public String GetProperty(String arg0, String arg1, boolean arg2) { return null; } public String GetSSVar(String arg0) { return null; } @SuppressWarnings("rawtypes") public Enumeration GetSSVars() { return null; } public ISearchEngine GetSearchEngine(String arg0, String arg1, StringBuffer arg2) { return null; } public String GetSearchEngineList() { return null; } public ISyncHash GetSynchronizedHash(String arg0, boolean arg1, int arg2, int arg3, boolean arg4, boolean arg5) { return null; } public ISyncHash GetSynchronizedHash(String arg0, boolean arg1, int arg2, int arg3, boolean arg4, boolean arg5, @SuppressWarnings("rawtypes") Collection arg6) { return null; } public String GetVar(String arg0) { return null; } @SuppressWarnings({ "rawtypes" }) public Enumeration GetVars() { return null; } public boolean IndexAdd(String arg0, String arg1, String arg2, String arg3, String arg4, String arg5, String arg6, FTValList arg7, FTValList arg8, FTValList arg9, String arg10, String arg11, StringBuffer arg12) { return false; } public boolean IndexCreate(String arg0, FTValList arg1, String arg2, String arg3, StringBuffer arg4) { return false; } public boolean IndexDestroy(String arg0, String arg1, String arg2, StringBuffer arg3) { return false; } public boolean IndexExists(String arg0, String arg1, String arg2, StringBuffer arg3) { return false; } public boolean IndexRemove(String arg0, String arg1, String arg2, String arg3, StringBuffer arg4) { return false; } public boolean IndexReplace(String arg0, String arg1, String arg2, String arg3, String arg4, String arg5, String arg6, FTValList arg7, FTValList arg8, FTValList arg9, String arg10, String arg11, StringBuffer arg12) { return false; } public boolean InsertPage(String arg0, FTValList arg1) { return false; } public boolean IsElement(String arg0) { return false; } public boolean IsSystemSecure() { return false; } public boolean IsTracked(String arg0) { return false; } public boolean LoadProperty(String arg0) { return false; } public void LogMsg(String arg0) { } @SuppressWarnings("rawtypes") public int Mirror(Vector arg0, String arg1, String arg2, String arg3, String arg4, String arg5, int arg6, boolean arg7, boolean arg8, int arg9, StringBuffer arg10) { return 0; } @SuppressWarnings("rawtypes") public int Mirror(Vector arg0, Vector arg1, String arg2, String arg3, String arg4, String arg5, String arg6, int arg7, boolean arg8, boolean arg9, int arg10, StringBuffer arg11) { return 0; } @SuppressWarnings("rawtypes") public int Mirror(Vector arg0, String arg1, String arg2, String arg3, String arg4, String arg5, int arg6, boolean arg7, String arg8, String arg9, String arg10, String arg11, boolean arg12, int arg13, StringBuffer arg14) { return 0; } public int Mirror(IList arg0, String arg1, String arg2, String arg3, String arg4, String arg5, int arg6, boolean arg7, String arg8, String arg9, String arg10, String arg11, boolean arg12, int arg13, String arg14, StringBuffer arg15) { return 0; } @SuppressWarnings("rawtypes") public int Mirror(Vector arg0, Vector arg1, String arg2, String arg3, String arg4, String arg5, String arg6, int arg7, boolean arg8, String arg9, String arg10, String arg11, String arg12, boolean 
arg13, int arg14, StringBuffer arg15) { return 0; } public Seed NewSeedFromTagname(String arg0) { return null; } public Object PopObj(String arg0) { return null; } public void PopVars() { } public boolean PushObj(String arg0, Object arg1) { return false; } public void PushVars() { } public IList QueryEvents(String arg0, String arg1, Boolean arg2, String arg3) { return null; } public int RTCommit(String arg0, String arg1, String arg2, boolean arg3) { return 0; } public int RTDeleteRevision(String arg0, String arg1, int arg2) { return 0; } public IList RTHistory(String arg0, String arg1, String arg2, String arg3, String arg4, String arg5, String arg6) { return null; } public IList RTInfo(String arg0, String arg1) { return null; } public int RTLock(String arg0, String arg1) { return 0; } public int RTRelease(String arg0, String arg1) { return 0; } public IList RTRetrieveRevision(String arg0, String arg1, int arg2, String arg3) { return null; } public IList RTRetrieveRevision(String arg0, String arg1, String arg2, String arg3) { return null; } public int RTRollback(String arg0, String arg1, int arg2) { return 0; } public int RTRollback(String arg0, String arg1, String arg2) { return 0; } public int RTSetVersions(String arg0, int arg1) { return 0; } public int RTTrackTable(String arg0, String arg1, int arg2) { return 0; } public int RTUnlockRecord(String arg0, String arg1) { return 0; } public int RTUntrackTable(String arg0) { return 0; } public IList ReadEvent(String arg0, String arg1) { return null; } public String ReadPage(String arg0, FTValList arg1) { return null; } public boolean RegisterList(String arg0, IList arg1) { return false; } public void RemoveCounter(String arg0) { } public void RemoveSSVar(String arg0) { } public void RemoveVar(String arg0) { } public boolean RenameList(String arg0, String arg1) { return false; } public String ResolveVariables(String arg0) { return null; } public String ResolveVariables(String arg0, boolean arg1) { return null; } public boolean RestoreProperty(boolean arg0) { return false; } public boolean RollbackBatchedCommands(Object arg0) { return false; } public IList SQL(PreparedStmt arg0, StatementParam arg1, boolean arg2) { return null; } public IList SQL(String arg0, String arg1, String arg2, int arg3, boolean arg4, StringBuffer arg5) { return null; } public IList SQL(String arg0, String arg1, String arg2, int arg3, boolean arg4, boolean arg5, StringBuffer arg6) { return null; } public String SQLExp(String arg0, String arg1, String arg2, String arg3, String arg4) { return null; } public String SQLExp(String arg0, String arg1, String arg2, String arg3, String arg4, String arg5) { return null; } public IList Search(String arg0, String arg1, String arg2, String arg3, int arg4, FTValList arg5, String arg6, String arg7, String arg8, StringBuffer arg9) { return null; } public boolean SearchDateToNative(String arg0, StringBuffer arg1, String arg2, String arg3, StringBuffer arg4) { return false; } public IList SelectTo(String arg0, String arg1, String arg2, String arg3, int arg4, String arg5, boolean arg6, StringBuffer arg7) { return null; } public boolean SendMail(String arg0, String arg1, String arg2) { return false; } public boolean SendMail(String arg0, String arg1, String arg2, String arg3, String arg4) { return false; } public boolean SendMail(String arg0, String arg1, String arg2, String arg3, String arg4, String arg5, FTValList arg6) { return false; } public boolean SessionExists(String arg0) { return false; } public String SessionID() { return null; } 
public boolean SetCookie(String arg0, String arg1, int arg2, String arg3, String arg4, boolean arg5) { return false; } public void SetCounter(String arg0, int arg1) throws Exception { } public void SetErrno(int arg0) { } public boolean SetObj(String arg0, Object arg1) { return false; } public void SetSSVar(String arg0, String arg1) { } public void SetSSVar(String arg0, int arg1) { } public void SetVar(String arg0, String arg1) { } public void SetVar(String arg0, int arg1) { } public void SetVar(String arg0, FTVAL arg1) { } public Object StartBatchContext() { return null; } public void StreamBinary(byte[] arg0, int arg1, int arg2) { } public void StreamEvalBytes(String arg0) { } public void StreamHeader(String arg0, String arg1) { } public void StreamText(String arg0) { } public void ThrowException() { } public boolean TreeManager(FTValList arg0) { return false; } public boolean TreeManager(FTValList arg0, Object arg1) { return false; } public boolean UserIsMember(String arg0) { return false; } public void close() { } public boolean dbDebug() { return false; } @SuppressWarnings("rawtypes") public void decode(String arg0, Map arg1) { } public IJSPObject deployJSPData(String arg0, String arg1, StringBuffer arg2) { return null; } public IJSPObject deployJSPFile(String arg0, String arg1, StringBuffer arg2) { return null; } public String diskFileName() { return null; } public String diskFileName(String arg0, FTValList arg1) { return null; } public String diskFileName(String arg0, String arg1) { return null; } @SuppressWarnings("rawtypes") public String encode(String arg0, Map arg1, boolean arg2) { return null; } public boolean eventDebug() { return false; } public String genID(boolean arg0) { return null; } public Object getAttribute(String arg0) { return null; } @SuppressWarnings("rawtypes") public Enumeration getAttributeNames() { return null; } public ftErrors getComplexError() { return null; } public String getCookie(String arg0) { return null; } public IProperties getIProperties() { return null; } public IServlet getIServlet() { return null; } public String getLocaleString(String arg0, String arg1) { return null; } public String getNamespace() { return null; } public PageData getPageData(String arg0) { return null; } public PastramiEngine getPastramiEngine() { return null; } public Satellite getSatellite(String arg0) { return null; } public int getTrackingStatus(String arg0, String arg1) { return 0; } public String getURL(IURLDefinition arg0) { return null; } public String getURL(Definition arg0, String arg1) { return null; } public Principal getUserPrincipal() { return null; } public byte[] grabCacheStatus() { return null; } public FTValList grabHeaders() { return null; } public boolean ioErrorThrown() { return false; } public boolean isCacheable(String arg0) { return false; } public String literal(String arg0, String arg1, String arg2) { return null; } public String[] pageCriteriaKeys(String arg0) { return null; } public String pageURL() { return null; } public String pageURL(String arg0, FTValList arg1) { return null; } public boolean pastramiDebug() { return false; } public boolean pgCacheDebug() { return false; } public void removeAttribute(String arg0) { } public boolean rsCacheDebug() { return false; } public String runTag(String arg0, FTValList arg1) { return null; } public boolean sessionDebug() { return false; } public void setAttribute(String arg0, Object arg1) { } public void setComplexError(ftErrors arg0) { } public boolean syncDebug() { return false; } public boolean 
systemDebug() { return false; } public boolean systemSession() { return false; } public boolean timeDebug() { return false; } public boolean xmlDebug() { return false; } public IList CatalogIndexDef(String arg0, String arg1, StringBuffer arg2) { return null; } @Override public boolean BlobServer(FTValList arg0, OutputStream arg1) { return false; } @Override public boolean IsTrackedNewFormat(String arg0) { return false; } @Override public String getElementArgumentValue(String arg0, String arg1) { return null; } }
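// Illustrative sketch (not part of the original file): because MockICS stubs every ICS method, a unit
// test can subclass it anonymously and override only the calls the code under test actually makes.
// The variable name and returned value below are hypothetical.
import COM.FutureTense.Interfaces.ICS;
import com.fatwire.gst.foundation.test.MockICS;

public class MockIcsUsageSketch {
    public static void main(String[] args) {
        ICS ics = new MockICS() {
            @Override
            public String GetVar(String name) {
                // Only GetVar has behavior; every other ICS call falls through to the no-op stub.
                return "cid".equals(name) ? "1234567890" : null;
            }
        };
        System.out.println(ics.GetVar("cid"));
    }
}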
/* * Copyright 2014 The Netty Project * * The Netty Project licenses this file to you under the Apache License, * version 2.0 (the "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package io.netty.handler.codec.http; import org.junit.Test; import java.util.Date; import java.util.Iterator; import java.util.Set; import static org.junit.Assert.*; public class ServerCookieDecoderTest { @Test public void testDecodingSingleCookie() { String cookieString = "myCookie=myValue"; cookieString = cookieString.replace("XXX", HttpHeaderDateFormat.get().format(new Date(System.currentTimeMillis() + 50000))); Set<Cookie> cookies = ServerCookieDecoder.decode(cookieString); assertEquals(1, cookies.size()); Cookie cookie = cookies.iterator().next(); assertNotNull(cookie); assertEquals("myValue", cookie.value()); } @Test public void testDecodingMultipleCookies() { String c1 = "myCookie=myValue;"; String c2 = "myCookie2=myValue2;"; String c3 = "myCookie3=myValue3;"; Set<Cookie> cookies = ServerCookieDecoder.decode(c1 + c2 + c3); assertEquals(3, cookies.size()); Iterator<Cookie> it = cookies.iterator(); Cookie cookie = it.next(); assertNotNull(cookie); assertEquals("myValue", cookie.value()); cookie = it.next(); assertNotNull(cookie); assertEquals("myValue2", cookie.value()); cookie = it.next(); assertNotNull(cookie); assertEquals("myValue3", cookie.value()); } @Test public void testDecodingQuotedCookie() { String source = "a=\"\";" + "b=\"1\";" + "c=\"\\\"1\\\"2\\\"\";" + "d=\"1\\\"2\\\"3\";" + "e=\"\\\"\\\"\";" + "f=\"1\\\"\\\"2\";" + "g=\"\\\\\";" + "h=\"';,\\x\""; Set<Cookie> cookies = ServerCookieDecoder.decode(source); Iterator<Cookie> it = cookies.iterator(); Cookie c; c = it.next(); assertEquals("a", c.name()); assertEquals("", c.value()); c = it.next(); assertEquals("b", c.name()); assertEquals("1", c.value()); c = it.next(); assertEquals("c", c.name()); assertEquals("\"1\"2\"", c.value()); c = it.next(); assertEquals("d", c.name()); assertEquals("1\"2\"3", c.value()); c = it.next(); assertEquals("e", c.name()); assertEquals("\"\"", c.value()); c = it.next(); assertEquals("f", c.name()); assertEquals("1\"\"2", c.value()); c = it.next(); assertEquals("g", c.name()); assertEquals("\\", c.value()); c = it.next(); assertEquals("h", c.name()); assertEquals("';,\\x", c.value()); assertFalse(it.hasNext()); } @Test public void testDecodingGoogleAnalyticsCookie() { String source = "ARPT=LWUKQPSWRTUN04CKKJI; " + "kw-2E343B92-B097-442c-BFA5-BE371E0325A2=unfinished furniture; " + "__utma=48461872.1094088325.1258140131.1258140131.1258140131.1; " + "__utmb=48461872.13.10.1258140131; __utmc=48461872; " + "__utmz=48461872.1258140131.1.1.utmcsr=overstock.com|utmccn=(referral)|" + "utmcmd=referral|utmcct=/Home-Garden/Furniture/Clearance,/clearance,/32/dept.html"; Set<Cookie> cookies = ServerCookieDecoder.decode(source); Iterator<Cookie> it = cookies.iterator(); Cookie c; c = it.next(); assertEquals("__utma", c.name()); assertEquals("48461872.1094088325.1258140131.1258140131.1258140131.1", c.value()); c = it.next(); assertEquals("__utmb", c.name()); assertEquals("48461872.13.10.1258140131", 
c.value()); c = it.next(); assertEquals("__utmc", c.name()); assertEquals("48461872", c.value()); c = it.next(); assertEquals("__utmz", c.name()); assertEquals("48461872.1258140131.1.1.utmcsr=overstock.com|" + "utmccn=(referral)|utmcmd=referral|utmcct=/Home-Garden/Furniture/Clearance,/clearance,/32/dept.html", c.value()); c = it.next(); assertEquals("ARPT", c.name()); assertEquals("LWUKQPSWRTUN04CKKJI", c.value()); c = it.next(); assertEquals("kw-2E343B92-B097-442c-BFA5-BE371E0325A2", c.name()); assertEquals("unfinished furniture", c.value()); assertFalse(it.hasNext()); } @Test public void testDecodingLongValue() { String longValue = "b!!!$Q!!$ha!!<NC=MN(F!!%#4!!<NC=MN(F!!2!d!!!!#=IvZB!!2,F!!!!'=KqtH!!2-9!!!!" + "'=IvZM!!3f:!!!!$=HbQW!!3g'!!!!%=J^wI!!3g-!!!!%=J^wI!!3g1!!!!$=HbQW!!3g2!!!!" + "$=HbQW!!3g5!!!!%=J^wI!!3g9!!!!$=HbQW!!3gT!!!!$=HbQW!!3gX!!!!#=J^wI!!3gY!!!!" + "#=J^wI!!3gh!!!!$=HbQW!!3gj!!!!$=HbQW!!3gr!!!!$=HbQW!!3gx!!!!#=J^wI!!3h!!!!!" + "$=HbQW!!3h$!!!!#=J^wI!!3h'!!!!$=HbQW!!3h,!!!!$=HbQW!!3h0!!!!%=J^wI!!3h1!!!!" + "#=J^wI!!3h2!!!!$=HbQW!!3h4!!!!$=HbQW!!3h7!!!!$=HbQW!!3h8!!!!%=J^wI!!3h:!!!!" + "#=J^wI!!3h@!!!!%=J^wI!!3hB!!!!$=HbQW!!3hC!!!!$=HbQW!!3hL!!!!$=HbQW!!3hQ!!!!" + "$=HbQW!!3hS!!!!%=J^wI!!3hU!!!!$=HbQW!!3h[!!!!$=HbQW!!3h^!!!!$=HbQW!!3hd!!!!" + "%=J^wI!!3he!!!!%=J^wI!!3hf!!!!%=J^wI!!3hg!!!!$=HbQW!!3hh!!!!%=J^wI!!3hi!!!!" + "%=J^wI!!3hv!!!!$=HbQW!!3i/!!!!#=J^wI!!3i2!!!!#=J^wI!!3i3!!!!%=J^wI!!3i4!!!!" + "$=HbQW!!3i7!!!!$=HbQW!!3i8!!!!$=HbQW!!3i9!!!!%=J^wI!!3i=!!!!#=J^wI!!3i>!!!!" + "%=J^wI!!3iD!!!!$=HbQW!!3iF!!!!#=J^wI!!3iH!!!!%=J^wI!!3iM!!!!%=J^wI!!3iS!!!!" + "#=J^wI!!3iU!!!!%=J^wI!!3iZ!!!!#=J^wI!!3i]!!!!%=J^wI!!3ig!!!!%=J^wI!!3ij!!!!" + "%=J^wI!!3ik!!!!#=J^wI!!3il!!!!$=HbQW!!3in!!!!%=J^wI!!3ip!!!!$=HbQW!!3iq!!!!" + "$=HbQW!!3it!!!!%=J^wI!!3ix!!!!#=J^wI!!3j!!!!!$=HbQW!!3j%!!!!$=HbQW!!3j'!!!!" + "%=J^wI!!3j(!!!!%=J^wI!!9mJ!!!!'=KqtH!!=SE!!<NC=MN(F!!?VS!!<NC=MN(F!!Zw`!!!!" + "%=KqtH!!j+C!!<NC=MN(F!!j+M!!<NC=MN(F!!j+a!!<NC=MN(F!!j,.!!<NC=MN(F!!n>M!!!!" + "'=KqtH!!s1X!!!!$=MMyc!!s1_!!!!#=MN#O!!ypn!!!!'=KqtH!!ypr!!!!'=KqtH!#%h!!!!!" + "%=KqtH!#%o!!!!!'=KqtH!#)H6!!<NC=MN(F!#*%'!!!!%=KqtH!#+k(!!!!'=KqtH!#-E!!!!!" + "'=KqtH!#1)w!!!!'=KqtH!#1)y!!!!'=KqtH!#1*M!!!!#=KqtH!#1*p!!!!'=KqtH!#14Q!!<N" + "C=MN(F!#14S!!<NC=MN(F!#16I!!<NC=MN(F!#16N!!<NC=MN(F!#16X!!<NC=MN(F!#16k!!<N" + "C=MN(F!#17@!!<NC=MN(F!#17A!!<NC=MN(F!#1Cq!!!!'=KqtH!#7),!!!!#=KqtH!#7)b!!!!" + "#=KqtH!#7Ww!!!!'=KqtH!#?cQ!!!!'=KqtH!#His!!!!'=KqtH!#Jrh!!!!'=KqtH!#O@M!!<N" + "C=MN(F!#O@O!!<NC=MN(F!#OC6!!<NC=MN(F!#Os.!!!!#=KqtH!#YOW!!!!#=H/Li!#Zat!!!!" + "'=KqtH!#ZbI!!!!%=KqtH!#Zbc!!!!'=KqtH!#Zbs!!!!%=KqtH!#Zby!!!!'=KqtH!#Zce!!!!" + "'=KqtH!#Zdc!!!!%=KqtH!#Zea!!!!'=KqtH!#ZhI!!!!#=KqtH!#ZiD!!!!'=KqtH!#Zis!!!!" + "'=KqtH!#Zj0!!!!#=KqtH!#Zj1!!!!'=KqtH!#Zj[!!!!'=KqtH!#Zj]!!!!'=KqtH!#Zj^!!!!" + "'=KqtH!#Zjb!!!!'=KqtH!#Zk!!!!!'=KqtH!#Zk6!!!!#=KqtH!#Zk9!!!!%=KqtH!#Zk<!!!!" + "'=KqtH!#Zl>!!!!'=KqtH!#]9R!!!!$=H/Lt!#]I6!!!!#=KqtH!#]Z#!!!!%=KqtH!#^*N!!!!" + "#=KqtH!#^:m!!!!#=KqtH!#_*_!!!!%=J^wI!#`-7!!!!#=KqtH!#`T>!!!!'=KqtH!#`T?!!!!" + "'=KqtH!#`TA!!!!'=KqtH!#`TB!!!!'=KqtH!#`TG!!!!'=KqtH!#`TP!!!!#=KqtH!#`U,!!!!" + "'=KqtH!#`U/!!!!'=KqtH!#`U0!!!!#=KqtH!#`U9!!!!'=KqtH!#aEQ!!!!%=KqtH!#b<)!!!!" + "'=KqtH!#c9-!!!!%=KqtH!#dxC!!!!%=KqtH!#dxE!!!!%=KqtH!#ev$!!!!'=KqtH!#fBi!!!!" + "#=KqtH!#fBj!!!!'=KqtH!#fG)!!!!'=KqtH!#fG+!!!!'=KqtH!#g<d!!!!'=KqtH!#g<e!!!!" + "'=KqtH!#g=J!!!!'=KqtH!#gat!!!!#=KqtH!#s`D!!!!#=J_#p!#sg?!!!!#=J_#p!#t<a!!!!" + "#=KqtH!#t<c!!!!#=KqtH!#trY!!!!$=JiYj!#vA$!!!!'=KqtH!#xs_!!!!'=KqtH!$$rO!!!!" 
+ "#=KqtH!$$rP!!!!#=KqtH!$(!%!!!!'=KqtH!$)]o!!!!%=KqtH!$,@)!!!!'=KqtH!$,k]!!!!" + "'=KqtH!$1]+!!!!%=KqtH!$3IO!!!!%=KqtH!$3J#!!!!'=KqtH!$3J.!!!!'=KqtH!$3J:!!!!" + "#=KqtH!$3JH!!!!#=KqtH!$3JI!!!!#=KqtH!$3JK!!!!%=KqtH!$3JL!!!!'=KqtH!$3JS!!!!" + "'=KqtH!$8+M!!!!#=KqtH!$99d!!!!%=KqtH!$:Lw!!!!#=LK+x!$:N@!!!!#=KqtG!$:NC!!!!" + "#=KqtG!$:hW!!!!'=KqtH!$:i[!!!!'=KqtH!$:ih!!!!'=KqtH!$:it!!!!'=KqtH!$:kO!!!!" + "'=KqtH!$>*B!!!!'=KqtH!$>hD!!!!+=J^x0!$?lW!!!!'=KqtH!$?ll!!!!'=KqtH!$?lm!!!!" + "%=KqtH!$?mi!!!!'=KqtH!$?mx!!!!'=KqtH!$D7]!!!!#=J_#p!$D@T!!!!#=J_#p!$V<g!!!!" + "'=KqtH"; Set<Cookie> cookies = ServerCookieDecoder.decode("bh=\"" + longValue + "\";"); assertEquals(1, cookies.size()); Cookie c = cookies.iterator().next(); assertEquals("bh", c.name()); assertEquals(longValue, c.value()); } @Test public void testDecodingOldRFC2965Cookies() { String source = "$Version=\"1\"; " + "Part_Number1=\"Riding_Rocket_0023\"; $Path=\"/acme/ammo\"; " + "Part_Number2=\"Rocket_Launcher_0001\"; $Path=\"/acme\""; Set<Cookie> cookies = ServerCookieDecoder.decode(source); Iterator<Cookie> it = cookies.iterator(); Cookie c; c = it.next(); assertEquals("Part_Number1", c.name()); assertEquals("Riding_Rocket_0023", c.value()); c = it.next(); assertEquals("Part_Number2", c.name()); assertEquals("Rocket_Launcher_0001", c.value()); assertFalse(it.hasNext()); } }
// File generated from our OpenAPI spec package com.stripe.param; import com.google.gson.annotations.SerializedName; import com.stripe.net.ApiRequestParams; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import lombok.Getter; @Getter public class ShippingRateListParams extends ApiRequestParams { /** Only return shipping rates that are active or inactive. */ @SerializedName("active") Boolean active; /** * A filter on the list, based on the object {@code created} field. The value can be a string with * an integer Unix timestamp, or it can be a dictionary with a number of different query options. */ @SerializedName("created") Object created; /** Only return shipping rates for the given currency. */ @SerializedName("currency") String currency; /** * A cursor for use in pagination. {@code ending_before} is an object ID that defines your place * in the list. For instance, if you make a list request and receive 100 objects, starting with * {@code obj_bar}, your subsequent call can include {@code ending_before=obj_bar} in order to * fetch the previous page of the list. */ @SerializedName("ending_before") String endingBefore; /** Specifies which fields in the response should be expanded. */ @SerializedName("expand") List<String> expand; /** * Map of extra parameters for custom features not available in this client library. The content * in this map is not serialized under this field's {@code @SerializedName} value. Instead, each * key/value pair is serialized as if the key is a root-level field (serialized) name in this * param object. Effectively, this map is flattened to its parent instance. */ @SerializedName(ApiRequestParams.EXTRA_PARAMS_KEY) Map<String, Object> extraParams; /** * A limit on the number of objects to be returned. Limit can range between 1 and 100, and the * default is 10. */ @SerializedName("limit") Long limit; /** * A cursor for use in pagination. {@code starting_after} is an object ID that defines your place * in the list. For instance, if you make a list request and receive 100 objects, ending with * {@code obj_foo}, your subsequent call can include {@code starting_after=obj_foo} in order to * fetch the next page of the list. */ @SerializedName("starting_after") String startingAfter; private ShippingRateListParams( Boolean active, Object created, String currency, String endingBefore, List<String> expand, Map<String, Object> extraParams, Long limit, String startingAfter) { this.active = active; this.created = created; this.currency = currency; this.endingBefore = endingBefore; this.expand = expand; this.extraParams = extraParams; this.limit = limit; this.startingAfter = startingAfter; } public static Builder builder() { return new Builder(); } public static class Builder { private Boolean active; private Object created; private String currency; private String endingBefore; private List<String> expand; private Map<String, Object> extraParams; private Long limit; private String startingAfter; /** Finalize and obtain parameter instance from this builder. */ public ShippingRateListParams build() { return new ShippingRateListParams( this.active, this.created, this.currency, this.endingBefore, this.expand, this.extraParams, this.limit, this.startingAfter); } /** Only return shipping rates that are active or inactive. */ public Builder setActive(Boolean active) { this.active = active; return this; } /** * A filter on the list, based on the object {@code created} field. 
The value can be a string * with an integer Unix timestamp, or it can be a dictionary with a number of different query * options. */ public Builder setCreated(Created created) { this.created = created; return this; } /** * A filter on the list, based on the object {@code created} field. The value can be a string * with an integer Unix timestamp, or it can be a dictionary with a number of different query * options. */ public Builder setCreated(Long created) { this.created = created; return this; } /** Only return shipping rates for the given currency. */ public Builder setCurrency(String currency) { this.currency = currency; return this; } /** * A cursor for use in pagination. {@code ending_before} is an object ID that defines your place * in the list. For instance, if you make a list request and receive 100 objects, starting with * {@code obj_bar}, your subsequent call can include {@code ending_before=obj_bar} in order to * fetch the previous page of the list. */ public Builder setEndingBefore(String endingBefore) { this.endingBefore = endingBefore; return this; } /** * Add an element to `expand` list. A list is initialized for the first `add/addAll` call, and * subsequent calls adds additional elements to the original list. See {@link * ShippingRateListParams#expand} for the field documentation. */ public Builder addExpand(String element) { if (this.expand == null) { this.expand = new ArrayList<>(); } this.expand.add(element); return this; } /** * Add all elements to `expand` list. A list is initialized for the first `add/addAll` call, and * subsequent calls adds additional elements to the original list. See {@link * ShippingRateListParams#expand} for the field documentation. */ public Builder addAllExpand(List<String> elements) { if (this.expand == null) { this.expand = new ArrayList<>(); } this.expand.addAll(elements); return this; } /** * Add a key/value pair to `extraParams` map. A map is initialized for the first `put/putAll` * call, and subsequent calls add additional key/value pairs to the original map. See {@link * ShippingRateListParams#extraParams} for the field documentation. */ public Builder putExtraParam(String key, Object value) { if (this.extraParams == null) { this.extraParams = new HashMap<>(); } this.extraParams.put(key, value); return this; } /** * Add all map key/value pairs to `extraParams` map. A map is initialized for the first * `put/putAll` call, and subsequent calls add additional key/value pairs to the original map. * See {@link ShippingRateListParams#extraParams} for the field documentation. */ public Builder putAllExtraParam(Map<String, Object> map) { if (this.extraParams == null) { this.extraParams = new HashMap<>(); } this.extraParams.putAll(map); return this; } /** * A limit on the number of objects to be returned. Limit can range between 1 and 100, and the * default is 10. */ public Builder setLimit(Long limit) { this.limit = limit; return this; } /** * A cursor for use in pagination. {@code starting_after} is an object ID that defines your * place in the list. For instance, if you make a list request and receive 100 objects, ending * with {@code obj_foo}, your subsequent call can include {@code starting_after=obj_foo} in * order to fetch the next page of the list. */ public Builder setStartingAfter(String startingAfter) { this.startingAfter = startingAfter; return this; } } @Getter public static class Created { /** * Map of extra parameters for custom features not available in this client library. 
The content * in this map is not serialized under this field's {@code @SerializedName} value. Instead, each * key/value pair is serialized as if the key is a root-level field (serialized) name in this * param object. Effectively, this map is flattened to its parent instance. */ @SerializedName(ApiRequestParams.EXTRA_PARAMS_KEY) Map<String, Object> extraParams; /** Minimum value to filter by (exclusive). */ @SerializedName("gt") Long gt; /** Minimum value to filter by (inclusive). */ @SerializedName("gte") Long gte; /** Maximum value to filter by (exclusive). */ @SerializedName("lt") Long lt; /** Maximum value to filter by (inclusive). */ @SerializedName("lte") Long lte; private Created(Map<String, Object> extraParams, Long gt, Long gte, Long lt, Long lte) { this.extraParams = extraParams; this.gt = gt; this.gte = gte; this.lt = lt; this.lte = lte; } public static Builder builder() { return new Builder(); } public static class Builder { private Map<String, Object> extraParams; private Long gt; private Long gte; private Long lt; private Long lte; /** Finalize and obtain parameter instance from this builder. */ public Created build() { return new Created(this.extraParams, this.gt, this.gte, this.lt, this.lte); } /** * Add a key/value pair to `extraParams` map. A map is initialized for the first `put/putAll` * call, and subsequent calls add additional key/value pairs to the original map. See {@link * ShippingRateListParams.Created#extraParams} for the field documentation. */ public Builder putExtraParam(String key, Object value) { if (this.extraParams == null) { this.extraParams = new HashMap<>(); } this.extraParams.put(key, value); return this; } /** * Add all map key/value pairs to `extraParams` map. A map is initialized for the first * `put/putAll` call, and subsequent calls add additional key/value pairs to the original map. * See {@link ShippingRateListParams.Created#extraParams} for the field documentation. */ public Builder putAllExtraParam(Map<String, Object> map) { if (this.extraParams == null) { this.extraParams = new HashMap<>(); } this.extraParams.putAll(map); return this; } /** Minimum value to filter by (exclusive). */ public Builder setGt(Long gt) { this.gt = gt; return this; } /** Minimum value to filter by (inclusive). */ public Builder setGte(Long gte) { this.gte = gte; return this; } /** Maximum value to filter by (exclusive). */ public Builder setLt(Long lt) { this.lt = lt; return this; } /** Maximum value to filter by (inclusive). */ public Builder setLte(Long lte) { this.lte = lte; return this; } } } }
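// Illustrative sketch (not generated from the OpenAPI spec): building the list parameters above with
// the nested Created range filter. How the params object is then passed to the API (for example a
// ShippingRate.list(...) call in the standard stripe-java pattern) is outside this file and only assumed.
import com.stripe.param.ShippingRateListParams;

public class ShippingRateListParamsSketch {
    public static void main(String[] args) {
        ShippingRateListParams params =
            ShippingRateListParams.builder()
                .setActive(true)
                .setCurrency("usd")
                .setLimit(3L)
                .setCreated(
                    ShippingRateListParams.Created.builder()
                        .setGte(1640995200L) // created on or after 2022-01-01 UTC
                        .build())
                .build();

        // Lombok @Getter generates accessors for each field set above.
        System.out.println(params.getLimit());
    }
}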
/* * Autopsy Forensic Browser * * Copyright 2012-2015 Basis Technology Corp. * Contact: carrier <at> sleuthkit <dot> org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.autopsy.coreutils; import java.io.IOException; import java.nio.file.Paths; import java.util.logging.FileHandler; import java.util.logging.Formatter; import java.util.logging.Handler; import java.sql.Timestamp; import java.util.Date; import java.util.HashMap; import java.util.Map; import java.util.logging.LogRecord; /** * Autopsy specialization of the Java Logger class with custom file handlers. * Note that the custom loggers are not obtained from the global log manager. */ public final class Logger extends java.util.logging.Logger { private static final String LOG_ENCODING = PlatformUtil.getLogFileEncoding(); private static final int LOG_SIZE = 0; // In bytes, zero is unlimited private static final int LOG_FILE_COUNT = 10; private static final String LOG_WITHOUT_STACK_TRACES = "autopsy.log"; //NON-NLS private static final String LOG_WITH_STACK_TRACES = "autopsy_traces.log"; //NON-NLS private static final Map<String, Logger> namesToLoggers = new HashMap<>(); private static final Handler consoleHandler = new java.util.logging.ConsoleHandler(); private static FileHandler userFriendlyHandler = createFileHandlerWithoutTraces(PlatformUtil.getLogDirectory()); private static FileHandler developerFriendlyHandler = createFileHandlerWithTraces(PlatformUtil.getLogDirectory()); /** * Creates a custom file handler with a custom message formatter that does * not include stack traces. * * @param logDirectory The directory where the log files should reside. * * @return A custom file handler. */ private static FileHandler createFileHandlerWithoutTraces(String logDirectory) { String logFilePath = Paths.get(logDirectory, LOG_WITHOUT_STACK_TRACES).toString(); try { FileHandler fileHandler = new FileHandler(logFilePath, LOG_SIZE, LOG_FILE_COUNT); fileHandler.setEncoding(LOG_ENCODING); fileHandler.setFormatter(new Formatter() { @Override public String format(LogRecord record) { return (new Date(record.getMillis())).toString() + " " + record.getSourceClassName() + " " + record.getSourceMethodName() + "\n" + record.getLevel() + ": " + this.formatMessage(record) + "\n"; } }); return fileHandler; } catch (IOException ex) { throw new RuntimeException(String.format("Error initializing file handler for %s", logFilePath), ex); //NON-NLS } } /** * Creates a custom file handler with a custom message formatter that * incldues stack traces. * * @param logDirectory The directory where the log files should reside. * * @return A custom file handler. 
*/ private static FileHandler createFileHandlerWithTraces(String logDirectory) { String logFilePath = Paths.get(logDirectory, LOG_WITH_STACK_TRACES).toString(); try { FileHandler fileHandler = new FileHandler(logFilePath, LOG_SIZE, LOG_FILE_COUNT); fileHandler.setEncoding(LOG_ENCODING); fileHandler.setFormatter(new Formatter() { @Override public String format(LogRecord record) { if (record.getThrown() != null) { String stackTrace = ""; //NON-NLS for (StackTraceElement traceElem : record.getThrown().getStackTrace()) { stackTrace += "\t" + traceElem.toString() + "\n"; //NON-NLS } return (new Timestamp(record.getMillis())).toString() + " " //NON-NLS + record.getSourceClassName() + " " //NON-NLS + record.getSourceMethodName() + "\n" //NON-NLS + record.getLevel() + ": " //NON-NLS + this.formatMessage(record) + "\n" //NON-NLS + record.getThrown().toString() + ":\n" //NON-NLS + stackTrace + "\n"; //NON-NLS } else { return (new Timestamp(record.getMillis())).toString() + " " //NON-NLS + record.getSourceClassName() + " " //NON-NLS + record.getSourceMethodName() + "\n" //NON-NLS + record.getLevel() + ": " //NON-NLS + this.formatMessage(record) + "\n"; //NON-NLS } } }); return fileHandler; } catch (IOException ex) { throw new RuntimeException(String.format("Error initializing file handler for %s", logFilePath), ex); //NON-NLS } } /** * Sets the log directory where the log files will be written. * * @param directoryPath The path to the desired log directory as a string. */ synchronized public static void setLogDirectory(String directoryPath) { /* * Create file handlers for the new directory and swap them into all of * the existing loggers using thread-safe Logger methods. The new * handlers are added before the old handlers so that no messages will * be lost, but this makes it possible for log messages to be written * via the old handlers if logging calls are interleaved with the * add/remove handler calls (currently, the base class handlers * collection is a CopyOnWriteArrayList). */ FileHandler newUserFriendlyHandler = createFileHandlerWithoutTraces(directoryPath); FileHandler newDeveloperFriendlyHandler = createFileHandlerWithTraces(directoryPath); for (Logger logger : namesToLoggers.values()) { logger.addHandler(newUserFriendlyHandler); logger.addHandler(newDeveloperFriendlyHandler); logger.removeHandler(userFriendlyHandler); logger.removeHandler(developerFriendlyHandler); } /* * Close the old file handlers and save references to the new handlers * so they can be added to any new loggers. This swap is why this method * and the two overloads of getLogger() are synchronized, serializing * access to userFriendlyHandler and developerFriendlyHandler. */ userFriendlyHandler.close(); userFriendlyHandler = newUserFriendlyHandler; developerFriendlyHandler.close(); developerFriendlyHandler = newDeveloperFriendlyHandler; } /** * Finds or creates a customized logger. Hides the base class factory * method. * * @param name A name for the logger. This should normally be a * dot-separated name based on a package name or class name. * * @return org.sleuthkit.autopsy.coreutils.Logger instance */ synchronized public static Logger getLogger(String name) { return getLogger(name, null); } /** * Finds or creates a customized logger. Hides the base class factory * method. * * @param name A name for the logger. This should normally be * a dot-separated name based on a package name or * class name. * @param resourceBundleName Name of ResourceBundle to be used for * localizing messages for this logger. May be * null.
* * @return org.sleuthkit.autopsy.coreutils.Logger instance */ synchronized public static Logger getLogger(String name, String resourceBundleName) { if (!namesToLoggers.containsKey(name)) { Logger logger = new Logger(name, resourceBundleName); logger.addHandler(userFriendlyHandler); logger.addHandler(developerFriendlyHandler); namesToLoggers.put(name, logger); } return namesToLoggers.get(name); } /** * Constructs a customized logger. * * @param name A name for the logger. This should normally be * a dot-separated name based on a package name or * class name. * @param resourceBundleName Name of ResourceBundle to be used for * localizing messages for this logger. May be * null. */ private Logger(String name, String resourceBundleName) { super(name, resourceBundleName); super.setUseParentHandlers(false); if (Version.getBuildType() == Version.Type.DEVELOPMENT) { super.addHandler(consoleHandler); } } }
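// Illustrative sketch (not part of the Autopsy sources, and only runnable inside the Autopsy platform):
// typical use of the customized logger above from module code. The class used to name the logger is
// hypothetical; getLogger returns the same instance for repeated calls with the same name.
import java.util.logging.Level;
import org.sleuthkit.autopsy.coreutils.Logger;

public class LoggerUsageSketch {
    public static void main(String[] args) {
        Logger logger = Logger.getLogger(LoggerUsageSketch.class.getName());
        logger.log(Level.INFO, "message goes to autopsy.log and autopsy_traces.log"); //NON-NLS
        try {
            throw new IllegalStateException("example failure");
        } catch (IllegalStateException ex) {
            // Only the handler behind autopsy_traces.log formats the attached stack trace.
            logger.log(Level.SEVERE, "operation failed", ex); //NON-NLS
        }
    }
}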
/* * Copyright 2016 Red Hat, Inc. and/or its affiliates * and other contributors as indicated by the @author tags. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.keycloak.subsystem.server.extension; import org.jboss.as.controller.AttributeDefinition; import org.jboss.as.controller.PathAddress; import org.jboss.as.controller.PathElement; import org.jboss.as.controller.PropertiesAttributeDefinition; import org.jboss.as.controller.SimpleAttributeDefinition; import org.jboss.as.controller.descriptions.ModelDescriptionConstants; import org.jboss.as.controller.operations.common.Util; import org.jboss.as.controller.parsing.ParseUtils; import org.jboss.as.controller.persistence.SubsystemMarshallingContext; import org.jboss.dmr.ModelNode; import org.jboss.dmr.Property; import org.jboss.staxmapper.XMLElementReader; import org.jboss.staxmapper.XMLElementWriter; import org.jboss.staxmapper.XMLExtendedStreamReader; import org.jboss.staxmapper.XMLExtendedStreamWriter; import javax.xml.stream.XMLStreamConstants; import javax.xml.stream.XMLStreamException; import java.util.List; import static org.keycloak.subsystem.server.extension.KeycloakExtension.PATH_SUBSYSTEM; import static org.keycloak.subsystem.server.extension.KeycloakSubsystemDefinition.MASTER_REALM_NAME; import static org.keycloak.subsystem.server.extension.KeycloakSubsystemDefinition.PROVIDERS; import static org.keycloak.subsystem.server.extension.KeycloakSubsystemDefinition.SCHEDULED_TASK_INTERVAL; import static org.keycloak.subsystem.server.extension.KeycloakSubsystemDefinition.WEB_CONTEXT; import static org.keycloak.subsystem.server.extension.ProviderResourceDefinition.ENABLED; import static org.keycloak.subsystem.server.extension.ProviderResourceDefinition.PROPERTIES; import static org.keycloak.subsystem.server.extension.SpiResourceDefinition.DEFAULT_PROVIDER; import static org.keycloak.subsystem.server.extension.ThemeResourceDefinition.MODULES; /** * The subsystem parser, which uses stax to read and write to and from xml */ class KeycloakSubsystemParser implements XMLStreamConstants, XMLElementReader<List<ModelNode>>, XMLElementWriter<SubsystemMarshallingContext> { /** * {@inheritDoc} */ @Override public void readElement(final XMLExtendedStreamReader reader, final List<ModelNode> list) throws XMLStreamException { // Require no attributes ParseUtils.requireNoAttributes(reader); ModelNode addKeycloakSub = Util.createAddOperation(PathAddress.pathAddress(PATH_SUBSYSTEM)); list.add(addKeycloakSub); while (reader.hasNext() && nextTag(reader) != END_ELEMENT) { if (reader.getLocalName().equals(WEB_CONTEXT.getXmlName())) { WEB_CONTEXT.parseAndSetParameter(reader.getElementText(), addKeycloakSub, reader); } else if (reader.getLocalName().equals(PROVIDERS.getXmlName())) { readProviders(reader, addKeycloakSub); } else if (reader.getLocalName().equals(MASTER_REALM_NAME.getXmlName())) { MASTER_REALM_NAME.parseAndSetParameter(reader.getElementText(), addKeycloakSub, reader); } else if (reader.getLocalName().equals(SCHEDULED_TASK_INTERVAL.getXmlName())) { 
SCHEDULED_TASK_INTERVAL.parseAndSetParameter(reader.getElementText(), addKeycloakSub, reader); } else if (reader.getLocalName().equals(ThemeResourceDefinition.TAG_NAME)) { readTheme(list, reader); } else if (reader.getLocalName().equals(SpiResourceDefinition.TAG_NAME)) { readSpi(list, reader); } else { throw new XMLStreamException("Unknown keycloak-server subsystem tag: " + reader.getLocalName()); } } } private void readProviders(final XMLExtendedStreamReader reader, ModelNode addKeycloakSub) throws XMLStreamException { while (reader.hasNext() && nextTag(reader) != END_ELEMENT) { PROVIDERS.parseAndAddParameterElement(reader.getElementText(),addKeycloakSub, reader); } } private void readTheme(final List<ModelNode> list, final XMLExtendedStreamReader reader) throws XMLStreamException { ModelNode addThemeDefaults = new ModelNode(); addThemeDefaults.get(ModelDescriptionConstants.OP).set(ModelDescriptionConstants.ADD); PathAddress addr = PathAddress.pathAddress(PathElement.pathElement(ModelDescriptionConstants.SUBSYSTEM, KeycloakExtension.SUBSYSTEM_NAME), PathElement.pathElement(ThemeResourceDefinition.TAG_NAME, ThemeResourceDefinition.RESOURCE_NAME)); addThemeDefaults.get(ModelDescriptionConstants.OP_ADDR).set(addr.toModelNode()); list.add(addThemeDefaults); while (reader.hasNext() && nextTag(reader) != END_ELEMENT) { String tagName = reader.getLocalName(); if (MODULES.getName().equals(tagName)) { readModules(reader, addThemeDefaults); continue; } SimpleAttributeDefinition def = KeycloakExtension.THEME_DEFINITION.lookup(tagName); if (def == null) throw new XMLStreamException("Unknown theme tag " + tagName); def.parseAndSetParameter(reader.getElementText(), addThemeDefaults, reader); } } private void readModules(final XMLExtendedStreamReader reader, ModelNode addThemeDefaults) throws XMLStreamException { while (reader.hasNext() && nextTag(reader) != END_ELEMENT) { MODULES.parseAndAddParameterElement(reader.getElementText(),addThemeDefaults, reader); } } private void readSpi(final List<ModelNode> list, final XMLExtendedStreamReader reader) throws XMLStreamException { String spiName = ParseUtils.requireAttributes(reader, "name")[0]; ModelNode addSpi = new ModelNode(); addSpi.get(ModelDescriptionConstants.OP).set(ModelDescriptionConstants.ADD); PathAddress addr = PathAddress.pathAddress(PathElement.pathElement(ModelDescriptionConstants.SUBSYSTEM, KeycloakExtension.SUBSYSTEM_NAME), PathElement.pathElement(SpiResourceDefinition.TAG_NAME, spiName)); addSpi.get(ModelDescriptionConstants.OP_ADDR).set(addr.toModelNode()); list.add(addSpi); while (reader.hasNext() && nextTag(reader) != END_ELEMENT) { if (reader.getLocalName().equals(DEFAULT_PROVIDER.getXmlName())) { DEFAULT_PROVIDER.parseAndSetParameter(reader.getElementText(), addSpi, reader); } else if (reader.getLocalName().equals(ProviderResourceDefinition.TAG_NAME)) { readProvider(list, spiName, reader); } } } private void readProvider(final List<ModelNode> list, String spiName, final XMLExtendedStreamReader reader) throws XMLStreamException { String[] attributes = ParseUtils.requireAttributes(reader, "name", ENABLED.getXmlName()); String providerName = attributes[0]; String enabled = attributes[1]; ModelNode addProvider = new ModelNode(); addProvider.get(ModelDescriptionConstants.OP).set(ModelDescriptionConstants.ADD); PathAddress addr = PathAddress.pathAddress(PathElement.pathElement(ModelDescriptionConstants.SUBSYSTEM, KeycloakExtension.SUBSYSTEM_NAME), PathElement.pathElement(SpiResourceDefinition.TAG_NAME, spiName), 
PathElement.pathElement(ProviderResourceDefinition.TAG_NAME, providerName)); addProvider.get(ModelDescriptionConstants.OP_ADDR).set(addr.toModelNode()); addProvider.get(ENABLED.getName()).set(Boolean.valueOf(enabled)); list.add(addProvider); while (nextTag(reader) != END_ELEMENT) { if (reader.getLocalName().equals(PROPERTIES.getXmlName())) { readProperties(PROPERTIES, addProvider, reader); } } } private void readProperties(final PropertiesAttributeDefinition attrDef, ModelNode addOp, final XMLExtendedStreamReader reader) throws XMLStreamException { while (nextTag(reader) != END_ELEMENT) { int attrCount = reader.getAttributeCount(); if (attrCount != 2) throw new XMLStreamException("Property must have only two attributes"); String name = ""; String value = ""; for (int i=0 ; i < 2; i++) { String attrName = reader.getAttributeLocalName(i); String attrValue = reader.getAttributeValue(i); if (attrName.equals("name")) { name = attrValue; } else if (attrName.equals("value")) { value = attrValue; } else { throw new XMLStreamException("Property can only have attributes named 'name' and 'value'"); } } attrDef.parseAndAddParameterElement(name, value, addOp, reader); nextTag(reader); } } // used for debugging private int nextTag(XMLExtendedStreamReader reader) throws XMLStreamException { return reader.nextTag(); } /** * {@inheritDoc} */ @Override public void writeContent(final XMLExtendedStreamWriter writer, final SubsystemMarshallingContext context) throws XMLStreamException { context.startSubsystemElement(KeycloakExtension.NAMESPACE, false); writeWebContext(writer, context); writeList(writer, context.getModelNode(), PROVIDERS, "provider"); writeAdmin(writer, context); writeScheduledTaskInterval(writer, context); writeThemeDefaults(writer, context); writeSpis(writer, context); writer.writeEndElement(); } private void writeThemeDefaults(XMLExtendedStreamWriter writer, SubsystemMarshallingContext context) throws XMLStreamException { if (!context.getModelNode().get(ThemeResourceDefinition.TAG_NAME).isDefined()) { return; } writer.writeStartElement(ThemeResourceDefinition.TAG_NAME); ModelNode themeElements = context.getModelNode().get(ThemeResourceDefinition.TAG_NAME, ThemeResourceDefinition.RESOURCE_NAME); for (AttributeDefinition def : ThemeResourceDefinition.ALL_ATTRIBUTES) { if (themeElements.hasDefined(def.getName())) { if (def == MODULES) { ModelNode themeContext = context.getModelNode().get("theme", "defaults"); writeList(writer, themeContext, def, "module"); } else { def.marshallAsElement(themeElements, writer); } } } writer.writeEndElement(); } private void writeSpis(XMLExtendedStreamWriter writer, SubsystemMarshallingContext context) throws XMLStreamException { if (!context.getModelNode().get(SpiResourceDefinition.TAG_NAME).isDefined()) { return; } for (Property spi : context.getModelNode().get(SpiResourceDefinition.TAG_NAME).asPropertyList()) { writer.writeStartElement(SpiResourceDefinition.TAG_NAME); writer.writeAttribute("name", spi.getName()); ModelNode spiElements = spi.getValue(); DEFAULT_PROVIDER.marshallAsElement(spiElements, writer); writeProviders(writer, spiElements); writer.writeEndElement(); } } private void writeProviders(XMLExtendedStreamWriter writer, ModelNode spiElements) throws XMLStreamException { if (!spiElements.get(ProviderResourceDefinition.TAG_NAME).isDefined()) { return; } for (Property provider : spiElements.get(ProviderResourceDefinition.TAG_NAME).asPropertyList()) { writer.writeStartElement(ProviderResourceDefinition.TAG_NAME); writer.writeAttribute("name", 
provider.getName()); ModelNode providerElements = provider.getValue(); ENABLED.marshallAsAttribute(providerElements, writer); PROPERTIES.marshallAsElement(providerElements, writer); writer.writeEndElement(); } } private void writeWebContext(XMLExtendedStreamWriter writer, SubsystemMarshallingContext context) throws XMLStreamException { if (!context.getModelNode().get(WEB_CONTEXT.getName()).isDefined()) { return; } WEB_CONTEXT.marshallAsElement(context.getModelNode(), writer); } private void writeAdmin(XMLExtendedStreamWriter writer, SubsystemMarshallingContext context) throws XMLStreamException { if (!context.getModelNode().get(MASTER_REALM_NAME.getName()).isDefined()) { return; } MASTER_REALM_NAME.marshallAsElement(context.getModelNode(), writer); } private void writeScheduledTaskInterval(XMLExtendedStreamWriter writer, SubsystemMarshallingContext context) throws XMLStreamException { if (!context.getModelNode().get(SCHEDULED_TASK_INTERVAL.getName()).isDefined()) { return; } SCHEDULED_TASK_INTERVAL.marshallAsElement(context.getModelNode(), writer); } private void writeList(XMLExtendedStreamWriter writer, ModelNode context, AttributeDefinition def, String elementName) throws XMLStreamException { if (!context.get(def.getName()).isDefined()) { return; } writer.writeStartElement(def.getXmlName()); ModelNode modules = context.get(def.getName()); for (ModelNode module : modules.asList()) { writer.writeStartElement(elementName); writer.writeCharacters(module.asString()); writer.writeEndElement(); } writer.writeEndElement(); } }
/*PLEASE DO NOT EDIT THIS CODE*/ /*This code was generated using the UMPLE 1.15.0.1751 modeling language!*/ package service; import java.util.List; import java.util.ArrayList; import shared.domain.Election; import java.sql.Connection; import java.sql.ResultSet; import java.sql.Statement; import java.sql.DriverManager; import shared.Credentials; public class ElectionService { //------------------------ // STATIC VARIABLES //------------------------ private static ElectionService theInstance = null; //------------------------ // MEMBER VARIABLES //------------------------ //ElectionService Attributes private Election newElection; private List<Election> elections; private Connection theConnection; private boolean electionAdded; private String electionNameToSearch; private boolean electionFound; //ElectionService State Machines enum ElectionServiceCycle { Idle, LoadingAllElections, CreatingElection, FindingElectionByName } private ElectionServiceCycle ElectionServiceCycle; //------------------------ // CONSTRUCTOR //------------------------ private ElectionService() { electionAdded = false; electionNameToSearch = null; electionFound = false; setElectionServiceCycle(ElectionServiceCycle.Idle); } public static ElectionService getInstance() { if(theInstance == null) { theInstance = new ElectionService(); } return theInstance; } //------------------------ // INTERFACE //------------------------ public boolean setNewElection(Election aNewElection) { boolean wasSet = false; newElection = aNewElection; wasSet = true; createElection(); return wasSet; } public boolean setElections(List<Election> aElections) { boolean wasSet = false; elections = aElections; wasSet = true; return wasSet; } public boolean setElectionAdded(boolean aElectionAdded) { boolean wasSet = false; electionAdded = aElectionAdded; wasSet = true; return wasSet; } public boolean setElectionNameToSearch(String aElectionNameToSearch) { boolean wasSet = false; electionNameToSearch = aElectionNameToSearch; wasSet = true; findElectionByName(); return wasSet; } public boolean setElectionFound(boolean aElectionFound) { boolean wasSet = false; electionFound = aElectionFound; wasSet = true; return wasSet; } public Election getNewElection() { return newElection; } public List<Election> getElections() { return elections; } public boolean getElectionAdded() { return electionAdded; } public String getElectionNameToSearch() { return electionNameToSearch; } public boolean getElectionFound() { return electionFound; } public boolean isElectionAdded() { return electionAdded; } public boolean isElectionFound() { return electionFound; } public String getElectionServiceCycleFullName() { String answer = ElectionServiceCycle.toString(); return answer; } public ElectionServiceCycle getElectionServiceCycle() { return ElectionServiceCycle; } public boolean getAllElections() { boolean wasEventProcessed = false; ElectionServiceCycle aElectionServiceCycle = ElectionServiceCycle; switch (aElectionServiceCycle) { case Idle: setElectionServiceCycle(ElectionServiceCycle.LoadingAllElections); wasEventProcessed = true; break; } return wasEventProcessed; } public boolean createElection() { boolean wasEventProcessed = false; ElectionServiceCycle aElectionServiceCycle = ElectionServiceCycle; switch (aElectionServiceCycle) { case Idle: setElectionServiceCycle(ElectionServiceCycle.CreatingElection); wasEventProcessed = true; break; } return wasEventProcessed; } public boolean findElectionByName() { boolean wasEventProcessed = false; ElectionServiceCycle aElectionServiceCycle 
= ElectionServiceCycle; switch (aElectionServiceCycle) { case Idle: setElectionServiceCycle(ElectionServiceCycle.FindingElectionByName); wasEventProcessed = true; break; } return wasEventProcessed; } private boolean __autotransition98__() { boolean wasEventProcessed = false; ElectionServiceCycle aElectionServiceCycle = ElectionServiceCycle; switch (aElectionServiceCycle) { case LoadingAllElections: setElectionServiceCycle(ElectionServiceCycle.Idle); wasEventProcessed = true; break; } return wasEventProcessed; } private boolean __autotransition99__() { boolean wasEventProcessed = false; ElectionServiceCycle aElectionServiceCycle = ElectionServiceCycle; switch (aElectionServiceCycle) { case CreatingElection: setElectionServiceCycle(ElectionServiceCycle.Idle); wasEventProcessed = true; break; } return wasEventProcessed; } private boolean __autotransition100__() { boolean wasEventProcessed = false; ElectionServiceCycle aElectionServiceCycle = ElectionServiceCycle; switch (aElectionServiceCycle) { case FindingElectionByName: setElectionServiceCycle(ElectionServiceCycle.Idle); wasEventProcessed = true; break; } return wasEventProcessed; } private void setElectionServiceCycle(ElectionServiceCycle aElectionServiceCycle) { try { Class.forName("com.mysql.jdbc.Driver").newInstance(); theConnection = DriverManager.getConnection("jdbc:mysql://"+Credentials.db_hostname+"/elections", Credentials.db_username, Credentials.db_password); } catch(Exception e) { System.err.println("Exception: " + e.getMessage()); } ElectionServiceCycle = aElectionServiceCycle; // entry actions and do activities switch(ElectionServiceCycle) { case LoadingAllElections: loadAllElections(); __autotransition98__(); break; case CreatingElection: addElection(); __autotransition99__(); break; case FindingElectionByName: tryFindingElectionByName(); __autotransition100__(); break; } } public void delete() {} public void loadAllElections(){ elections=new ArrayList<Election>(); try { Statement stmt = theConnection.createStatement(); ResultSet rs = stmt.executeQuery("SELECT * FROM election"); while (rs.next()) { String name = rs.getString("name"); String description = rs.getString("description"); int id=Integer.parseInt(rs.getString("id_election")); Election election=new Election(id, name, description, null); elections.add(election); } } catch(Exception e) { System.err.println("Exception: " + e.getMessage()); } } public void addElection(){ try { Statement stmt = theConnection.createStatement(); stmt.executeUpdate("insert into elections.election (name, description) values ('"+newElection.getName()+"', '"+newElection.getDescription()+"')"); electionAdded=true; } catch(Exception e) { System.err.println("Exception: " + e.getMessage()); electionAdded=false; } } //------------------------ // DEVELOPER CODE - PROVIDED AS-IS //------------------------ private void tryFindingElectionByName() { electionFound=true; try { Statement stmt = theConnection.createStatement(); ResultSet rs = stmt.executeQuery("SELECT * FROM election where name='"+electionNameToSearch+"'"); if (!rs.next()) electionFound=false; } catch(Exception e) { System.err.println("Exception: " + e.getMessage()); electionFound=false; } } }
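/*
 * Illustrative sketch, not taken from the sources above: the SQL in addElection() and
 * tryFindingElectionByName() is built by string concatenation and is therefore open to
 * SQL injection. The same lookup with a parameterized query could look like the class
 * below; the class and method names are hypothetical, and only the "election" table and
 * its "name" column come from the code above.
 */
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

class ElectionQueries {
    /** Returns true if an election with the given name exists. */
    static boolean electionExists(Connection connection, String name) throws SQLException {
        String sql = "SELECT 1 FROM election WHERE name = ?";
        try (PreparedStatement stmt = connection.prepareStatement(sql)) {
            stmt.setString(1, name); // the value is bound, never concatenated into the SQL
            try (ResultSet rs = stmt.executeQuery()) {
                return rs.next();
            }
        }
    }
}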
/* * The MIT License * * Copyright (c) 2004-2009, Sun Microsystems, Inc., Kohsuke Kawaguchi, Daniel Dyer, id:cactusman, Tom Huybrechts, Yahoo!, Inc. * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ package hudson.tasks.junit; import hudson.AbortException; import hudson.Util; import hudson.model.AbstractBuild; import hudson.model.Run; import hudson.tasks.test.MetaTabulatedResult; import hudson.tasks.test.TestObject; import hudson.tasks.test.AbstractTestResultAction; import hudson.util.IOException2; import org.apache.tools.ant.DirectoryScanner; import org.dom4j.DocumentException; import org.kohsuke.stapler.StaplerRequest; import org.kohsuke.stapler.StaplerResponse; import org.kohsuke.stapler.export.Exported; import java.io.File; import java.io.IOException; import java.io.PrintWriter; import java.io.StringWriter; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.TreeMap; import java.util.logging.Logger; /** * Root of all the test results for one build. * * @author Kohsuke Kawaguchi */ public final class TestResult extends MetaTabulatedResult { private static final Logger LOGGER = Logger.getLogger(TestResult.class.getName()); /** * List of all {@link SuiteResult}s in this test. * This is the core data structure to be persisted in the disk. */ private final List<SuiteResult> suites = new ArrayList<SuiteResult>(); /** * {@link #suites} keyed by their names for faster lookup. */ private transient Map<String,SuiteResult> suitesByName; /** * Results tabulated by package. */ private transient Map<String,PackageResult> byPackages; // set during the freeze phase private transient AbstractTestResultAction parentAction; private transient TestObject parent; /** * Number of all tests. */ private transient int totalTests; private transient int skippedTests; private float duration; /** * Number of failed/error tests. */ private transient List<CaseResult> failedTests; private final boolean keepLongStdio; /** * Creates an empty result. */ public TestResult() { keepLongStdio = false; } @Deprecated public TestResult(long buildTime, DirectoryScanner results) throws IOException { this(buildTime, results, false); } /** * Collect reports from the given {@link DirectoryScanner}, while * filtering out all files that were created before the given time. 
* @param keepLongStdio if true, retain a suite's complete stdout/stderr even if this is huge and the suite passed * @since 1.358 */ public TestResult(long buildTime, DirectoryScanner results, boolean keepLongStdio) throws IOException { this.keepLongStdio = keepLongStdio; parse(buildTime, results); } public TestObject getParent() { return parent; } @Override public void setParent(TestObject parent) { this.parent = parent; } @Override public TestResult getTestResult() { return this; } /** * Collect reports from the given {@link DirectoryScanner}, while * filtering out all files that were created before the given time. */ public void parse(long buildTime, DirectoryScanner results) throws IOException { String[] includedFiles = results.getIncludedFiles(); File baseDir = results.getBasedir(); boolean parsed=false; for (String value : includedFiles) { File reportFile = new File(baseDir, value); // only count files that were actually updated during this build if ( (buildTime-3000/*error margin*/ <= reportFile.lastModified()) || !checkTimestamps) { if(reportFile.length()==0) { // this is a typical problem when JVM quits abnormally, like OutOfMemoryError during a test. SuiteResult sr = new SuiteResult(reportFile.getName(), "", ""); sr.addCase(new CaseResult(sr,"<init>","Test report file "+reportFile.getAbsolutePath()+" was length 0")); add(sr); } else { parse(reportFile); } parsed = true; } } if(!parsed) { long localTime = System.currentTimeMillis(); if(localTime < buildTime-1000) /*margin*/ // build time is in the the future. clock on this slave must be running behind throw new AbortException( "Clock on this slave is out of sync with the master, and therefore \n" + "I can't figure out what test results are new and what are old.\n" + "Please keep the slave clock in sync with the master."); File f = new File(baseDir,includedFiles[0]); throw new AbortException( String.format( "Test reports were found but none of them are new. Did tests run? \n"+ "For example, %s is %s old\n", f, Util.getTimeSpanString(buildTime-f.lastModified()))); } } private void add(SuiteResult sr) { for (SuiteResult s : suites) { // a common problem is that people parse TEST-*.xml as well as TESTS-TestSuite.xml // see http://www.nabble.com/Problem-with-duplicate-build-execution-td17549182.html for discussion if(s.getName().equals(sr.getName()) && eq(s.getTimestamp(),sr.getTimestamp())) return; // duplicate } suites.add(sr); duration += sr.getDuration(); } private boolean eq(Object lhs, Object rhs) { return lhs != null && rhs != null && lhs.equals(rhs); } /** * Parses an additional report file. */ public void parse(File reportFile) throws IOException { try { for (SuiteResult suiteResult : SuiteResult.parse(reportFile, keepLongStdio)) add(suiteResult); } catch (RuntimeException e) { throw new IOException2("Failed to read "+reportFile,e); } catch (DocumentException e) { if (!reportFile.getPath().endsWith(".xml")) { throw new IOException2("Failed to read "+reportFile+"\n"+ "Is this really a JUnit report file? 
Your configuration must be matching too many files",e); } else { SuiteResult sr = new SuiteResult(reportFile.getName(), "", ""); StringWriter writer = new StringWriter(); e.printStackTrace(new PrintWriter(writer)); String error = "Failed to read test report file "+reportFile.getAbsolutePath()+"\n"+writer.toString(); sr.addCase(new CaseResult(sr,"<init>",error)); add(sr); throw new IOException2("Failed to read "+reportFile,e); } } } public String getDisplayName() { return Messages.TestResult_getDisplayName(); } @Override public AbstractBuild<?,?> getOwner() { return (parentAction == null? null: parentAction.owner); } @Override public hudson.tasks.test.TestResult findCorrespondingResult(String id) { if (getId().equals(id) || (id == null)) { return this; } String firstElement = null; String subId = null; int sepIndex = id.indexOf('/'); if (sepIndex < 0) { firstElement = id; subId = null; } else { firstElement = id.substring(0, sepIndex); subId = id.substring(sepIndex + 1); if (subId.length() == 0) { subId = null; } } String packageName = null; if (firstElement.equals(getId())) { sepIndex = subId.indexOf('/'); if (sepIndex < 0) { packageName = subId; subId = null; } else { packageName = subId.substring(0, sepIndex); subId = subId.substring(sepIndex + 1); } } else { packageName = firstElement; subId = null; } PackageResult child = byPackage(packageName); if (child != null) { if (subId != null) { return child.findCorrespondingResult(subId); } else { return child; } } else { return null; } } @Override public String getTitle() { return Messages.TestResult_getTitle(); } @Override public String getChildTitle() { return Messages.TestResult_getChildTitle(); } @Exported(visibility=999) @Override public float getDuration() { return duration; } @Exported(visibility=999) @Override public int getPassCount() { return totalTests-getFailCount()-getSkipCount(); } @Exported(visibility=999) @Override public int getFailCount() { if(failedTests==null) return 0; else return failedTests.size(); } @Exported(visibility=999) @Override public int getSkipCount() { return skippedTests; } @Override public List<CaseResult> getFailedTests() { return failedTests; } /** * Gets the "children" of this test result that passed * * @return the children of this test result, if any, or an empty collection */ @Override public Collection<? extends hudson.tasks.test.TestResult> getPassedTests() { throw new UnsupportedOperationException(); // TODO: implement!(FIXME: generated) } /** * Gets the "children" of this test result that were skipped * * @return the children of this test result, if any, or an empty list */ @Override public Collection<? extends hudson.tasks.test.TestResult> getSkippedTests() { throw new UnsupportedOperationException(); // TODO: implement!(FIXME: generated) } /** * If this test failed, then return the build number * when this test started failing. */ @Override public int getFailedSince() { throw new UnsupportedOperationException(); // TODO: implement!(FIXME: generated) } /** * If this test failed, then return the run * when this test started failing. */ @Override public Run<?, ?> getFailedSinceRun() { throw new UnsupportedOperationException(); // TODO: implement!(FIXME: generated) } /** * The stdout of this test. * <p/> * <p/> * Depending on the tool that produced the XML report, this method works somewhat inconsistently. * With some tools (such as Maven surefire plugin), you get the accurate information, that is * the stdout from this test case. 
With some other tools (such as the JUnit task in Ant), this * method returns the stdout produced by the entire test suite. * <p/> * <p/> * If you need to know which is the case, compare this output from {@link SuiteResult#getStdout()}. * * @since 1.294 */ @Override public String getStdout() { StringBuilder sb = new StringBuilder(); for (SuiteResult suite: suites) { sb.append("Standard Out (stdout) for Suite: " + suite.getName()); sb.append(suite.getStdout()); } return sb.toString(); } /** * The stderr of this test. * * @see #getStdout() * @since 1.294 */ @Override public String getStderr() { StringBuilder sb = new StringBuilder(); for (SuiteResult suite: suites) { sb.append("Standard Error (stderr) for Suite: " + suite.getName()); sb.append(suite.getStderr()); } return sb.toString(); } /** * If there was an error or a failure, this is the stack trace, or otherwise null. */ @Override public String getErrorStackTrace() { return "No error stack traces available at this level. Drill down to individual tests to find stack traces."; } /** * If there was an error or a failure, this is the text from the message. */ @Override public String getErrorDetails() { return "No error details available at this level. Drill down to individual tests to find details."; } /** * @return true if the test was not skipped and did not fail, false otherwise. */ @Override public boolean isPassed() { return (getFailCount() == 0); } @Override public Collection<PackageResult> getChildren() { return byPackages.values(); } /** * Whether this test result has children. */ @Override public boolean hasChildren() { return !suites.isEmpty(); } @Exported(inline=true,visibility=9) public Collection<SuiteResult> getSuites() { return suites; } @Override public String getName() { return "junit"; } @Override public Object getDynamic(String token, StaplerRequest req, StaplerResponse rsp) { if (token.equals(getId())) { return this; } PackageResult result = byPackage(token); if (result != null) { return result; } else { return super.getDynamic(token, req, rsp); } } public PackageResult byPackage(String packageName) { return byPackages.get(packageName); } public SuiteResult getSuite(String name) { return suitesByName.get(name); } @Override public void setParentAction(AbstractTestResultAction action) { this.parentAction = action; tally(); // I want to be sure to inform our children when we get an action. } @Override public AbstractTestResultAction getParentAction() { return this.parentAction; } /** * Recount my children. */ @Override public void tally() { /// Empty out data structures // TODO: free children? memmory leak? 
suitesByName = new HashMap<String,SuiteResult>(); failedTests = new ArrayList<CaseResult>(); byPackages = new TreeMap<String,PackageResult>(); totalTests = 0; skippedTests = 0; // Ask all of our children to tally themselves for (SuiteResult s : suites) { s.setParent(this); // kluge to prevent double-counting the results suitesByName.put(s.getName(),s); List<CaseResult> cases = s.getCases(); for (CaseResult cr: cases) { cr.setParentAction(this.parentAction); cr.setParentSuiteResult(s); cr.tally(); String pkg = cr.getPackageName(), spkg = safe(pkg); PackageResult pr = byPackage(spkg); if(pr==null) byPackages.put(spkg,pr=new PackageResult(this,pkg)); pr.add(cr); } } for (PackageResult pr : byPackages.values()) { pr.tally(); skippedTests += pr.getSkipCount(); failedTests.addAll(pr.getFailedTests()); totalTests += pr.getTotalCount(); } } /** * Builds up the transient part of the data structure * from results {@link #parse(File) parsed} so far. * * <p> * After the data is frozen, more files can be parsed * and then freeze can be called again. */ public void freeze(TestResultAction parent) { this.parentAction = parent; if(suitesByName==null) { // freeze for the first time suitesByName = new HashMap<String,SuiteResult>(); totalTests = 0; failedTests = new ArrayList<CaseResult>(); byPackages = new TreeMap<String,PackageResult>(); } for (SuiteResult s : suites) { if(!s.freeze(this)) // this is disturbing: has-a-parent is conflated with has-been-counted continue; suitesByName.put(s.getName(),s); totalTests += s.getCases().size(); for(CaseResult cr : s.getCases()) { if(cr.isSkipped()) skippedTests++; else if(!cr.isPassed()) failedTests.add(cr); String pkg = cr.getPackageName(), spkg = safe(pkg); PackageResult pr = byPackage(spkg); if(pr==null) byPackages.put(spkg,pr=new PackageResult(this,pkg)); pr.add(cr); } } Collections.sort(failedTests,CaseResult.BY_AGE); for (PackageResult pr : byPackages.values()) pr.freeze(); } private static final long serialVersionUID = 1L; private static final boolean checkTimestamps = true; // TODO: change to System.getProperty }
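/*
 * Illustrative sketch, assuming a directory of JUnit XML reports: how a caller might
 * drive the TestResult class above. The helper class name and the TEST-*.xml include
 * pattern are assumptions; the TestResult constructor and tally() come from the code above.
 */
import hudson.tasks.junit.TestResult;
import org.apache.tools.ant.DirectoryScanner;

import java.io.File;
import java.io.IOException;

class TestResultUsageSketch {
    /** Parses all TEST-*.xml reports under reportDir that are newer than buildTime. */
    static TestResult collect(File reportDir, long buildTime) throws IOException {
        DirectoryScanner scanner = new DirectoryScanner();
        scanner.setBasedir(reportDir);
        scanner.setIncludes(new String[] {"TEST-*.xml"});
        scanner.scan();
        // keepLongStdio=false: stdout/stderr of passing suites may be truncated
        TestResult result = new TestResult(buildTime, scanner, false);
        result.tally(); // populate per-package results and failure counts
        return result;
    }
}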
package org.orienteer.core.module; import com.orientechnologies.orient.core.db.document.ODatabaseDocument; import com.orientechnologies.orient.core.db.record.OIdentifiable; import com.orientechnologies.orient.core.index.OIndex; import com.orientechnologies.orient.core.metadata.schema.OClass; import com.orientechnologies.orient.core.metadata.schema.OClass.INDEX_TYPE; import com.orientechnologies.orient.core.metadata.schema.OProperty; import com.orientechnologies.orient.core.metadata.schema.OSchema; import com.orientechnologies.orient.core.metadata.schema.OType; import com.orientechnologies.orient.core.metadata.security.OIdentity; import com.orientechnologies.orient.core.metadata.security.OSecurityRole; import com.orientechnologies.orient.core.metadata.security.OSecurityUser; import com.orientechnologies.orient.core.record.impl.ODocument; import com.orientechnologies.orient.core.sql.OCommandSQL; import com.orientechnologies.orient.core.sql.query.OSQLSynchQuery; import com.orientechnologies.orient.core.type.ODocumentWrapper; import org.apache.wicket.model.ResourceModel; import org.orienteer.core.CustomAttribute; import org.orienteer.core.OrienteerWebApplication; import org.orienteer.core.component.visualizer.UIVisualizersRegistry; import org.orienteer.core.util.CommonUtils; import org.orienteer.core.util.OSchemaHelper; import ru.ydn.wicket.wicketorientdb.utils.DBClosure; import javax.inject.Singleton; import java.util.*; import java.util.stream.Collectors; /** * {@link IOrienteerModule} for "perspectives" feature of Orienteer */ @Singleton public class PerspectivesModule extends AbstractOrienteerModule { public static final String NAME = "perspectives"; public static final String ALIAS_PERSPECTIVE_DEFAULT = "default"; public static final String ALIAS_ITEM_USERS = "users"; public static final String ALIAS_ITEM_ROLES = "roles"; public static final String ALIAS_ITEM_SCHEMA = "schema"; public static final String ALIAS_ITEM_LOCALIZATION = "localization"; public static final String ALIAS_ITEM_PERSPECTIVES = "perspectives"; public static final String ALIAS_ITEM_MODULES = "modules"; public static final String PROP_PERSPECTIVE = "perspective"; public PerspectivesModule() { super(NAME, 7); } @Override public ODocument onInstall(OrienteerWebApplication app, ODatabaseDocument db) { OSchemaHelper helper = OSchemaHelper.bind(db); helper.oClass(OPerspective.CLASS_NAME) .oProperty(OPerspective.PROP_NAME, OType.EMBEDDEDMAP, 0) .assignVisualization(UIVisualizersRegistry.VISUALIZER_LOCALIZATION) .markAsDocumentName() .linkedType(OType.STRING) .oProperty(OPerspective.PROP_ALIAS, OType.STRING, 10) .notNull() .oIndex(INDEX_TYPE.UNIQUE) .oProperty(OPerspective.PROP_ICON, OType.STRING, 20) .oProperty(OPerspective.PROP_HOME_URL, OType.STRING, 30) .oProperty(OPerspective.PROP_MENU, OType.LINKLIST, 40) .assignVisualization(UIVisualizersRegistry.VISUALIZER_TABLE) .oProperty(OPerspective.PROP_FOOTER, OType.STRING, 50) .assignVisualization(UIVisualizersRegistry.VISUALIZER_TEXTAREA) .oProperty(OPerspective.PROP_FEATURES, OType.EMBEDDEDSET, 60) .linkedType(OType.STRING) .switchDisplayable(true, OPerspective.PROP_NAME, OPerspective.PROP_ICON, OPerspective.PROP_HOME_URL); helper.oClass(OPerspectiveItem.CLASS_NAME) .oProperty(OPerspectiveItem.PROP_NAME, OType.EMBEDDEDMAP, 0) .assignVisualization(UIVisualizersRegistry.VISUALIZER_LOCALIZATION) .markAsDocumentName() .oProperty(OPerspectiveItem.PROP_ALIAS, OType.STRING, 10) .notNull() .oProperty(OPerspectiveItem.PROP_ICON, OType.STRING, 20) .oProperty(OPerspectiveItem.PROP_URL, 
OType.STRING, 30) .oProperty(OPerspectiveItem.PROP_PERSPECTIVE, OType.LINK, 40) .markAsLinkToParent() .oProperty(OPerspectiveItem.PROP_PERSPECTIVE_ITEM, OType.LINK, 50) .markAsLinkToParent() .oProperty(OPerspectiveItem.PROP_SUB_ITEMS, OType.LINKLIST, 60) .assignVisualization(UIVisualizersRegistry.VISUALIZER_TABLE) .switchDisplayable(true, OPerspectiveItem.PROP_NAME, OPerspectiveItem.PROP_ICON, OPerspectiveItem.PROP_URL); helper.setupRelationship(OPerspective.CLASS_NAME, OPerspective.PROP_MENU, OPerspectiveItem.CLASS_NAME, OPerspectiveItem.PROP_PERSPECTIVE); helper.setupRelationship(OPerspectiveItem.CLASS_NAME, OPerspectiveItem.PROP_SUB_ITEMS, OPerspectiveItem.CLASS_NAME, OPerspectiveItem.PROP_PERSPECTIVE_ITEM); helper.oClass(OIdentity.CLASS_NAME) .oProperty(PROP_PERSPECTIVE, OType.LINK) .linkedClass(OPerspective.CLASS_NAME); createDefaultPerspective(helper); return null; } private ODocument createDefaultPerspective(OSchemaHelper helper) { helper.oClass(OPerspective.CLASS_NAME); ODocument defaultPerspective = helper.oDocument(OPerspective.PROP_ALIAS, ALIAS_PERSPECTIVE_DEFAULT) .field(OPerspective.PROP_NAME, CommonUtils.toMap("en", new ResourceModel("perspective.default.name").getObject())) .field(OPerspective.PROP_HOME_URL, "/schema") .field(OPerspective.PROP_ICON, "fa fa-cog") .saveDocument() .getODocument(); helper.oClass(OPerspectiveItem.CLASS_NAME); helper.oDocument(OPerspectiveItem.PROP_ALIAS, ALIAS_ITEM_USERS) .field(OPerspectiveItem.PROP_NAME, CommonUtils.toMap("en", new ResourceModel("perspective.item.default.users").getObject())) .field(OPerspectiveItem.PROP_ICON, "fa fa-users") .field(OPerspectiveItem.PROP_URL, "/browse/OUser") .field(OPerspectiveItem.PROP_PERSPECTIVE, defaultPerspective) .saveDocument(); helper.oDocument(OPerspectiveItem.PROP_ALIAS, ALIAS_ITEM_ROLES) .field(OPerspectiveItem.PROP_NAME, CommonUtils.toMap("en", new ResourceModel("perspective.item.default.roles").getObject())) .field(OPerspectiveItem.PROP_ICON, "fa fa-user-circle") .field(OPerspectiveItem.PROP_URL, "/browse/ORole") .field(OPerspectiveItem.PROP_PERSPECTIVE, defaultPerspective) .saveDocument(); helper.oDocument(OPerspectiveItem.PROP_ALIAS, ALIAS_ITEM_SCHEMA) .field(OPerspectiveItem.PROP_NAME, CommonUtils.toMap("en", new ResourceModel("perspective.item.default.schema").getObject())) .field(OPerspectiveItem.PROP_ICON, "fa fa-cubes") .field(OPerspectiveItem.PROP_URL, "/schema") .field(OPerspectiveItem.PROP_PERSPECTIVE, defaultPerspective) .saveDocument(); helper.oDocument(OPerspectiveItem.PROP_ALIAS, ALIAS_ITEM_LOCALIZATION) .field(OPerspectiveItem.PROP_NAME, CommonUtils.toMap("en", new ResourceModel("perspective.item.default.localization").getObject())) .field(OPerspectiveItem.PROP_ICON, "fa fa-language") .field(OPerspectiveItem.PROP_URL, "/browse/OLocalization") .field(OPerspectiveItem.PROP_PERSPECTIVE, defaultPerspective) .saveDocument(); helper.oDocument(OPerspectiveItem.PROP_ALIAS, ALIAS_ITEM_PERSPECTIVES) .field(OPerspectiveItem.PROP_NAME, CommonUtils.toMap("en", new ResourceModel("perspective.item.default.perspectives").getObject())) .field(OPerspectiveItem.PROP_ICON, "fa fa-desktop") .field(OPerspectiveItem.PROP_URL, "/browse/" + OPerspective.CLASS_NAME) .field(OPerspectiveItem.PROP_PERSPECTIVE, defaultPerspective) .saveDocument(); helper.oDocument(OPerspectiveItem.PROP_ALIAS, ALIAS_ITEM_MODULES) .field(OPerspectiveItem.PROP_NAME, CommonUtils.toMap("en", new ResourceModel("perspective.item.default.modules").getObject())) .field(OPerspectiveItem.PROP_ICON, "fa fa-archive") 
.field(OPerspectiveItem.PROP_URL, "/browse/" + AbstractOrienteerModule.OMODULE_CLASS) .field(OPerspectiveItem.PROP_PERSPECTIVE, defaultPerspective) .saveDocument(); return defaultPerspective; } @Override public void onUpdate(OrienteerWebApplication app, ODatabaseDocument db, int oldVersion, int newVersion) { int toVersion = oldVersion+1; switch (toVersion) { case 2: convertNameProperty(app, db, OPerspective.CLASS_NAME); convertNameProperty(app, db, OPerspectiveItem.CLASS_NAME); break; case 3: onInstall(app, db); break; case 4: OIndex<?> index = db.getMetadata().getIndexManager().getIndex(OPerspective.CLASS_NAME + ".name"); if(index!=null) index.delete(); onInstall(app, db); break; case 5: OSchemaHelper.bind(db) .oClass(OIdentity.CLASS_NAME) .oProperty(PROP_PERSPECTIVE, OType.LINK).linkedClass(OPerspective.CLASS_NAME); break; case 6: OSchemaHelper helper = OSchemaHelper.bind(db); helper.oClass(OPerspective.CLASS_NAME) .oProperty(OPerspective.PROP_ALIAS, OType.STRING, 10); db.command(new OCommandSQL("update OPerspective set alias=name['en'].toLowerCase() where alias is null")) .execute(); helper.notNull() .oIndex(INDEX_TYPE.UNIQUE); //update aliases helper.oClass(OPerspectiveItem.CLASS_NAME) .oProperty(OPerspectiveItem.PROP_ALIAS, OType.STRING, 10); db.command(new OCommandSQL("update OPerspectiveItem set alias=name['en'].toLowerCase() where alias is null")) .execute(); helper.notNull(); break; case 7: OSchemaHelper.bind(db) .oClass(OPerspective.CLASS_NAME) .oProperty(OPerspective.PROP_FEATURES, OType.EMBEDDEDSET, 60) .linkedType(OType.STRING); default: break; } if(toVersion<newVersion) onUpdate(app, db, toVersion, newVersion); } private void convertNameProperty(OrienteerWebApplication app, ODatabaseDocument db, String className) { boolean wasInTransacton = db.getTransaction().isActive(); db.commit(); for(ODocument doc : db.browseClass(className)) { Object value = doc.field("name"); if(value instanceof String) { doc.field("temp", (Object) doc.field("name")); doc.field("name", (String) null); doc.save(); } } OClass oClass = db.getMetadata().getSchema().getClass(className); oClass.dropProperty("name"); OProperty nameProperty = oClass.createProperty("name", OType.EMBEDDEDMAP); CustomAttribute.VISUALIZATION_TYPE.setValue(nameProperty, "localization"); for(ODocument doc : db.browseClass(className)) { if(doc.containsField("temp")) { doc.field("name", CommonUtils.toMap("en", doc.field("temp"))); doc.removeField("temp"); doc.save(); } } if(wasInTransacton) db.begin(); } public Optional<OPerspective> getPerspectiveByAlias(ODatabaseDocument db, String alias) { return getPerspectiveByAliasAsDocument(db, alias) .map(OPerspective::new); } public Optional<ODocument> getPerspectiveByAliasAsDocument(ODatabaseDocument db, String alias) { String sql = String.format("select from %s where %s = ?", OPerspective.CLASS_NAME, OPerspective.PROP_ALIAS); List<OIdentifiable> identifiables = db.query(new OSQLSynchQuery<>(sql, 1), alias); return CommonUtils.getDocument(identifiables); } public Optional<OPerspectiveItem> getPerspectiveItemByAlias(ODatabaseDocument db, String alias) { return getPerspectiveItemByAliasAsDocument(db, alias) .map(OPerspectiveItem::new); } public Optional<ODocument> getPerspectiveItemByAliasAsDocument(ODatabaseDocument db, String alias) { String sql = String.format("select from %s where %s =?", OPerspectiveItem.CLASS_NAME, OPerspectiveItem.PROP_ALIAS); List<OIdentifiable> identifiable = db.query(new OSQLSynchQuery<>(sql, 1), alias); return CommonUtils.getDocument(identifiable); } public 
ODocument getDefaultPerspective(ODatabaseDocument db, OSecurityUser user) { if (user != null) { if (user.getDocument().field(PROP_PERSPECTIVE) != null) { return ((OIdentifiable) user.getDocument().field(PROP_PERSPECTIVE)).getRecord(); } Set<? extends OSecurityRole> roles = user.getRoles(); for (OSecurityRole oRole : roles) { ODocument perspective = getPerspectiveForORole(oRole); if (perspective != null) { return perspective; } } } return getPerspectiveByAliasAsDocument(db, ALIAS_PERSPECTIVE_DEFAULT) // Restore default perspective if it was not found .orElseGet(() -> DBClosure.sudo((adminDb)->createDefaultPerspective(OSchemaHelper.bind(adminDb)))); } public ODocument getPerspectiveForORole(OSecurityRole role) { if (role == null) { return null; } if (role.getDocument().field(PROP_PERSPECTIVE) != null) { return ((OIdentifiable) role.getDocument().field(PROP_PERSPECTIVE)).getRecord(); } OSecurityRole parentRole = role.getParentRole(); return parentRole != null && !parentRole.equals(role) ? getPerspectiveForORole(parentRole) : null; } public void updateUserPerspective(ODocument user, ODocument perspective) { if (user != null) { DBClosure.sudoConsumer(db -> { user.field(PROP_PERSPECTIVE, perspective); user.save(); }); } } @Override public void onInitialize(OrienteerWebApplication app, ODatabaseDocument db) { OSchema schema = db.getMetadata().getSchema(); if (schema.getClass(OPerspective.CLASS_NAME) == null || schema.getClass(OPerspectiveItem.CLASS_NAME) == null) { //Repair onInstall(app, db); } } /** * Model which represents class OPerspective */ public static class OPerspective extends ODocumentWrapper { public static final String CLASS_NAME = "OPerspective"; public static final String PROP_NAME = "name"; public static final String PROP_ALIAS = "alias"; public static final String PROP_ICON = "icon"; public static final String PROP_HOME_URL = "homeUrl"; public static final String PROP_MENU = "menu"; public static final String PROP_FOOTER = "footer"; public static final String PROP_FEATURES = "features"; public OPerspective() { super(CLASS_NAME); } public OPerspective(String iClassName) { super(iClassName!=null?iClassName:CLASS_NAME); } public OPerspective(ODocument iDocument) { super(iDocument!=null?iDocument:new ODocument(CLASS_NAME)); } public Map<String, String> getName() { return document.field(PROP_NAME); } public OPerspective setName(Map<String, String> name) { document.field(PROP_NAME, name); return this; } public String getAlias() { return document.field(PROP_ALIAS); } public OPerspective setAlias(String alias) { document.field(PROP_ALIAS, alias); return this; } public String getIcon() { return document.field(PROP_ICON); } public OPerspective setIcon(String icon) { document.field(PROP_ICON, icon); return this; } public String getHomeUrl() { return document.field(PROP_HOME_URL); } public OPerspective setHomeUrl(String url) { document.field(PROP_HOME_URL, url); return this; } public List<OPerspectiveItem> getMenu() { return getMenuAsDocuments().stream() .map(OPerspectiveItem::new) .collect(Collectors.toCollection(LinkedList::new)); } public List<ODocument> getMenuAsDocuments() { return CommonUtils.getDocuments(document.field(PROP_MENU)); } public OPerspective setMenu(List<OPerspectiveItem> menu) { List<ODocument> docs = menu == null ?
Collections.emptyList() : menu.stream() .map(OPerspectiveItem::getDocument) .collect(Collectors.toCollection(LinkedList::new)); return setMenuAsDocuments(docs); } public OPerspective setMenuAsDocuments(List<ODocument> menu) { document.field(PROP_MENU, menu); return this; } public String getFooter() { return document.field(PROP_FOOTER); } public OPerspective setFooter(String footer) { document.field(PROP_FOOTER, footer); return this; } public Collection<String> getFeatures() { return document.field(PROP_FEATURES); } public OPerspective setFeatures(Collection<String> features) { document.field(PROP_FEATURES, features); return this; } public boolean providesFeature(String feature) { Collection<String> features = getFeatures(); return features!=null?features.contains(feature):false; } } /** * Model which represents class OPerspectiveItem */ public static class OPerspectiveItem extends ODocumentWrapper { public static final String CLASS_NAME = "OPerspectiveItem"; public static final String PROP_NAME = "name"; public static final String PROP_ALIAS = "alias"; public static final String PROP_ICON = "icon"; public static final String PROP_URL = "url"; public static final String PROP_PERSPECTIVE = "perspective"; public static final String PROP_PERSPECTIVE_ITEM = "perspectiveItem"; public static final String PROP_SUB_ITEMS = "subItems"; public OPerspectiveItem() { super(CLASS_NAME); } public OPerspectiveItem(String iClassName) { super(iClassName); } public OPerspectiveItem(ODocument iDocument) { super(iDocument); } public Map<String, String> getName() { return document.field(PROP_NAME); } public OPerspectiveItem setName(Map<String, String> name) { document.field(PROP_NAME, name); return this; } public String getAlias() { return document.field(PROP_ALIAS); } public OPerspectiveItem setAlias(String alias) { document.field(PROP_ALIAS, alias); return this; } public String getIcon() { return document.field(PROP_ICON); } public OPerspectiveItem setIcon(String icon) { document.field(PROP_ICON, icon); return this; } public String getUrl() { return document.field(PROP_URL); } public OPerspectiveItem setUrl(String url) { document.field(PROP_URL, url); return this; } public OPerspective getPerspective() { ODocument perspective = getPerspectiveAsDocument(); return perspective != null ? new OPerspective(perspective) : null; } public ODocument getPerspectiveAsDocument() { OIdentifiable perspective = document.field(PROP_PERSPECTIVE); return perspective != null ? perspective.getRecord() : null; } public OPerspectiveItem setPerspective(OPerspective perspective) { return setPerspectiveAsDocument(perspective != null ? perspective.getDocument() : null); } public OPerspectiveItem setPerspectiveAsDocument(ODocument perspective) { document.field(PROP_PERSPECTIVE, perspective); return this; } public OPerspectiveItem getPerspectiveItem() { ODocument perspectiveItem = getPerspectiveItemAsDocument(); return perspectiveItem != null ? new OPerspectiveItem(perspectiveItem) : null; } public ODocument getPerspectiveItemAsDocument() { OIdentifiable perspectiveItem = document.field(PROP_PERSPECTIVE_ITEM); return perspectiveItem != null ? perspectiveItem.getRecord() : null; } public OPerspectiveItem setPerspectiveItem(OPerspectiveItem perspectiveItem) { return setPerspectiveItemAsDocument(perspectiveItem != null ?
perspectiveItem.getDocument() : null); } public OPerspectiveItem setPerspectiveItemAsDocument(ODocument perspectiveItem) { document.field(PROP_PERSPECTIVE_ITEM, perspectiveItem); return this; } public List<OPerspectiveItem> getSubItems() { return getSubItemsAsDocuments().stream() .map(OPerspectiveItem::new) .collect(Collectors.toCollection(LinkedList::new)); } public List<ODocument> getSubItemsAsDocuments() { List<OIdentifiable> subItems = document.field(PROP_SUB_ITEMS); return CommonUtils.getDocuments(subItems); } public OPerspectiveItem setSubItems(List<OPerspectiveItem> subItems) { List<ODocument> docs = subItems == null ? Collections.emptyList() : subItems.stream() .map(OPerspectiveItem::getDocument) .collect(Collectors.toCollection(LinkedList::new)); return setSubItemsAsDocuments(docs); } public OPerspectiveItem setSubItemsAsDocuments(List<ODocument> subItems) { document.field(PROP_SUB_ITEMS, subItems); return this; } } }
/* * Copyright (c) 2014, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.wso2.andes.configuration; import java.lang.reflect.InvocationTargetException; import java.net.InetAddress; import java.net.UnknownHostException; import java.text.DateFormat; import java.text.MessageFormat; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Arrays; import java.util.Date; import java.util.List; import java.util.Locale; import org.apache.commons.configuration.CompositeConfiguration; import org.apache.commons.configuration.ConfigurationException; import org.apache.commons.configuration.XMLConfiguration; import org.apache.commons.configuration.tree.xpath.XPathExpressionEngine; import org.apache.commons.lang.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.wso2.andes.configuration.enums.AndesConfiguration; import org.wso2.andes.configuration.util.ConfigurationProperty; import org.wso2.andes.kernel.AndesException; import org.wso2.carbon.utils.ServerConstants; /** * This class acts as a singleton access point for all config parameters used within MB. */ public class AndesConfigurationManager { private static Log log = LogFactory.getLog(AndesConfigurationManager.class); /** * Reserved Suffixes that activate different processing logic. */ private static final String PORT_TYPE = "_PORT"; /** * Reserved Prefixes that activate different processing logic. */ private static final String LIST_TYPE = "LIST_"; /** * Common Error states */ private static final String GENERIC_CONFIGURATION_PARSE_ERROR = "Error occurred when trying to parse " + "configuration value {0}."; private static final String NO_CHILD_FOR_INDEX_IN_PROPERTY = "There was no child at the given index {0} for the " + "parent property {1}."; private static final String NO_CHILD_FOR_KEY_IN_PROPERTY = "There was no child at the given key {0} for the " + "parent property {1}."; private static final String PROPERTY_NOT_A_LIST = "The input property {0} does not contain a list of child " + "properties."; private static final String PROPERTY_NOT_A_PORT = "The input property {0} is not defined as an integer value. " + "Therefore it is not a port property."; /** * Main file path of configuration files into which all other andes-specific config files (if any) * should be linked. */ private static final String ROOT_CONFIG_FILE_PATH = System.getProperty(ServerConstants.CARBON_HOME) + "/repository/conf/"; /** * File name of the main configuration file. */ private static final String ROOT_CONFIG_FILE_NAME = "broker.xml"; /** * Apache commons composite configuration is used to collect and maintain properties from multiple configuration * sources. */ private static CompositeConfiguration compositeConfiguration; /** * Decisive configurations coming from carbon.xml that affect the MB configs. 
e.g port Offset * These are injected as custom logic when reading the configurations. */ private static int carbonPortOffset; /** * initialize the configuration manager. this MUST be called at application startup. * (QpidServiceComponent bundle -> activate event) * * @throws AndesException */ public static void initialize(int portOffset) throws AndesException { try { compositeConfiguration = new CompositeConfiguration(); compositeConfiguration.setDelimiterParsingDisabled(true); log.info("Main andes configuration located at : " + ROOT_CONFIG_FILE_PATH + ROOT_CONFIG_FILE_NAME); XMLConfiguration rootConfiguration = new XMLConfiguration(); rootConfiguration.setDelimiterParsingDisabled(true); rootConfiguration.setFileName(ROOT_CONFIG_FILE_PATH + ROOT_CONFIG_FILE_NAME); rootConfiguration.setExpressionEngine(new XPathExpressionEngine()); rootConfiguration.load(); compositeConfiguration.addConfiguration(rootConfiguration); // Load and combine other configurations linked to broker.xml String[] linkedConfigurations = compositeConfiguration.getStringArray("links/link"); for (String linkedConfigurationPath : linkedConfigurations) { log.info("Linked configuration file path : " + ROOT_CONFIG_FILE_PATH + linkedConfigurationPath); XMLConfiguration linkedConfiguration = new XMLConfiguration(); linkedConfiguration.setDelimiterParsingDisabled(false); linkedConfiguration.setFileName(ROOT_CONFIG_FILE_PATH + linkedConfigurationPath); linkedConfiguration.setExpressionEngine(new XPathExpressionEngine()); linkedConfiguration.load(); compositeConfiguration.addConfiguration(linkedConfiguration); } // derive certain special properties that are not simply specified in // the configuration files. addDerivedProperties(); // set carbonPortOffset coming from carbon AndesConfigurationManager.carbonPortOffset = portOffset; } catch (ConfigurationException e) { String error = "Error occurred when trying to construct configurations from file at path : " + ROOT_CONFIG_FILE_PATH + ROOT_CONFIG_FILE_NAME; log.error(error, e); throw new AndesException(error, e); } catch (UnknownHostException e) { String error = "Error occurred when trying to derive the bind address for messaging from configurations."; log.error(error, e); throw new AndesException(error, e); } } /** * The sole method exposed to everyone accessing configurations. We can use the relevant * enums (e.g.- config.enums.BrokerConfiguration) to pass the required property and * its meta information. * * @param <T> Expected data type of the property * @param configurationProperty relevant enum value (e.g.- config.enums.AndesConfiguration) * @return Value of config in the expected data type. * @throws org.wso2.andes.kernel.AndesException */ public static <T> T readValue(ConfigurationProperty configurationProperty) { // If the property requests a port value, we need to apply the carbon offset to it. if (configurationProperty.get().getName().endsWith(PORT_TYPE)) { return (T) readPortValue(configurationProperty); } String valueInFile = compositeConfiguration.getString(configurationProperty.get() .getKeyInFile()); try { // The cast to T is unavoidable. Even though the function returns the same data type, // compiler doesn't know about it. We could add the data type as a parameter, // but that only complicates the method call. 
return (T) deriveValidConfigurationValue(configurationProperty.get().getKeyInFile(), configurationProperty.get().getDataType(), configurationProperty.get().getDefaultValue(), valueInFile); } catch (ConfigurationException e) { log.error(e); // Since the descriptive message is wrapped in exception itself // Return the parsed default value. This path will be met if a user adds an invalid value to a property. // Assuming we always have proper default values defined, this will rescue us from exploding due to a // small mistake. try { return (T) deriveValidConfigurationValue(configurationProperty.get().getKeyInFile(), configurationProperty.get().getDataType(), configurationProperty.get().getDefaultValue(), null); } catch (ConfigurationException e1) { // It is highly unlikely that this will throw an exception (if defined default values are also invalid). // But if it does, the method will return null. // Exception is not propagated to avoid unnecessary clutter of config related exception handling. log.error(e); // Since the descriptive message is wrapped in exception itself return null; } } } /** * Using this method, you can access a singular property of a child. * <p/> * example, * <p/> * <users> * <user userName="testuser1" password="password1" /> * <user userName="testuser2" password="password2" /> * </users> scenario. * * @param configurationProperty relevant enum value (e.g.- above scenario -> config * .enums.AndesConfiguration.TRANSPORTS_MQTT_PASSWORD) * @param index index of the child of whom you seek the property (e.g. above scenario -> 1) */ public static <T> T readValueOfChildByIndex(AndesConfiguration configurationProperty, int index) { String constructedKey = configurationProperty.get().getKeyInFile().replace("{i}", String.valueOf(index)); String valueInFile = compositeConfiguration.getString(constructedKey); // The cast to T is unavoidable. Even though the function returns the same data type, // compiler doesn't know about it. We could add the data type as a parameter, // but that only complicates the method call. try { return (T) deriveValidConfigurationValue(configurationProperty.get().getKeyInFile(), configurationProperty.get().getDataType(), configurationProperty.get().getDefaultValue(), valueInFile); } catch (ConfigurationException e) { // This means that there is no child by the given index for the parent property. log.error(MessageFormat.format(NO_CHILD_FOR_INDEX_IN_PROPERTY, index, configurationProperty), e); return null; } } /** * Using this method, you can access a singular property of a child. * <p/> * example, * <p/> * <users> * <user userName="testuser1">password1</user> * <user userName="testuser2">password2</user> * </users> scenario. * * @param configurationProperty relevant enum value (e.g.- above scenario -> config * .enums.AndesConfiguration.TRANSPORTS_MQTT_PASSWORD) * @param key key of the child of whom you seek the value (e.g. above scenario -> "testuser2") */ public static <T> T readValueOfChildByKey(AndesConfiguration configurationProperty, String key) { String constructedKey = configurationProperty.get().getKeyInFile().replace("{key}", key); String valueInFile = compositeConfiguration.getString(constructedKey); // The cast to T is unavoidable. Even though the function returns the same data type, // compiler doesn't know about it. We could add the data type as a parameter, // but that only complicates the method call. 
try { return (T) deriveValidConfigurationValue(configurationProperty.get().getKeyInFile(), configurationProperty.get().getDataType(), configurationProperty.get().getDefaultValue(), valueInFile); } catch (ConfigurationException e) { // This means that there is no child by the given key for the parent property. log.error(MessageFormat.format(NO_CHILD_FOR_KEY_IN_PROPERTY, key, configurationProperty), e); return null; } } /** * Use this method when you need to acquire a list of properties of the same group. * * @param configurationProperty relevant enum value (e.g.- config.enums.AndesConfiguration * .LIST_TRANSPORTS_MQTT_USERNAMES) * @return String list of required property values */ public static List<String> readValueList(AndesConfiguration configurationProperty) { if (configurationProperty.toString().startsWith(LIST_TYPE)) { return Arrays.asList(compositeConfiguration.getStringArray(configurationProperty.get().getKeyInFile())); } else { log.error(MessageFormat.format(PROPERTY_NOT_A_LIST, configurationProperty)); return new ArrayList<String>(); } } /** * Given the data type and the value read from a config, this returns the parsed value * of the property. * * @param key The key to the property being read (in xpath format, as contained in the file) * @param dataType Expected data type of the property * @param defaultValue This parameter should NEVER be null since we assign a default value to * every config property. * @param readValue Value read from the config file * @param <T> Expected data type of the property * @return Value of config in the expected data type. * @throws ConfigurationException */ private static <T> T deriveValidConfigurationValue(String key, Class<T> dataType, String defaultValue, String readValue) throws ConfigurationException { if (log.isDebugEnabled()) { log.debug("Reading andes configuration value " + key); } String validValue = defaultValue; if (StringUtils.isBlank(readValue)) { log.warn("Error when trying to read property : " + key + ". Switching to " + "default value : " + defaultValue); } else { validValue = readValue; } if (log.isDebugEnabled()) { log.debug("Valid value read for andes configuration property " + key + " is : " + validValue); } try { if (Boolean.class.equals(dataType)) { return dataType.cast(Boolean.parseBoolean(validValue)); } else if (Date.class.equals(dataType)) { // Sample date : "Sep 28 20:29:30 JST 2000" DateFormat df = new SimpleDateFormat("MMM dd kk:mm:ss z yyyy", Locale.ENGLISH); return dataType.cast(df.parse(validValue)); } else if (dataType.isEnum()) { // this indirectly forces the programmer to define enum values in upper case return (T) Enum.valueOf((Class<?
extends Enum>) dataType, validValue.toUpperCase(Locale.ENGLISH)); } else { return dataType.getConstructor(String.class).newInstance(validValue); } } catch (NoSuchMethodException e) { throw new ConfigurationException(MessageFormat.format(GENERIC_CONFIGURATION_PARSE_ERROR, key), e); } catch (ParseException e) { throw new ConfigurationException(MessageFormat.format(GENERIC_CONFIGURATION_PARSE_ERROR, key), e); } catch (IllegalAccessException e) { throw new ConfigurationException(MessageFormat.format(GENERIC_CONFIGURATION_PARSE_ERROR, key), e); } catch (InvocationTargetException e) { throw new ConfigurationException(MessageFormat.format(GENERIC_CONFIGURATION_PARSE_ERROR, key), e); } catch (InstantiationException e) { throw new ConfigurationException(MessageFormat.format(GENERIC_CONFIGURATION_PARSE_ERROR, key), e); } } /** * This method is used to derive certain special properties that are not simply specified in * the configuration files. */ private static void addDerivedProperties() throws AndesException, UnknownHostException { // For AndesConfiguration.TRANSPORTS_BIND_ADDRESS if ("*".equals(readValue(AndesConfiguration.TRANSPORTS_BIND_ADDRESS))) { InetAddress host = InetAddress.getLocalHost(); compositeConfiguration.setProperty(AndesConfiguration.TRANSPORTS_BIND_ADDRESS.get().getKeyInFile(), host.getHostAddress()); } } /** * This method is used when reading a port value from configuration. It is intended to abstract the port offset * logic. If the enum name contains the keyword "_PORT", this method will be called. * * @param configurationProperty relevant enum value (e.g.- above scenario -> config.enums.AndesConfiguration * .TRANSPORTS_MQTT_PORT) * @return port with carbon port offset */ private static Integer readPortValue(ConfigurationProperty configurationProperty) { if (!Integer.class.equals(configurationProperty.get().getDataType())) { log.error(MessageFormat.format(AndesConfigurationManager.PROPERTY_NOT_A_PORT, configurationProperty)); return 0; // 0 can never be a valid port; therefore, returning 0 in the error path will keep code // predictable. } try { String valueInFile = compositeConfiguration.getString(configurationProperty.get().getKeyInFile()); Integer portFromConfiguration = (Integer) deriveValidConfigurationValue(configurationProperty.get() .getKeyInFile(), configurationProperty.get().getDataType(), configurationProperty.get().getDefaultValue(), valueInFile); return portFromConfiguration + carbonPortOffset; } catch (ConfigurationException e) { log.error(MessageFormat.format(GENERIC_CONFIGURATION_PARSE_ERROR, configurationProperty), e); // recover and return the default port with the offset value. return Integer.parseInt(configurationProperty.get().getDefaultValue()) + carbonPortOffset; } } }
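/*
 * A minimal usage sketch of the read API documented above (illustrative only, not part of the
 * original class). It assumes the configuration has already been initialised elsewhere and uses
 * the AndesConfiguration constants named in the Javadoc examples above.
 */
class AndesConfigurationReadSketch {
    void printSampleValues() {
        // Child lookup by key, mirroring the <user userName="...">password</user> scenario above.
        String passwordByKey = AndesConfigurationManager.readValueOfChildByKey(
                AndesConfiguration.TRANSPORTS_MQTT_PASSWORD, "testuser2");

        // Child lookup by index, mirroring the attribute-style <user ... /> scenario above.
        String passwordByIndex = AndesConfigurationManager.readValueOfChildByIndex(
                AndesConfiguration.TRANSPORTS_MQTT_PASSWORD, 1);

        // List read; non-list properties log an error and yield an empty list.
        java.util.List<String> userNames =
                AndesConfigurationManager.readValueList(AndesConfiguration.LIST_TRANSPORTS_MQTT_USERNAMES);

        System.out.println(passwordByKey + " / " + passwordByIndex + " / " + userNames);
    }
}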
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.codeInspection.offlineViewer; import com.intellij.codeInsight.daemon.impl.CollectHighlightsUtil; import com.intellij.codeInsight.daemon.impl.DaemonProgressIndicator; import com.intellij.codeInsight.daemon.impl.analysis.HighlightingLevelManager; import com.intellij.codeInsight.intention.IntentionAction; import com.intellij.codeInspection.*; import com.intellij.codeInspection.actions.RunInspectionAction; import com.intellij.codeInspection.ex.*; import com.intellij.codeInspection.offline.OfflineProblemDescriptor; import com.intellij.codeInspection.reference.RefElement; import com.intellij.codeInspection.reference.RefEntity; import com.intellij.codeInspection.reference.RefModule; import com.intellij.codeInspection.ui.InspectionToolPresentation; import com.intellij.lang.Language; import com.intellij.lang.injection.InjectedLanguageManager; import com.intellij.openapi.application.ReadAction; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.progress.ProgressManager; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.Pair; import com.intellij.openapi.util.TextRange; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.psi.*; import com.intellij.psi.util.PsiUtilCore; import com.intellij.util.ArrayUtil; import org.jetbrains.annotations.Nls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.util.ArrayList; import java.util.LinkedHashSet; import java.util.List; import java.util.Set; import java.util.stream.Collectors; /** * @author Dmitry Batkovich */ class OfflineDescriptorResolveResult { private static final Logger LOG = Logger.getInstance(OfflineDescriptorResolveResult.class); private final RefEntity myResolvedEntity; private final CommonProblemDescriptor myResolvedDescriptor; private volatile boolean myExcluded; private OfflineDescriptorResolveResult(RefEntity resolvedEntity, CommonProblemDescriptor resolvedDescriptor) { myResolvedEntity = resolvedEntity; myResolvedDescriptor = resolvedDescriptor; } @Nullable RefEntity getResolvedEntity() { return myResolvedEntity; } @Nullable CommonProblemDescriptor getResolvedDescriptor() { return myResolvedDescriptor; } public boolean isExcluded() { return myExcluded; } public void setExcluded(boolean excluded) { myExcluded = excluded; } @NotNull static OfflineDescriptorResolveResult resolve(@NotNull OfflineProblemDescriptor descriptor, @NotNull InspectionToolWrapper wrapper, @NotNull InspectionToolPresentation presentation) { final RefEntity element = descriptor.getRefElement(presentation.getContext().getRefManager()); final CommonProblemDescriptor resolvedDescriptor = ReadAction.compute(() -> createDescriptor(element, descriptor, wrapper, presentation)); return new OfflineDescriptorResolveResult(element, resolvedDescriptor); } @Nullable private static CommonProblemDescriptor createDescriptor(@Nullable RefEntity element, @NotNull OfflineProblemDescriptor offlineDescriptor, @NotNull InspectionToolWrapper toolWrapper, @NotNull InspectionToolPresentation presentation) { if (toolWrapper instanceof GlobalInspectionToolWrapper) { final LocalInspectionToolWrapper localTool = ((GlobalInspectionToolWrapper)toolWrapper).getSharedLocalInspectionToolWrapper(); if (localTool != null) { final CommonProblemDescriptor descriptor = createDescriptor(element, offlineDescriptor, localTool, presentation); if 
(descriptor != null) { return descriptor; } } return createRerunGlobalToolDescriptor((GlobalInspectionToolWrapper)toolWrapper, element, offlineDescriptor); } if (!(toolWrapper instanceof LocalInspectionToolWrapper)) return null; final InspectionManager inspectionManager = InspectionManager.getInstance(presentation.getContext().getProject()); final OfflineProblemDescriptor offlineProblemDescriptor = offlineDescriptor; if (element instanceof RefElement) { final PsiElement psiElement = ((RefElement)element).getPsiElement(); if (psiElement != null) { ProblemDescriptor descriptor = ProgressManager.getInstance().runProcess( () -> runLocalTool(psiElement, offlineProblemDescriptor, (LocalInspectionToolWrapper)toolWrapper, inspectionManager, presentation.getContext()), new DaemonProgressIndicator()); if (descriptor != null) return descriptor; } return null; } final List<String> hints = offlineProblemDescriptor.getHints(); CommonProblemDescriptor descriptor = inspectionManager.createProblemDescriptor(offlineProblemDescriptor.getDescription(), (QuickFix)null); final QuickFix[] quickFixes = getFixes(descriptor, hints, presentation); if (quickFixes != null) { descriptor = inspectionManager.createProblemDescriptor(offlineProblemDescriptor.getDescription(), quickFixes); } return descriptor; } private static ProblemDescriptor runLocalTool(@NotNull PsiElement psiElement, @NotNull OfflineProblemDescriptor offlineProblemDescriptor, @NotNull LocalInspectionToolWrapper toolWrapper, @NotNull InspectionManager inspectionManager, @NotNull GlobalInspectionContextImpl context) { PsiFile containingFile = psiElement.getContainingFile(); final ProblemsHolder holder = new ProblemsHolder(inspectionManager, containingFile, false); final LocalInspectionTool localTool = toolWrapper.getTool(); TextRange textRange = psiElement.getTextRange(); LOG.assertTrue(textRange != null, "text range must not be null here; " + "isValid = " + psiElement.isValid() + ", " + "isPhysical = " + psiElement.isPhysical() + ", " + "containingFile = " + containingFile.getName() + ", " + "inspection = " + toolWrapper.getShortName()); final int startOffset = textRange.getStartOffset(); final int endOffset = textRange.getEndOffset(); LocalInspectionToolSession session = new LocalInspectionToolSession(containingFile, startOffset, endOffset); final PsiElementVisitor visitor = localTool.buildVisitor(holder, true, session); localTool.inspectionStarted(session, false); final PsiElement[] elementsInRange = getElementsIntersectingRange(containingFile, startOffset, endOffset); InjectedLanguageManager injectedLanguageManager = InjectedLanguageManager.getInstance(context.getProject()); for (PsiElement element : elementsInRange) { List<Pair<PsiElement, TextRange>> injectedPsiFiles = injectedLanguageManager.getInjectedPsiFiles(element); if (injectedPsiFiles != null) { for (Pair<PsiElement, TextRange> file : injectedPsiFiles) { file.getFirst().accept(new PsiRecursiveElementWalkingVisitor() { @Override public void visitElement(PsiElement element) { element.accept(visitor); super.visitElement(element); } }); } } element.accept(visitor); } localTool.inspectionFinished(session, holder); if (holder.hasResults()) { final List<ProblemDescriptor> list = holder.getResults(); final int idx = offlineProblemDescriptor.getProblemIndex(); int curIdx = 0; for (ProblemDescriptor descriptor : list) { final PsiNamedElement member = BatchModeDescriptorsUtil.getContainerElement(descriptor.getPsiElement(), localTool, context); if (psiElement instanceof PsiFile || 
psiElement.equals(member)) { if (curIdx == idx) { return descriptor; } curIdx++; } } } return null; } @NotNull private static PsiElement[] getElementsIntersectingRange(PsiFile file, final int startOffset, final int endOffset) { final FileViewProvider viewProvider = file.getViewProvider(); final Set<PsiElement> result = new LinkedHashSet<>(); for (Language language : viewProvider.getLanguages()) { final PsiFile psiRoot = viewProvider.getPsi(language); if (HighlightingLevelManager.getInstance(file.getProject()).shouldInspect(psiRoot)) { result.addAll(CollectHighlightsUtil.getElementsInRange(psiRoot, startOffset, endOffset, true)); } } return PsiUtilCore.toPsiElementArray(result); } @Nullable private static LocalQuickFix[] getFixes(@NotNull CommonProblemDescriptor descriptor, List<String> hints, InspectionToolPresentation presentation) { final List<LocalQuickFix> fixes = new ArrayList<>(hints == null ? 1 : hints.size()); if (hints == null) { addFix(descriptor, fixes, null, presentation); } else { for (String hint : hints) { addFix(descriptor, fixes, hint, presentation); } } return fixes.isEmpty() ? null : fixes.toArray(LocalQuickFix.EMPTY_ARRAY); } private static void addFix(@NotNull CommonProblemDescriptor descriptor, final List<? super LocalQuickFix> fixes, String hint, InspectionToolPresentation presentation) { final IntentionAction intentionAction = presentation.findQuickFixes(descriptor, hint); if (intentionAction instanceof QuickFixWrapper) { fixes.add(((QuickFixWrapper)intentionAction).getFix()); } } private static CommonProblemDescriptor createRerunGlobalToolDescriptor(@NotNull GlobalInspectionToolWrapper wrapper, @Nullable RefEntity entity, OfflineProblemDescriptor offlineDescriptor) { QuickFix rerunFix = new QuickFix() { @Nls @NotNull @Override public String getFamilyName() { return "Rerun \'" + wrapper.getDisplayName() + "\' inspection"; } @Override public void applyFix(@NotNull Project project, @NotNull CommonProblemDescriptor descriptor) { VirtualFile file = null; if (entity != null && entity.isValid() && entity instanceof RefElement) { file = ((RefElement)entity).getPointer().getVirtualFile(); } PsiFile psiFile = null; if (file != null) { psiFile = PsiManager.getInstance(project).findFile(file); } RunInspectionAction.runInspection(project, wrapper.getShortName(), file, null, psiFile); } @Override public boolean startInWriteAction() { return false; } }; List<String> hints = offlineDescriptor.getHints(); if (hints != null && entity instanceof RefModule) { List<QuickFix> fixes = hints.stream().map(hint -> wrapper.getTool().getQuickFix(hint)).filter(f -> f != null).collect(Collectors.toList()); return new ModuleProblemDescriptorImpl(ArrayUtil.append(fixes.toArray(QuickFix.EMPTY_ARRAY), rerunFix), offlineDescriptor.getDescription(), ((RefModule)entity).getModule()); } return new CommonProblemDescriptorImpl(new QuickFix[]{rerunFix}, offlineDescriptor.getDescription()); } }
/* Copyright 2016 Goldman Sachs. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.gs.fw.common.mithra.test; import com.gs.collections.impl.set.mutable.primitive.*; import com.gs.fw.common.mithra.attribute.Attribute; import com.gs.fw.common.mithra.finder.Operation; import com.gs.fw.common.mithra.test.domain.InfinityTimestamp; import com.gs.fw.common.mithra.test.domain.ParaDesk; import com.gs.fw.common.mithra.test.domain.ParaDeskFinder; import com.gs.fw.common.mithra.test.domain.ParaDeskList; import junit.framework.TestCase; import java.sql.Timestamp; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.Arrays; import java.util.Date; import java.util.HashSet; import java.util.Set; public class TestInOperations extends TestCase { protected static SimpleDateFormat timestampFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); protected Date getDawnOfTime() { try { return timestampFormat.parse("1900-01-01 00:00:00"); } catch (ParseException e) { //never happens } return null; } public void testBooleanInOperatons() { final Boolean[] testBools = { true }; final BooleanHashSet boolSet = new BooleanHashSet(); boolSet.add(true); final Operation setOp = ParaDeskFinder.activeBoolean().in(boolSet); this.genericInOperationEqualsTest(testBools, ParaDeskFinder.activeBoolean(), setOp); } public void testByteInOperatons() { final Byte[] testBytes = { (byte) 10, (byte) 20, (byte) 30, (byte) 40, (byte) 50, (byte) 127, (byte) -127, (byte) -128, (byte) 127 }; final ByteHashSet byteSet = new ByteHashSet(); for (byte b : testBytes) { byteSet.add(b); } final Operation setOp = ParaDeskFinder.locationByte().in(byteSet); this.genericInOperationEqualsTest(testBytes, ParaDeskFinder.locationByte(), setOp); } public void testCharacterInOperatons() { final Character[] testChars = { 'O', 'P', 'G', 'T' }; final CharHashSet charSet = new CharHashSet(); for (char c : testChars) { charSet.add(c); } final Operation setOp = ParaDeskFinder.statusChar().in(charSet); this.genericInOperationEqualsTest(testChars, ParaDeskFinder.statusChar(), setOp); } public void testDoubleInOperatons() { final Double[] testDoubles = { 4000000000.0d, 677673542.3d }; final DoubleHashSet doubleSet = new DoubleHashSet(); for (double dbl : testDoubles) { doubleSet.add(dbl); } final Operation setOp = ParaDeskFinder.sizeDouble().in(doubleSet); this.genericInOperationEqualsTest(testDoubles, ParaDeskFinder.sizeDouble(), setOp); } public void testFloatInOperatons() { final Float[] testFloats = { 4000000000.0f, 677673542.3f }; final FloatHashSet floatSet = new FloatHashSet(); for (float flt : testFloats) { floatSet.add(flt); } final Operation setOp = ParaDeskFinder.maxFloat().in(floatSet); this.genericInOperationEqualsTest(testFloats, ParaDeskFinder.maxFloat(), setOp); } public void testIntegerInOperatons() { final Integer[] testIntegers = { 100, 200, 300, 400, 500, 600, 700, 800, 900 }; final IntHashSet IntHashSet = new IntHashSet(); for (int i : testIntegers) { IntHashSet.add(i); } final Operation setOp = ParaDeskFinder.tagInt().in(IntHashSet); 
this.genericInOperationEqualsTest(testIntegers, ParaDeskFinder.tagInt(), setOp); } public void testLongInOperatons() { final Long[] testLongs = { 1000000L, 2000000L }; final LongHashSet longSet = new LongHashSet(); for (long lng : testLongs) { longSet.add(lng); } final Operation setOp = ParaDeskFinder.connectionLong().in(longSet); this.genericInOperationEqualsTest(testLongs, ParaDeskFinder.connectionLong(), setOp); } public void testShortInOperatons() { final Short[] testShorts = { 1000, 2000 }; final ShortHashSet shortSet = new ShortHashSet(); for (short shrt : testShorts) { shortSet.add(shrt); } final Operation setOp = ParaDeskFinder.minShort().in(shortSet); this.genericInOperationEqualsTest(testShorts, ParaDeskFinder.minShort(), setOp); } public void testStringInOperatons() { final String[] testStrings = { "rnd", "cap", "lsd", "zzz" }; final Set<String> stringSet = new HashSet(Arrays.asList(testStrings)); final Operation setOp = ParaDeskFinder.deskIdString().in(stringSet); this.genericInOperationEqualsTest(testStrings, ParaDeskFinder.deskIdString(), setOp); } public void testTimestampInOperatonsDawnOfTime() { final Timestamp[] testTimestamps = { new Timestamp(getDawnOfTime().getTime()), new Timestamp(System.currentTimeMillis()) }; final Set<Timestamp> timestampSet = new HashSet(Arrays.asList(testTimestamps)); final Operation setOp = ParaDeskFinder.createTimestamp().in(timestampSet); this.genericInOperationEqualsTest(testTimestamps, ParaDeskFinder.createTimestamp(), setOp); } public void testTimestampInOperatonsParaInfinityDate() { final Timestamp[] testTimestamps = { InfinityTimestamp.getParaInfinity(), new Timestamp(System.currentTimeMillis()) }; final Set<Timestamp> timestampSet = new HashSet(Arrays.asList(testTimestamps)); final Operation setOp = ParaDeskFinder.createTimestamp().in(timestampSet); this.genericInOperationEqualsTest(testTimestamps, ParaDeskFinder.createTimestamp(), setOp); } public void testDateInOperatons() { final Date[] testDates = { getDawnOfTime(), new Date() }; final Set<Date> dateSet = new HashSet(Arrays.asList(testDates)); final Operation setOp = ParaDeskFinder.closedDate().in(dateSet); this.genericInOperationEqualsTest(testDates, ParaDeskFinder.closedDate(), setOp); } private <T> void genericInOperationEqualsTest(final T[] testvalues, final Attribute<ParaDesk, T> attribute, final Operation setOp) { final ParaDeskList obsList = new ParaDeskList(); for (T value : testvalues) { final ParaDesk paraDesk = new ParaDesk(); attribute.setValue(paraDesk, value); obsList.add(paraDesk); } final Operation listOp = attribute.in(obsList, attribute); final Iterable<ParaDesk> objsIterable = new HashSet(obsList); final Operation iterableOp = attribute.in(objsIterable, attribute); assertEquals(setOp, listOp); assertEquals(setOp, iterableOp); assertEquals(listOp, iterableOp); } }
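/*
 * Illustrative sketch (not one of the original test cases) of the equivalence exercised by
 * genericInOperationEqualsTest above, shown concretely for the tagInt attribute: an in-Operation
 * built from a primitive set should equal one built by extracting the same attribute from a
 * list of ParaDesk objects.
 */
class InOperationEqualitySketch {
    void sketch() {
        // Route 1: in-Operation from a primitive set.
        final IntHashSet tagSet = new IntHashSet();
        tagSet.add(100);
        tagSet.add(200);
        final Operation fromSet = ParaDeskFinder.tagInt().in(tagSet);

        // Route 2: in-Operation from a list of objects, extracting the same attribute.
        final Attribute<ParaDesk, Integer> attribute = ParaDeskFinder.tagInt();
        final ParaDeskList desks = new ParaDeskList();
        for (int tag : new int[]{100, 200}) {
            final ParaDesk desk = new ParaDesk();
            attribute.setValue(desk, tag);
            desks.add(desk);
        }
        final Operation fromList = attribute.in(desks, attribute);

        // The tests above assert that both routes yield equal operations.
        System.out.println(fromSet.equals(fromList));
    }
}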
package com.hackbridge.viral; import android.app.*; import android.content.Context; import android.content.Intent; import android.content.SharedPreferences; import android.location.Criteria; import android.location.Location; import android.location.LocationListener; import android.location.LocationManager; import android.os.Bundle; import android.os.Handler; import android.os.Looper; import android.provider.Settings; import android.text.Html; import android.text.SpannableString; import android.util.Log; import android.view.View; import android.view.View.OnClickListener; import android.widget.*; import java.io.IOException; import java.net.Socket; public class MainActivity extends Activity { public static Handler handle; private static MessageSender ms; private static MessageReceiver mr; private LocationManager locationManager; private String provider; private MyLocationListener mylistener; private Criteria criteria; private boolean round_on; private AwarenessState awareness; private PhysicalState physical; private long identity; private String server = "192.168.1.12";//"192.168.0.30";//"77.46.191.59";//"188.166.154.60"; private int port = 25000; private Socket sock; final MainActivity ma = this; /** * Setter for isOn */ public void setRoundOn(boolean isOn) { round_on = isOn; } /** * Initialises all the necessary threads */ public synchronized void restartEverything() { Log.d("LAG", "attempting reconnection"); Thread messageThread = new Thread() { @Override public void run() { ms = null; mr = null; try { Log.d("LAG", "Pre socket creation " + server + ", port " + port); sock = new Socket(server, port); Log.d("LAG", "Socket created"); } catch (IOException e) { // we reattempt connection if (!reatemptConnection()) { // we failed to reconnect fail(); } } boolean success; do { // attempt to make the sender and receiver and restart on failure success = setupSenderAndREceiver(); } while (!success && reatemptConnection()); if (!success) { fail(); } else { mr.setDaemon(true); mr.start(); } } }; messageThread.setDaemon(true); messageThread.start(); } /** * Notifies user about failure and kills the app */ private void fail() { // FAILURE, so we notify the user and exit Toast.makeText(MainActivity.this, "Fatal error, please try again later!", Toast.LENGTH_LONG).show(); System.exit(1); } /** * Attempts to reconnect to the defined server and port * * @return the socket if successful and null if not */ private Socket reconnect() { try { sock = new Socket(server, port); return sock; } catch (IOException e) { Log.d("LAG", "Cannot connect to " + server + ", port " + port); return null; } } /** * Repeatedly attempts to reconnect to the defined server and port with exponential backoff */ private boolean reatemptConnection() { int backoff = 1; boolean socket_is_alive = false; for (int i = 0; i < 7 && !socket_is_alive; i++) { Log.d("LAG", "Retrying connection: " + i); try { Thread.sleep(backoff * 1000); // sleep backoff seconds } catch (InterruptedException e) { // silently ignore this } // attempt reconnection if (reconnect() != null) { // success! 
we continue socket_is_alive = true; } backoff *= 2; } return socket_is_alive; } /** * Instantiates the sender and receiver threads * * @return true on success */ private boolean setupSenderAndREceiver() { mr = new MessageReceiver(ma, sock); try { ms = new MessageSender(ma, sock); } catch (IOException e) { Log.d("LAG", "Could not initialise sender thread, retrying!"); return false; } if (identity == -1) ms.sendMessage(new HelloNewMessage()); else ms.sendMessage(new HelloMessage(identity)); Log.d("LAG", "Sending hello message with id " + identity); return true; } /** * Sends a pop-up notification */ public void writeNotification(String title, String body) { Notification.Builder mBuilder = new Notification.Builder(this) .setSmallIcon(R.drawable.icon_syringe_left) .setContentTitle(title) .setContentText(body); // Creates an explicit intent for an Activity in the app Intent resultIntent = new Intent(this, MainActivity.class); // The stack builder object will contain an artificial back stack for the // started Activity. // This ensures that navigating backward from the Activity leads out of // the application to the Home screen. TaskStackBuilder stackBuilder = TaskStackBuilder.create(this); // Adds the back stack for the Intent (but not the Intent itself) stackBuilder.addParentStack(MainActivity.class); // Adds the Intent that starts the Activity to the top of the stack stackBuilder.addNextIntent(resultIntent); PendingIntent resultPendingIntent = stackBuilder.getPendingIntent(0, PendingIntent.FLAG_UPDATE_CURRENT); mBuilder.setContentIntent(resultPendingIntent); NotificationManager mNotificationManager = (NotificationManager) getSystemService(Context.NOTIFICATION_SERVICE); Notification mNotification = mBuilder.build(); // We want to hide the notification after it was selected mNotification.flags |= Notification.FLAG_AUTO_CANCEL; mNotificationManager.notify(1, mNotification); } /** * Loads the physical state from the saved state */ public PhysicalState loadPhysicalState() { SharedPreferences sharedPref = this.getPreferences(Context.MODE_PRIVATE); // identity reading and init switch int phys = sharedPref.getInt("physical", -1); switch (phys) { case 0: physical = PhysicalState.SUSCEPTIBLE; break; case 1: physical = PhysicalState.VACCINATED; break; case 2: physical = PhysicalState.INFECTED; break; } Log.d("LAG-LOGIC", "Loaded Physiscal State is: " + physical); return physical; } /** * Saves the given physical state in memory */ public void setPhysicalState(PhysicalState physicalState) { PhysicalState oldState = loadPhysicalState(); SharedPreferences sharedPref = this.getPreferences(Context.MODE_PRIVATE); SharedPreferences.Editor editor = sharedPref.edit(); switch (physicalState) { case SUSCEPTIBLE: case CARRIER: editor.putInt("physical", 0); break; case VACCINATED: editor.putInt("physical", 1); break; case INFECTED: editor.putInt("physical", 2); break; } editor.apply(); Log.d("LAG-LOGIC", "Physiscal State is: " + physical); physical = physicalState; } /** * Loads the special code from the saved state if one exists */ private String loadCode() { SharedPreferences sharedPref = this.getPreferences(Context.MODE_PRIVATE); // identity reading and init switch String code = sharedPref.getString("code", ""); Log.d("LAG-LOGIC", "Loaded code is: " + code); return code; } /** * Saves the given code in memory */ public void setCode(String code) { SharedPreferences sharedPref = this.getPreferences(Context.MODE_PRIVATE); SharedPreferences.Editor editor = sharedPref.edit(); editor.putString("code", code); 
editor.apply(); Log.d("LAG-LOGIC", "Code is: " + code); } /** * Loads the unique identity number */ private long loadIdentity() { SharedPreferences sharedPref = this.getPreferences(Context.MODE_PRIVATE); identity = sharedPref.getLong("identity", -1); Log.d("LAG-LOGIC", "Loaded identity is: " + identity); return identity; } /** * Stores the unique identity number */ public void setIdentity(long ident) { SharedPreferences sharedPref = this.getPreferences(Context.MODE_PRIVATE); SharedPreferences.Editor editor = sharedPref.edit(); editor.putLong("identity", ident); editor.apply(); identity = ident; Log.d("LAG-LOGIC", "Identity is: " + identity); } /** * Called when the activity is first created. */ @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_main); Intent mainIntent = getIntent(); String ipText = mainIntent.getStringExtra("ip"); String portText = mainIntent.getStringExtra("port"); Log.d("LAG", "ip "+ ipText + "\nport " + portText); try { server = ipText; port = Integer.parseInt(portText); } catch (NumberFormatException n) { // fails because the port was malformed fail(); } final ImageView orb = (ImageView) findViewById(R.id.orb); final ImageView leftSyringe = (ImageView) findViewById(R.id.leftSyringe); final ImageView rightSyringe = (ImageView) findViewById(R.id.rightSyringe); final EditText codeInputTextBox = (EditText) findViewById(R.id.codeInputTextBox); final TextView vaccCodeLabel = (TextView) findViewById(R.id.vaccCodeLabel); final Button submitButton = (Button) findViewById(R.id.submitButton); final TextView stateLabel = (TextView) findViewById(R.id.stateLabel); final EditText codeGiver = (EditText) findViewById(R.id.codeGiver); final TextView instructionsLabel = (TextView) findViewById(R.id.instructionsLabel); // add a listener for the button submitButton.setOnClickListener(new OnClickListener() { @Override public void onClick(View v) { if (ms != null) ms.sendMessage(new CodeMessage(identity, codeInputTextBox.getText().toString())); } }); // Main UI thread handle = new Handler(Looper.getMainLooper()) { @Override public void handleMessage(android.os.Message inputMessage) { ChangeMessage front = (ChangeMessage) inputMessage.obj; PhysicalState physical = front.getInfected(); String code = front.getCode(); if (code.equals("~")) { orb.setImageResource(R.drawable.circle_gray); stateLabel.setText("ROUND NOT STARTED"); instructionsLabel.setText(""); codeGiver.setText(""); setCode(""); } else if (code.equals("-")) { orb.setImageResource(R.drawable.circle_gray); stateLabel.setText("ROUND FINISHED"); instructionsLabel.setText(""); codeGiver.setText(""); writeNotification("You have lost!", "You were unsuccessful in accomplishing your objective. Better luck next time!"); setCode(""); } else if (code.equals("+")) { orb.setImageResource(R.drawable.circle_gray); stateLabel.setText("ROUND FINISHED"); instructionsLabel.setText(""); codeGiver.setText(""); writeNotification("You have won!", "You have successfully accomplished your objective! 
Good work!"); setCode(""); } else { if (inputMessage.what == 1) { // human orb.setImageResource(R.drawable.circle_blue); stateLabel.setText("SUSCEPTIBLE"); setPhysicalState(PhysicalState.SUSCEPTIBLE); String s = "You are a <b>HUMAN</b>!<br>Your objective is to finish the round without getting infected."; instructionsLabel.setText(Html.fromHtml(s)); } else if (inputMessage.what == 2) { // infector orb.setImageResource(R.drawable.circle_blue); stateLabel.setText("SUSCEPTIBLE"); setPhysicalState(PhysicalState.SUSCEPTIBLE); String s = "You are an <b>INFECTOR</b>!<br>Your objective is to help infect at least half of the population by the end of the round."; instructionsLabel.setText(Html.fromHtml(s)); } PhysicalState oldPhysical = loadPhysicalState(); if (oldPhysical != physical) { switch (physical) { case SUSCEPTIBLE: case CARRIER: orb.setImageResource(R.drawable.circle_blue); stateLabel.setText("SUSCEPTIBLE"); break; case VACCINATED: orb.setImageResource(R.drawable.circle_green); stateLabel.setText("VACCINATED"); writeNotification("You are VACCINATED!", "Your vaccination has been successful!"); break; case INFECTED: orb.setImageResource(R.drawable.circle_red); stateLabel.setText("INFECTED"); writeNotification("You are INFECTED!", "Visit Viral, and don't lose hope!"); break; } setPhysicalState(physical); } String oldCode = loadCode(); if (!code.equals(oldCode)) { setCode(code); codeGiver.setText("Your Viral code is:\n\n" + code + "\n\nShare with care!"); if (!code.equals("")) { writeNotification("You are AWARE!", "Visit Viral for your vaccination code!"); } } } } }; // Get the location manager locationManager = (LocationManager) getSystemService(Context.LOCATION_SERVICE); // provider = LocationManager.NETWORK_PROVIDER; if (locationManager.getAllProviders().contains(LocationManager.GPS_PROVIDER) && locationManager.isProviderEnabled(LocationManager.GPS_PROVIDER)) { provider = LocationManager.GPS_PROVIDER; Log.d("LAG-GPS", "GPS location provider selected"); } else if (locationManager.getAllProviders().contains(LocationManager.NETWORK_PROVIDER) && locationManager.isProviderEnabled(LocationManager.NETWORK_PROVIDER)) { provider = LocationManager.NETWORK_PROVIDER; Log.d("LAG-GPS", "NETWORK location provider selected"); } else { // no network or gps, so we fail fail(); } // the last known location of this provider Location location = locationManager.getLastKnownLocation(provider); mylistener = new MyLocationListener(); if (location != null) { mylistener.onLocationChanged(location); } else { // leads to the settings because there is no last known location Intent intent = new Intent(Settings.ACTION_LOCATION_SOURCE_SETTINGS); startActivity(intent); } // location updates: at least 1 meter and 100millsecs change locationManager.requestLocationUpdates(provider, 100, 0.5f, mylistener); round_on = false; identity = loadIdentity(); final MainActivity m = this; restartEverything(); } /** * Listener designated to fire off a wrapped location to the server, once position changes */ private class MyLocationListener implements LocationListener { @Override public void onLocationChanged(Location location) { if (round_on && ms != null) ms.sendMessage(new PositionMessage(identity, new LocationWrapper(location.getLongitude(), location.getLatitude(), location.getAltitude()))); } @Override public void onStatusChanged(String provider, int status, Bundle extras) { Toast.makeText(MainActivity.this, provider + "'s status changed to " + status + "!", Toast.LENGTH_SHORT).show(); } @Override public void onProviderEnabled(String 
provider) { Toast.makeText(MainActivity.this, "Provider " + provider + " enabled!", Toast.LENGTH_SHORT).show(); } @Override public void onProviderDisabled(String provider) { Toast.makeText(MainActivity.this, "Provider " + provider + " disabled!", Toast.LENGTH_SHORT).show(); } } }
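/*
 * A standalone sketch (not part of the original app) of the exponential backoff pattern used by
 * reatemptConnection() above: up to 7 attempts, sleeping 1, 2, 4, ... seconds between attempts.
 * The connect step is abstracted behind a Supplier so the retry logic can be read in isolation.
 */
class BackoffRetrySketch {
    static boolean retryWithBackoff(java.util.function.Supplier<Boolean> connect) {
        int backoffSeconds = 1;
        for (int attempt = 0; attempt < 7; attempt++) {
            try {
                Thread.sleep(backoffSeconds * 1000L); // wait before (re)trying, as the app does
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
                return false;
            }
            if (connect.get()) {
                return true; // connected; the caller can go on to set up sender/receiver threads
            }
            backoffSeconds *= 2; // double the wait: 1s, 2s, 4s, ... up to 64s
        }
        return false; // still not connected after all attempts; the caller gives up
    }
}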
/* * Copyright 2012 JBoss Inc * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.drools.scorecards.pmml; import org.dmg.pmml.pmml_4_2.descr.*; import org.drools.core.util.StringUtils; import org.drools.pmml.pmml_4_2.extensions.PMMLExtensionNames; import org.drools.pmml.pmml_4_2.extensions.PMMLIOAdapterMode; import org.drools.scorecards.StringUtil; import org.drools.scorecards.parser.xls.XLSKeywords; import java.math.BigInteger; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Date; import java.util.List; import java.util.Locale; public class ScorecardPMMLGenerator { public PMML generateDocument(Scorecard pmmlScorecard) { //first clean up the scorecard removeEmptyExtensions(pmmlScorecard); createAndSetPredicates(pmmlScorecard); //second add additional elements to scorecard createAndSetOutput(pmmlScorecard); repositionExternalClassExtensions(pmmlScorecard); Extension scorecardPackage = ScorecardPMMLUtils.getExtension(pmmlScorecard.getExtensionsAndCharacteristicsAndMiningSchemas(), PMMLExtensionNames.MODEL_PACKAGE ); if ( scorecardPackage != null) { pmmlScorecard.getExtensionsAndCharacteristicsAndMiningSchemas().remove(scorecardPackage); } Extension importsExt = ScorecardPMMLUtils.getExtension(pmmlScorecard.getExtensionsAndCharacteristicsAndMiningSchemas(), PMMLExtensionNames.MODEL_IMPORTS ); if ( importsExt != null) { pmmlScorecard.getExtensionsAndCharacteristicsAndMiningSchemas().remove(importsExt); } Extension agendaGroupExt = ScorecardPMMLUtils.getExtension(pmmlScorecard.getExtensionsAndCharacteristicsAndMiningSchemas(), PMMLExtensionNames.AGENDA_GROUP ); if ( agendaGroupExt != null) { pmmlScorecard.getExtensionsAndCharacteristicsAndMiningSchemas().remove(agendaGroupExt); } Extension ruleFlowGroupExt = ScorecardPMMLUtils.getExtension(pmmlScorecard.getExtensionsAndCharacteristicsAndMiningSchemas(), PMMLExtensionNames.RULEFLOW_GROUP); if ( ruleFlowGroupExt != null) { pmmlScorecard.getExtensionsAndCharacteristicsAndMiningSchemas().remove(ruleFlowGroupExt); } //now create the PMML document PMML pmml = new PMML(); Header header = new Header(); Timestamp timestamp = new Timestamp(); timestamp.getContent().add(new SimpleDateFormat("yyyy.MM.dd 'at' HH:mm:ss z", Locale.ENGLISH).format(new Date())); header.setTimestamp(timestamp); header.setDescription("generated by the drools-scorecards module"); header.getExtensions().add(scorecardPackage); header.getExtensions().add(importsExt); if (ruleFlowGroupExt != null){ header.getExtensions().add(ruleFlowGroupExt); } if (agendaGroupExt != null){ header.getExtensions().add(agendaGroupExt); } pmml.setHeader(header); createAndSetDataDictionary(pmml, pmmlScorecard); pmml.getAssociationModelsAndBaselineModelsAndClusteringModels().add(pmmlScorecard); removeAttributeFieldExtension(pmmlScorecard); return pmml; } private void repositionExternalClassExtensions(Scorecard pmmlScorecard) { Characteristics characteristics = null; for (Object obj : pmmlScorecard.getExtensionsAndCharacteristicsAndMiningSchemas()) { if ( obj instanceof 
Characteristics ) { characteristics = (Characteristics) obj; break; } } for (Object obj : pmmlScorecard.getExtensionsAndCharacteristicsAndMiningSchemas()) { if ( obj instanceof MiningSchema ) { MiningSchema schema = (MiningSchema)obj; Extension adapter = new Extension(); adapter.setName( PMMLExtensionNames.IO_ADAPTER ); adapter.setValue( PMMLIOAdapterMode.BEAN.name() ); schema.getExtensions().add( adapter ); for (MiningField miningField : schema.getMiningFields()) { String fieldName = miningField.getName(); for (Characteristic characteristic : characteristics.getCharacteristics()){ String characteristicName = ScorecardPMMLUtils.extractFieldNameFromCharacteristic(characteristic); if (fieldName.equalsIgnoreCase(characteristicName)){ Extension extension = ScorecardPMMLUtils.getExtension(characteristic.getExtensions(), PMMLExtensionNames.EXTERNAL_CLASS ); if ( extension != null ) { characteristic.getExtensions().remove(extension); if ( ScorecardPMMLUtils.getExtension(miningField.getExtensions(), PMMLExtensionNames.EXTERNAL_CLASS ) == null ) { miningField.getExtensions().add(extension); } } } } } MiningField targetField = new MiningField(); targetField.setName( ScorecardPMMLExtensionNames.DEFAULT_PREDICTED_FIELD ); targetField.setUsageType( FIELDUSAGETYPE.PREDICTED ); schema.getMiningFields().add( targetField ); } else if ( obj instanceof Output ) { Extension adapter = new Extension(); adapter.setName( PMMLExtensionNames.IO_ADAPTER ); adapter.setValue( PMMLIOAdapterMode.BEAN.name() ); ( (Output) obj ).getExtensions().add( adapter ); } } } private void removeAttributeFieldExtension(Scorecard pmmlScorecard) { for (Object obj : pmmlScorecard.getExtensionsAndCharacteristicsAndMiningSchemas()) { if (obj instanceof Characteristics) { Characteristics characteristics = (Characteristics) obj; for (org.dmg.pmml.pmml_4_2.descr.Characteristic characteristic : characteristics.getCharacteristics()) { for (Attribute attribute : characteristic.getAttributes()) { Extension fieldExtension = ScorecardPMMLUtils.getExtension(attribute.getExtensions(), ScorecardPMMLExtensionNames.CHARACTERTISTIC_FIELD); if ( fieldExtension != null ) { attribute.getExtensions().remove(fieldExtension); //break; } } } } } } private void createAndSetDataDictionary(PMML pmml, Scorecard pmmlScorecard) { DataDictionary dataDictionary = new DataDictionary(); pmml.setDataDictionary(dataDictionary); int ctr = 0; for (Object obj : pmmlScorecard.getExtensionsAndCharacteristicsAndMiningSchemas()) { if (obj instanceof Characteristics) { Characteristics characteristics = (Characteristics) obj; for (org.dmg.pmml.pmml_4_2.descr.Characteristic characteristic : characteristics.getCharacteristics()) { DataField dataField = new DataField(); Extension dataTypeExtension = ScorecardPMMLUtils.getExtension(characteristic.getExtensions(), ScorecardPMMLExtensionNames.CHARACTERTISTIC_DATATYPE); String dataType = dataTypeExtension.getValue(); String factType = ScorecardPMMLUtils.getExtensionValue(characteristic.getExtensions(), ScorecardPMMLExtensionNames.CHARACTERTISTIC_FACTTYPE); if ( factType != null ){ Extension extension = new Extension(); extension.setName("FactType"); extension.setValue(factType); dataField.getExtensions().add(extension); } if (XLSKeywords.DATATYPE_NUMBER.equalsIgnoreCase(dataType)) { dataField.setDataType(DATATYPE.DOUBLE); dataField.setOptype(OPTYPE.CONTINUOUS); } else if (XLSKeywords.DATATYPE_TEXT.equalsIgnoreCase(dataType)) { dataField.setDataType(DATATYPE.STRING); dataField.setOptype(OPTYPE.CATEGORICAL); } else if 
(XLSKeywords.DATATYPE_BOOLEAN.equalsIgnoreCase(dataType)) { dataField.setDataType(DATATYPE.BOOLEAN); dataField.setOptype(OPTYPE.CATEGORICAL); } String field = ""; for (Attribute attribute : characteristic.getAttributes()) { for (Extension extension : attribute.getExtensions()) { if ( ScorecardPMMLExtensionNames.CHARACTERTISTIC_FIELD.equalsIgnoreCase(extension.getName())) { field = extension.getValue(); break; }// } } dataField.setName(field); dataDictionary.getDataFields().add(dataField); characteristic.getExtensions().remove(dataTypeExtension); ctr++; } } } DataField targetField = new DataField(); targetField.setName( ScorecardPMMLExtensionNames.DEFAULT_PREDICTED_FIELD ); targetField.setDataType( DATATYPE.DOUBLE ); targetField.setOptype( OPTYPE.CONTINUOUS ); dataDictionary.getDataFields().add( targetField ); dataDictionary.setNumberOfFields(BigInteger.valueOf(ctr + 1)); } private void createAndSetOutput(Scorecard pmmlScorecard) { Extension classExtension = ScorecardPMMLUtils.getExtension(pmmlScorecard.getExtensionsAndCharacteristicsAndMiningSchemas(), PMMLExtensionNames.EXTERNAL_CLASS); Extension fieldExtension = ScorecardPMMLUtils.getExtension(pmmlScorecard.getExtensionsAndCharacteristicsAndMiningSchemas(), ScorecardPMMLExtensionNames.SCORECARD_RESULTANT_SCORE_FIELD); Extension reasonCodeExtension = ScorecardPMMLUtils.getExtension(pmmlScorecard.getExtensionsAndCharacteristicsAndMiningSchemas(), ScorecardPMMLExtensionNames.SCORECARD_RESULTANT_REASONCODES_FIELD); for (Object obj : pmmlScorecard.getExtensionsAndCharacteristicsAndMiningSchemas()) { if (obj instanceof Output) { Output output = (Output)obj; OutputField outputField = new OutputField(); outputField.setDataType(DATATYPE.DOUBLE); outputField.setFeature(RESULTFEATURE.PREDICTED_VALUE); outputField.setDisplayName("Final Score"); if ( fieldExtension != null ) { pmmlScorecard.getExtensionsAndCharacteristicsAndMiningSchemas().remove(fieldExtension); outputField.setName(fieldExtension.getValue()); } else { outputField.setName( "calculatedScore" ); } if ( classExtension != null ) { pmmlScorecard.getExtensionsAndCharacteristicsAndMiningSchemas().remove(classExtension); outputField.getExtensions().add( classExtension ); } output.getOutputFields().add(outputField); if ( pmmlScorecard.isUseReasonCodes() ) { OutputField reasonCodeField = new OutputField(); reasonCodeField.setDataType( DATATYPE.STRING ); reasonCodeField.setFeature( RESULTFEATURE.REASON_CODE ); reasonCodeField.setDisplayName( "Principal Reason Code" ); if ( reasonCodeExtension != null ) { pmmlScorecard.getExtensionsAndCharacteristicsAndMiningSchemas().remove(reasonCodeExtension); reasonCodeField.getExtensions().add( classExtension ); reasonCodeField.setName( reasonCodeExtension.getValue() ); } else { reasonCodeField.setName( "reasonCode" ); } output.getOutputFields().add( reasonCodeField ); } break; } } } private void createAndSetPredicates(Scorecard pmmlScorecard) { for (Object obj : pmmlScorecard.getExtensionsAndCharacteristicsAndMiningSchemas()) { if (obj instanceof Characteristics) { Characteristics characteristics = (Characteristics) obj; for (org.dmg.pmml.pmml_4_2.descr.Characteristic characteristic : characteristics.getCharacteristics()) { String dataType = ScorecardPMMLUtils.getExtensionValue(characteristic.getExtensions(), ScorecardPMMLExtensionNames.CHARACTERTISTIC_DATATYPE); Extension predicateExtension = null; for (Attribute attribute : characteristic.getAttributes()) { String predicateAsString = ""; String field = 
ScorecardPMMLUtils.getExtensionValue(attribute.getExtensions(), ScorecardPMMLExtensionNames.CHARACTERTISTIC_FIELD); for (Extension extension : attribute.getExtensions()) { if ("predicateResolver".equalsIgnoreCase(extension.getName())) { predicateAsString = extension.getValue(); predicateExtension = extension; break; } } setPredicatesForAttribute(attribute, dataType, field, predicateAsString); attribute.getExtensions().remove(predicateExtension); } } } } } private void setPredicatesForAttribute(Attribute pmmlAttribute, String dataType, String field, String predicateAsString) { predicateAsString = StringUtil.unescapeXML(predicateAsString); if (XLSKeywords.DATATYPE_NUMBER.equalsIgnoreCase(dataType)) { setNumericPredicate(pmmlAttribute, field, predicateAsString); } else if (XLSKeywords.DATATYPE_TEXT.equalsIgnoreCase(dataType)) { setTextPredicate(pmmlAttribute, field, predicateAsString); } else if (XLSKeywords.DATATYPE_BOOLEAN.equalsIgnoreCase(dataType)) { setBooleanPredicate(pmmlAttribute, field, predicateAsString); } } private void setBooleanPredicate(Attribute pmmlAttribute, String field, String predicateAsString) { SimplePredicate simplePredicate = new SimplePredicate(); simplePredicate.setField(field); simplePredicate.setOperator(PMMLOperators.EQUAL); if ("TRUE".equalsIgnoreCase(predicateAsString)){ simplePredicate.setValue("TRUE"); } else if ("FALSE".equalsIgnoreCase(predicateAsString)){ simplePredicate.setValue("FALSE"); } pmmlAttribute.setSimplePredicate(simplePredicate); } private void setTextPredicate(Attribute pmmlAttribute, String field, String predicateAsString) { String operator = ""; if (predicateAsString.startsWith("=")) { operator = "="; predicateAsString = predicateAsString.substring(1); } else if (predicateAsString.startsWith("!=")) { operator = "!="; predicateAsString = predicateAsString.substring(2); } if (predicateAsString.contains(",")) { SimpleSetPredicate simpleSetPredicate = new SimpleSetPredicate(); if ("!=".equalsIgnoreCase(operator)) { simpleSetPredicate.setBooleanOperator(PMMLOperators.IS_NOT_IN); } else { simpleSetPredicate.setBooleanOperator(PMMLOperators.IS_IN); } simpleSetPredicate.setField(field); predicateAsString = predicateAsString.trim(); if (predicateAsString.endsWith(",")) { predicateAsString = predicateAsString.substring(0, predicateAsString.length()-1); } Array array = new Array(); array.setContent(predicateAsString.replace(",", " ")); array.setType("string"); array.setN(BigInteger.valueOf(predicateAsString.split(",").length)); simpleSetPredicate.setArray(array); pmmlAttribute.setSimpleSetPredicate(simpleSetPredicate); } else { SimplePredicate simplePredicate = new SimplePredicate(); simplePredicate.setField(field); if ("!=".equalsIgnoreCase(operator)) { simplePredicate.setOperator(PMMLOperators.NOT_EQUAL); } else { simplePredicate.setOperator(PMMLOperators.EQUAL); } simplePredicate.setValue(predicateAsString); pmmlAttribute.setSimplePredicate(simplePredicate); } } private void setNumericPredicate(Attribute pmmlAttribute, String field, String predicateAsString) { if (predicateAsString.indexOf("-") > 0) { CompoundPredicate compoundPredicate = new CompoundPredicate(); compoundPredicate.setBooleanOperator("and"); String left = predicateAsString.substring(0, predicateAsString.indexOf("-")).trim(); String right = predicateAsString.substring(predicateAsString.indexOf("-") + 1).trim(); SimplePredicate simplePredicate = new SimplePredicate(); simplePredicate.setField(field); simplePredicate.setOperator(PMMLOperators.GREATER_OR_EQUAL); 
simplePredicate.setValue(left); compoundPredicate.getSimplePredicatesAndCompoundPredicatesAndSimpleSetPredicates().add(simplePredicate); simplePredicate = new SimplePredicate(); simplePredicate.setField(field); simplePredicate.setOperator(PMMLOperators.LESS_THAN); simplePredicate.setValue(right); compoundPredicate.getSimplePredicatesAndCompoundPredicatesAndSimpleSetPredicates().add(simplePredicate); pmmlAttribute.setCompoundPredicate(compoundPredicate); } else { SimplePredicate simplePredicate = new SimplePredicate(); simplePredicate.setField(field); if (predicateAsString.startsWith("<=")) { simplePredicate.setOperator(PMMLOperators.LESS_OR_EQUAL); simplePredicate.setValue(predicateAsString.substring(3).trim()); } else if (predicateAsString.startsWith(">=")) { simplePredicate.setOperator(PMMLOperators.GREATER_OR_EQUAL); simplePredicate.setValue(predicateAsString.substring(3).trim()); } else if (predicateAsString.startsWith("=")) { simplePredicate.setOperator(PMMLOperators.EQUAL); simplePredicate.setValue(predicateAsString.substring(2).trim()); } else if (predicateAsString.startsWith("!=")) { simplePredicate.setOperator(PMMLOperators.NOT_EQUAL); simplePredicate.setValue(predicateAsString.substring(3).trim()); } else if (predicateAsString.startsWith("<")) { simplePredicate.setOperator(PMMLOperators.LESS_THAN); simplePredicate.setValue(predicateAsString.substring(2).trim()); } else if (predicateAsString.startsWith(">")) { simplePredicate.setOperator(PMMLOperators.GREATER_THAN); simplePredicate.setValue(predicateAsString.substring(2).trim()); } pmmlAttribute.setSimplePredicate(simplePredicate); } } private void removeEmptyExtensions(Scorecard pmmlScorecard) { for (Object obj : pmmlScorecard.getExtensionsAndCharacteristicsAndMiningSchemas()) { if (obj instanceof Characteristics) { Characteristics characteristics = (Characteristics) obj; for (org.dmg.pmml.pmml_4_2.descr.Characteristic characteristic : characteristics.getCharacteristics()) { List<Extension> toRemoveExtensionsList = new ArrayList<Extension>(); for (Extension extension : characteristic.getExtensions()) { if (StringUtils.isEmpty(extension.getValue())) { toRemoveExtensionsList.add(extension); } } for (Extension extension : toRemoveExtensionsList) { characteristic.getExtensions().remove(extension); } for (Attribute attribute : characteristic.getAttributes()) { List<Extension> toRemoveExtensionsList2 = new ArrayList<Extension>(); for (Extension extension : attribute.getExtensions()) { if (StringUtils.isEmpty(extension.getValue())) { toRemoveExtensionsList2.add(extension); } } for (Extension extension : toRemoveExtensionsList2) { attribute.getExtensions().remove(extension); } } } } } } }
package org.oskari.print.util; import java.io.BufferedReader; import java.io.IOException; import java.io.UnsupportedEncodingException; import java.security.Principal; import java.util.Collection; import java.util.Enumeration; import java.util.HashMap; import java.util.Locale; import java.util.Map; import javax.servlet.AsyncContext; import javax.servlet.DispatcherType; import javax.servlet.RequestDispatcher; import javax.servlet.ServletContext; import javax.servlet.ServletException; import javax.servlet.ServletInputStream; import javax.servlet.ServletRequest; import javax.servlet.ServletResponse; import javax.servlet.http.Cookie; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.servlet.http.HttpSession; import javax.servlet.http.HttpUpgradeHandler; import javax.servlet.http.Part; public class ModifiedHttpServletRequest implements HttpServletRequest { private final Map<String, String[]> params; public ModifiedHttpServletRequest(Map<String, String> params) { this.params = new HashMap<>(); for (Map.Entry<String, String> kvp : params.entrySet()) { String key = kvp.getKey(); String value = kvp.getValue(); if (value != null) { this.params.put(key, new String[] { value }); } } } @Override public Object getAttribute(String name) { return null; } @Override public Enumeration<String> getAttributeNames() { return null; } @Override public String getCharacterEncoding() { return null; } @Override public void setCharacterEncoding(String env) throws UnsupportedEncodingException { } @Override public int getContentLength() { return 0; } @Override public long getContentLengthLong() { return 0; } @Override public String getContentType() { return null; } @Override public ServletInputStream getInputStream() throws IOException { return null; } @Override public String getParameter(String name) { String[] a = params.get(name); return a == null ? 
null : a[0]; } @Override public Enumeration<String> getParameterNames() { return new IteratorEnumeration<>(params.keySet().iterator()); } @Override public String[] getParameterValues(String name) { return params.get(name); } @Override public Map<String, String[]> getParameterMap() { return params; } @Override public String getProtocol() { return null; } @Override public String getScheme() { return null; } @Override public String getServerName() { return null; } @Override public int getServerPort() { return 0; } @Override public BufferedReader getReader() throws IOException { return null; } @Override public String getRemoteAddr() { return null; } @Override public String getRemoteHost() { return null; } @Override public void setAttribute(String name, Object o) { } @Override public void removeAttribute(String name) { } @Override public Locale getLocale() { return null; } @Override public Enumeration<Locale> getLocales() { return null; } @Override public boolean isSecure() { return false; } @Override public RequestDispatcher getRequestDispatcher(String path) { return null; } @Override public String getRealPath(String path) { return null; } @Override public int getRemotePort() { return 0; } @Override public String getLocalName() { return null; } @Override public String getLocalAddr() { return null; } @Override public int getLocalPort() { return 0; } @Override public ServletContext getServletContext() { return null; } @Override public AsyncContext startAsync() throws IllegalStateException { return null; } @Override public AsyncContext startAsync(ServletRequest servletRequest, ServletResponse servletResponse) throws IllegalStateException { return null; } @Override public boolean isAsyncStarted() { return false; } @Override public boolean isAsyncSupported() { return false; } @Override public AsyncContext getAsyncContext() { return null; } @Override public DispatcherType getDispatcherType() { return null; } @Override public String getAuthType() { return null; } @Override public Cookie[] getCookies() { return null; } @Override public long getDateHeader(String name) { return 0; } @Override public String getHeader(String name) { return null; } @Override public Enumeration<String> getHeaders(String name) { return null; } @Override public Enumeration<String> getHeaderNames() { return null; } @Override public int getIntHeader(String name) { return 0; } @Override public String getMethod() { return null; } @Override public String getPathInfo() { return null; } @Override public String getPathTranslated() { return null; } @Override public String getContextPath() { return null; } @Override public String getQueryString() { return null; } @Override public String getRemoteUser() { return null; } @Override public boolean isUserInRole(String role) { return false; } @Override public Principal getUserPrincipal() { return null; } @Override public String getRequestedSessionId() { return null; } @Override public String getRequestURI() { return null; } @Override public StringBuffer getRequestURL() { return null; } @Override public String getServletPath() { return null; } @Override public HttpSession getSession(boolean create) { return null; } @Override public HttpSession getSession() { return null; } @Override public String changeSessionId() { return null; } @Override public boolean isRequestedSessionIdValid() { return false; } @Override public boolean isRequestedSessionIdFromCookie() { return false; } @Override public boolean isRequestedSessionIdFromURL() { return false; } @Override public boolean 
isRequestedSessionIdFromUrl() { return false; } @Override public boolean authenticate(HttpServletResponse response) throws IOException, ServletException { return false; } @Override public void login(String username, String password) throws ServletException { } @Override public void logout() throws ServletException { } @Override public Collection<Part> getParts() throws IOException, ServletException { return null; } @Override public Part getPart(String name) throws IOException, ServletException { return null; } @Override public <T extends HttpUpgradeHandler> T upgrade(Class<T> handlerClass) throws IOException, ServletException { return null; } }
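/*
 * A brief usage sketch (not part of the original class): the wrapper carries nothing but request
 * parameters, which is enough for code paths that only call getParameter()/getParameterMap().
 * The parameter names used here are illustrative only.
 */
class ModifiedHttpServletRequestSketch {
    void sketch() {
        Map<String, String> params = new HashMap<>();
        params.put("format", "image/png"); // hypothetical print parameters
        params.put("pageSize", "A4");
        HttpServletRequest request = new ModifiedHttpServletRequest(params);
        // Only the parameter accessors return real data; most other methods return null/0/false.
        String format = request.getParameter("format");
        System.out.println(format);
    }
}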
/* * Copyright (c) 2015-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. */ package com.facebook.imagepipeline.producers; import java.util.Arrays; import java.util.List; import java.util.Map; import java.util.concurrent.Executor; import android.net.Uri; import com.facebook.common.references.CloseableReference; import com.facebook.imagepipeline.common.ImageDecodeOptions; import com.facebook.imagepipeline.common.Priority; import com.facebook.imagepipeline.decoder.ImageDecoder; import com.facebook.imagepipeline.decoder.ProgressiveJpegConfig; import com.facebook.imagepipeline.decoder.ProgressiveJpegParser; import com.facebook.imagepipeline.decoder.SimpleProgressiveJpegConfig; import com.facebook.imagepipeline.image.EncodedImage; import com.facebook.imagepipeline.image.ImmutableQualityInfo; import com.facebook.imagepipeline.memory.ByteArrayPool; import com.facebook.imagepipeline.memory.PooledByteBuffer; import com.facebook.imagepipeline.request.ImageRequest; import com.facebook.imagepipeline.request.ImageRequestBuilder; import org.junit.*; import org.junit.runner.*; import org.mockito.*; import org.mockito.Mock; import org.powermock.api.mockito.*; import org.powermock.core.classloader.annotations.*; import org.powermock.modules.junit4.rule.PowerMockRule; import org.robolectric.RobolectricTestRunner; import org.robolectric.annotation.Config; import static org.junit.Assert.*; import static org.mockito.Mockito.*; @RunWith(RobolectricTestRunner.class) @PowerMockIgnore({ "org.mockito.*", "org.robolectric.*", "android.*" }) @Config(manifest= Config.NONE) @PrepareForTest({JobScheduler.class, ProgressiveJpegParser.class, DecodeProducer.class}) public class DecodeProducerTest { private static final ImageDecodeOptions IMAGE_DECODE_OPTIONS = ImageDecodeOptions.newBuilder() .setBackgroundColor(0) .setMinDecodeIntervalMs(100) .build(); private static final int PREVIEW_SCAN = 2; private static final int IGNORED_SCAN = 3; private static final int GOOD_ENOUGH_SCAN = 5; private static final int IMAGE_SIZE = 1000; @Mock public ByteArrayPool mByteArrayPool; @Mock public Executor mExecutor; @Mock public ImageDecoder mImageDecoder; private ProgressiveJpegConfig mProgressiveJpegConfig; @Mock public Producer mInputProducer; private ImageRequest mImageRequest; private String mRequestId; private CloseableReference<PooledByteBuffer> mByteBufferRef; private EncodedImage mEncodedImage; @Mock public ProducerListener mProducerListener; private SettableProducerContext mProducerContext; @Mock public Consumer mConsumer; @Mock public ProgressiveJpegParser mProgressiveJpegParser; @Mock public JobScheduler mJobScheduler; private DecodeProducer mDecodeProducer; @Rule public PowerMockRule rule = new PowerMockRule(); @Before public void setUp() throws Exception { MockitoAnnotations.initMocks(this); mProgressiveJpegConfig = new SimpleProgressiveJpegConfig( new SimpleProgressiveJpegConfig.DynamicValueConfig() { public List<Integer> getScansToDecode() { return Arrays.asList(PREVIEW_SCAN, GOOD_ENOUGH_SCAN); } public int getGoodEnoughScanNumber() { return GOOD_ENOUGH_SCAN; } }); PowerMockito.mockStatic(ProgressiveJpegParser.class); PowerMockito.whenNew(ProgressiveJpegParser.class).withAnyArguments() .thenReturn(mProgressiveJpegParser); PowerMockito.mockStatic(JobScheduler.class); 
PowerMockito.whenNew(JobScheduler.class).withAnyArguments() .thenReturn(mJobScheduler); mDecodeProducer = new DecodeProducer( mByteArrayPool, mExecutor, mImageDecoder, mProgressiveJpegConfig, false, /* Set downsampleEnabled to false */ false, /* Set resizeAndRotateForNetwork to false */ mInputProducer); PooledByteBuffer pooledByteBuffer = mockPooledByteBuffer(IMAGE_SIZE); mByteBufferRef = CloseableReference.of(pooledByteBuffer); mEncodedImage = new EncodedImage(mByteBufferRef); } private static EncodedImage mockEncodedImage(CloseableReference<PooledByteBuffer> ref) { return new EncodedImage(ref); } @Test public void testNewResult_Final() { setupNetworkUri(); Consumer<EncodedImage> consumer = produceResults(); when(mJobScheduler.updateJob(mEncodedImage, true)).thenReturn(true); consumer.onNewResult(mEncodedImage, true); verify(mJobScheduler).updateJob(mEncodedImage, true); verify(mJobScheduler).scheduleJob(); verifyZeroInteractions(mProgressiveJpegParser); } @Test public void testNewResult_Final_Local() { setupLocalUri(); Consumer<EncodedImage> consumer = produceResults(); when(mJobScheduler.updateJob(mEncodedImage, true)).thenReturn(true); consumer.onNewResult(mEncodedImage, true); verify(mJobScheduler).updateJob(mEncodedImage, true); verify(mJobScheduler).scheduleJob(); verifyZeroInteractions(mProgressiveJpegParser); } @Test public void testNewResult_Intermediate_NonJPEG() { setupNetworkUri(); Consumer<EncodedImage> consumer = produceResults(); when(mJobScheduler.updateJob(mEncodedImage, false)).thenReturn(true); when(mProgressiveJpegParser.parseMoreData(mEncodedImage)).thenReturn(false); consumer.onNewResult(mEncodedImage, false); ArgumentCaptor<EncodedImage> argumentCaptor = ArgumentCaptor.forClass(EncodedImage.class); verify(mJobScheduler).updateJob(mEncodedImage, false); verify(mProgressiveJpegParser).parseMoreData(argumentCaptor.capture()); verify(mJobScheduler, never()).scheduleJob(); assertSame( ((EncodedImage) argumentCaptor.getValue()) .getUnderlyingReferenceTestOnly(), mByteBufferRef.getUnderlyingReferenceTestOnly()); } @Test public void testNewResult_Intermediate_Local() { setupLocalUri(); Consumer<EncodedImage> consumer = produceResults(); when(mJobScheduler.updateJob(mEncodedImage, false)).thenReturn(true); consumer.onNewResult(mEncodedImage, false); verify(mJobScheduler, never()).updateJob(mEncodedImage, false); verify(mProgressiveJpegParser, never()).parseMoreData(mEncodedImage); verify(mJobScheduler, never()).scheduleJob(); } @Test public void testNewResult_Intermediate_pJPEG() { setupNetworkUri(); Consumer<EncodedImage> consumer = produceResults(); InOrder inOrder = inOrder(mJobScheduler, mProgressiveJpegParser); ArgumentCaptor<EncodedImage> argumentCaptor = ArgumentCaptor.forClass(EncodedImage.class); // preview scan; schedule when(mJobScheduler.updateJob(mEncodedImage, false)).thenReturn(true); when(mProgressiveJpegParser.parseMoreData(any(EncodedImage.class))).thenReturn(true); when(mProgressiveJpegParser.getBestScanNumber()).thenReturn(PREVIEW_SCAN); consumer.onNewResult(mEncodedImage, false); inOrder.verify(mJobScheduler).updateJob(mEncodedImage, false); inOrder.verify(mProgressiveJpegParser).parseMoreData(argumentCaptor.capture()); inOrder.verify(mJobScheduler).scheduleJob(); assertSame( ((EncodedImage) argumentCaptor.getValue()) .getUnderlyingReferenceTestOnly(), mByteBufferRef.getUnderlyingReferenceTestOnly()); // no data parsed; ignore PooledByteBuffer pooledByteBuffer2 = mockPooledByteBuffer(210); CloseableReference<PooledByteBuffer> ref2 = 
CloseableReference.of(pooledByteBuffer2); EncodedImage encodedImage2 = mockEncodedImage(ref2); when(mJobScheduler.updateJob(encodedImage2, false)).thenReturn(true); when(mProgressiveJpegParser.parseMoreData(encodedImage2)).thenReturn(false); when(mProgressiveJpegParser.getBestScanNumber()).thenReturn(PREVIEW_SCAN); consumer.onNewResult(encodedImage2, false); inOrder.verify(mJobScheduler).updateJob(encodedImage2, false); inOrder.verify(mProgressiveJpegParser).parseMoreData(argumentCaptor.capture()); inOrder.verify(mJobScheduler, never()).scheduleJob(); assertSame( ((EncodedImage) argumentCaptor.getValue()) .getUnderlyingReferenceTestOnly(), ref2.getUnderlyingReferenceTestOnly()); // same scan; ignore PooledByteBuffer pooledByteBuffer3 = mockPooledByteBuffer(220); CloseableReference<PooledByteBuffer> ref3 = CloseableReference.of(pooledByteBuffer3); EncodedImage encodedImage3 = mockEncodedImage(ref3); when(mJobScheduler.updateJob(encodedImage3, false)).thenReturn(true); when(mProgressiveJpegParser.parseMoreData(encodedImage3)).thenReturn(true); when(mProgressiveJpegParser.getBestScanNumber()).thenReturn(PREVIEW_SCAN); consumer.onNewResult(encodedImage3, false); inOrder.verify(mJobScheduler).updateJob(encodedImage3, false); inOrder.verify(mProgressiveJpegParser).parseMoreData(argumentCaptor.capture()); inOrder.verify(mJobScheduler, never()).scheduleJob(); assertSame( ((EncodedImage) argumentCaptor.getValue()) .getUnderlyingReferenceTestOnly(), ref3.getUnderlyingReferenceTestOnly()); // scan not for decode; ignore PooledByteBuffer pooledByteBuffer4 = mockPooledByteBuffer(300); CloseableReference<PooledByteBuffer> ref4 = CloseableReference.of(pooledByteBuffer4); EncodedImage encodedImage4 = mockEncodedImage(ref4); when(mJobScheduler.updateJob(encodedImage4, false)).thenReturn(true); when(mProgressiveJpegParser.parseMoreData(encodedImage4)).thenReturn(true); when(mProgressiveJpegParser.getBestScanNumber()).thenReturn(IGNORED_SCAN); consumer.onNewResult(encodedImage4, false); inOrder.verify(mJobScheduler).updateJob(encodedImage4, false); inOrder.verify(mProgressiveJpegParser).parseMoreData(argumentCaptor.capture()); inOrder.verify(mJobScheduler, never()).scheduleJob(); assertSame( ((EncodedImage) argumentCaptor.getValue()) .getUnderlyingReferenceTestOnly(), ref4.getUnderlyingReferenceTestOnly()); // good-enough scan; schedule PooledByteBuffer pooledByteBuffer5 = mockPooledByteBuffer(500); CloseableReference<PooledByteBuffer> ref5 = CloseableReference.of(pooledByteBuffer5); EncodedImage encodedImage5 = mockEncodedImage(ref5); when(mJobScheduler.updateJob(encodedImage5, false)).thenReturn(true); when(mProgressiveJpegParser.parseMoreData(encodedImage5)).thenReturn(true); when(mProgressiveJpegParser.getBestScanNumber()).thenReturn(GOOD_ENOUGH_SCAN); consumer.onNewResult(encodedImage5, false); inOrder.verify(mJobScheduler).updateJob(encodedImage5, false); inOrder.verify(mProgressiveJpegParser).parseMoreData(argumentCaptor.capture()); inOrder.verify(mJobScheduler).scheduleJob(); assertSame( ((EncodedImage) argumentCaptor.getValue()) .getUnderlyingReferenceTestOnly(), ref5.getUnderlyingReferenceTestOnly()); } @Test public void testFailure() { setupNetworkUri(); Consumer<EncodedImage> consumer = produceResults(); Exception exception = mock(Exception.class); consumer.onFailure(exception); verify(mConsumer).onFailure(exception); } @Test public void testCancellation() { setupNetworkUri(); Consumer<EncodedImage> consumer = produceResults(); consumer.onCancellation(); verify(mConsumer).onCancellation(); } 
@Test public void testDecode_Final() throws Exception { setupNetworkUri(); produceResults(); JobScheduler.JobRunnable jobRunnable = getJobRunnable(); jobRunnable.run(mEncodedImage, true); InOrder inOrder = inOrder(mProducerListener, mImageDecoder); inOrder.verify(mProducerListener).onProducerStart(mRequestId, DecodeProducer.PRODUCER_NAME); inOrder.verify(mImageDecoder).decodeImage( mEncodedImage, IMAGE_SIZE, ImmutableQualityInfo.FULL_QUALITY, IMAGE_DECODE_OPTIONS); inOrder.verify(mProducerListener).onProducerFinishWithSuccess( eq(mRequestId), eq(DecodeProducer.PRODUCER_NAME), any(Map.class)); } @Test public void testDecode_Intermediate_pJPEG() throws Exception { setupNetworkUri(); produceResults(); JobScheduler.JobRunnable jobRunnable = getJobRunnable(); when(mProgressiveJpegParser.isJpeg()).thenReturn(true); when(mProgressiveJpegParser.getBestScanEndOffset()).thenReturn(200); when(mProgressiveJpegParser.getBestScanNumber()).thenReturn(PREVIEW_SCAN); jobRunnable.run(mEncodedImage, false); InOrder inOrder = inOrder(mProducerListener, mImageDecoder); inOrder.verify(mProducerListener).onProducerStart(mRequestId, DecodeProducer.PRODUCER_NAME); inOrder.verify(mImageDecoder).decodeImage( mEncodedImage, 200, ImmutableQualityInfo.of(PREVIEW_SCAN, false, false), IMAGE_DECODE_OPTIONS); inOrder.verify(mProducerListener).onProducerFinishWithSuccess( eq(mRequestId), eq(DecodeProducer.PRODUCER_NAME), any(Map.class)); inOrder.verifyNoMoreInteractions(); } @Test public void testDecode_Failure() throws Exception { setupNetworkUri(); produceResults(); JobScheduler.JobRunnable jobRunnable = getJobRunnable(); Exception exception = new RuntimeException(); when(mImageDecoder.decodeImage( mEncodedImage, IMAGE_SIZE, ImmutableQualityInfo.FULL_QUALITY, IMAGE_DECODE_OPTIONS)) .thenThrow(exception); jobRunnable.run(mEncodedImage, true); InOrder inOrder = inOrder(mProducerListener, mImageDecoder); inOrder.verify(mProducerListener).onProducerStart(mRequestId, DecodeProducer.PRODUCER_NAME); inOrder.verify(mImageDecoder).decodeImage( mEncodedImage, IMAGE_SIZE, ImmutableQualityInfo.FULL_QUALITY, IMAGE_DECODE_OPTIONS); inOrder.verify(mProducerListener).onProducerFinishWithFailure( eq(mRequestId), eq(DecodeProducer.PRODUCER_NAME), eq(exception), any(Map.class)); } private void setupNetworkUri() { //Uri.parse("file://path/image") mImageRequest = ImageRequestBuilder.newBuilderWithSource(Uri.parse("http://www.fb.com/image")) .setProgressiveRenderingEnabled(true) .setImageDecodeOptions(IMAGE_DECODE_OPTIONS) .build(); mRequestId = "networkRequest1"; mProducerContext = new SettableProducerContext( mImageRequest, mRequestId, mProducerListener, mock(Object.class), ImageRequest.RequestLevel.FULL_FETCH, /* isPrefetch */ false, /* isIntermediateResultExpected */ true, Priority.MEDIUM); } private void setupLocalUri() { mImageRequest = ImageRequestBuilder.newBuilderWithSource(Uri.parse("file://path/image")) .setProgressiveRenderingEnabled(true) // this should be ignored .setImageDecodeOptions(IMAGE_DECODE_OPTIONS) .build(); mRequestId = "localRequest1"; mProducerContext = new SettableProducerContext( mImageRequest, mRequestId, mProducerListener, mock(Object.class), ImageRequest.RequestLevel.FULL_FETCH, /* isPrefetch */ false, /* isIntermediateResultExpected */ true, Priority.MEDIUM); } private Consumer<EncodedImage> produceResults() { mDecodeProducer.produceResults(mConsumer, mProducerContext); ArgumentCaptor<Consumer> consumerCaptor = ArgumentCaptor.forClass(Consumer.class); 
verify(mInputProducer).produceResults(consumerCaptor.capture(), eq(mProducerContext)); return consumerCaptor.getValue(); } private JobScheduler.JobRunnable getJobRunnable() throws Exception { ArgumentCaptor<JobScheduler.JobRunnable> runnableCaptor = ArgumentCaptor.forClass(JobScheduler.JobRunnable.class); PowerMockito.verifyNew(JobScheduler.class) .withArguments(eq(mExecutor), runnableCaptor.capture(), anyInt()); return runnableCaptor.getValue(); } private static PooledByteBuffer mockPooledByteBuffer(int size) { PooledByteBuffer pooledByteBuffer = mock(PooledByteBuffer.class); when(pooledByteBuffer.size()).thenReturn(size); return pooledByteBuffer; } }
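/*
 * A self-contained, hedged sketch of the constructor-interception pattern DecodeProducerTest above
 * relies on: PowerMockito.whenNew(...) in setup to substitute objects that the class under test
 * constructs internally, and verifyNew(...) afterwards to check (or capture) the constructor
 * arguments, as getJobRunnable() does. The classes Service and Worker below are hypothetical and
 * exist only for illustration; only the JUnit/Mockito/PowerMock calls shown are assumed to exist.
 */
package com.example.sketch;

import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import org.junit.Test;
import org.junit.runner.RunWith;
import org.powermock.api.mockito.PowerMockito;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;

@RunWith(PowerMockRunner.class)
@PrepareForTest(ConstructorInterceptionSketch.Service.class) // the class that calls "new Worker()"
public class ConstructorInterceptionSketch {

    /** Hypothetical collaborator that Service constructs internally. */
    public static class Worker {
        public int work() {
            return 0;
        }
    }

    /** Hypothetical class under test; it creates its own Worker, much as DecodeProducer creates its JobScheduler. */
    public static class Service {
        public int run() {
            return new Worker().work();
        }
    }

    @Test
    public void internallyConstructedWorkerIsReplacedByMock() throws Exception {
        Worker mockWorker = mock(Worker.class);
        when(mockWorker.work()).thenReturn(42);

        // Intercept "new Worker(...)" inside the prepared Service class and hand back the mock instead.
        PowerMockito.whenNew(Worker.class).withAnyArguments().thenReturn(mockWorker);

        assertEquals(42, new Service().run());

        // Verify the constructor was really invoked; withArguments(...) could capture them instead.
        PowerMockito.verifyNew(Worker.class).withNoArguments();
    }
}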
package com.datazuul.apps.bookmarks; /* * Copyright 1999 Sun Microsystems, Inc. All Rights Reserved. * * Redistribution and use in source and binary forms, with or * without modification, are permitted provided that the following * conditions are met: * * - Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * - Redistribution in binary form must reproduce the above * copyright notice, this list of conditions and the following * disclaimer in the documentation and/or other materials * provided with the distribution. * * Neither the name of Sun Microsystems, Inc. or the names of * contributors may be used to endorse or promote products derived * from this software without specific prior written permission. * * This software is provided "AS IS," without a warranty of any * kind. ALL EXPRESS OR IMPLIED CONDITIONS, REPRESENTATIONS AND * WARRANTIES, INCLUDING ANY IMPLIED WARRANTY OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE OR NON-INFRINGEMENT, ARE HEREBY * EXCLUDED. SUN AND ITS LICENSORS SHALL NOT BE LIABLE FOR ANY * DAMAGES OR LIABILITIES SUFFERED BY LICENSEE AS A RESULT OF OR * RELATING TO USE, MODIFICATION OR DISTRIBUTION OF THIS SOFTWARE OR * ITS DERIVATIVES. IN NO EVENT WILL SUN OR ITS LICENSORS BE LIABLE * FOR ANY LOST REVENUE, PROFIT OR DATA, OR FOR DIRECT, INDIRECT, * SPECIAL, CONSEQUENTIAL, INCIDENTAL OR PUNITIVE DAMAGES, HOWEVER * CAUSED AND REGARDLESS OF THE THEORY OF LIABILITY, ARISING OUT OF * THE USE OF OR INABILITY TO USE THIS SOFTWARE, EVEN IF SUN HAS * BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. * * You acknowledge that this software is not designed, licensed or * intended for use in the design, construction, operation or * maintenance of any nuclear facility. */ import java.awt.Color; import java.awt.Container; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.awt.event.ItemEvent; import java.awt.event.ItemListener; import java.awt.event.WindowAdapter; import java.awt.event.WindowEvent; import java.io.File; import java.text.DateFormat; import java.util.Date; import javax.swing.ButtonGroup; import javax.swing.JFileChooser; import javax.swing.JFrame; import javax.swing.JMenu; import javax.swing.JMenuBar; import javax.swing.JMenuItem; import javax.swing.JRadioButtonMenuItem; import javax.swing.JScrollPane; import javax.swing.SwingUtilities; import javax.swing.UIManager; import javax.swing.table.DefaultTableCellRenderer; /** * Assembles the UI. The UI consists of a JTreeTable and a menu. * The JTreeTable uses a BookmarksModel to visually represent a bookmarks * file stored in the Netscape file format. * * @author Scott Violet */ public class TreeTableExample3 { /** Number of instances of TreeTableExample3. */ private static int ttCount; /** Used to represent the model. */ private JTreeTable treeTable; /** Frame containing everything. */ private JFrame frame; /** Path created for. */ private String path; /** * Creates a TreeTableExample3, loading the bookmarks from the file * at <code>path</code>. 
*/ public TreeTableExample3(String path) { this.path = path; ttCount++; frame = createFrame(); Container cPane = frame.getContentPane(); JMenuBar mb = createMenuBar(); TreeTableModel model = createModel(path); treeTable = createTreeTable(model); JScrollPane sp = new JScrollPane(treeTable); sp.getViewport().setBackground(Color.white); cPane.add(sp); frame.setJMenuBar(mb); frame.pack(); frame.setVisible(true); } /** * Creates and returns the instance of JTreeTable that will be used. */ protected JTreeTable createTreeTable(TreeTableModel model) { JTreeTable treeTable = new JTreeTable(model); treeTable.setDefaultRenderer(Date.class, new BookmarksDateRenderer()); treeTable.setDefaultRenderer(Object.class, new BookmarksStringRenderer()); return treeTable; } /** * Creates the BookmarksModel for the file at <code>path</code>. */ protected TreeTableModel createModel(String path) { Bookmarks bookmarks = new Bookmarks(path); return new BookmarksModel(bookmarks.getRoot()); } /** * Creates the JFrame that will contain everything. */ protected JFrame createFrame() { JFrame retFrame = new JFrame("TreeTable III -- " + path); retFrame.addWindowListener(new WindowAdapter() { public void windowClosing(WindowEvent we) { frame.dispose(); if (--ttCount == 0) { System.exit(0); } } }); return retFrame; } /** * Creates a menu bar. */ protected JMenuBar createMenuBar() { JMenu fileMenu = new JMenu("File"); JMenuItem menuItem; menuItem = new JMenuItem("Open"); menuItem.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent ae) { JFileChooser fc = new JFileChooser(path); int result = fc.showOpenDialog(frame); if (result == JFileChooser.APPROVE_OPTION) { String newPath = fc.getSelectedFile().getPath(); new TreeTableExample3(newPath); } } }); fileMenu.add(menuItem); fileMenu.addSeparator(); menuItem = new JMenuItem("Exit"); menuItem.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent ae) { System.exit(0); } }); fileMenu.add(menuItem); // Create a menu bar JMenuBar menuBar = new JMenuBar(); menuBar.add(fileMenu); // Menu for the look and feels (lafs). UIManager.LookAndFeelInfo[] lafs = UIManager.getInstalledLookAndFeels(); ButtonGroup lafGroup = new ButtonGroup(); JMenu optionsMenu = new JMenu("Options"); menuBar.add(optionsMenu); for (int i = 0; i < lafs.length; i++) { JRadioButtonMenuItem rb = new JRadioButtonMenuItem(lafs[i].getName()); optionsMenu.add(rb); rb.setSelected(UIManager.getLookAndFeel().getName().equals(lafs[i].getName())); rb.putClientProperty("UIKey", lafs[i]); rb.addItemListener(new ItemListener() { public void itemStateChanged(ItemEvent ae) { JRadioButtonMenuItem rb2 = (JRadioButtonMenuItem) ae.getSource(); if (rb2.isSelected()) { UIManager.LookAndFeelInfo info = (UIManager.LookAndFeelInfo) rb2.getClientProperty("UIKey"); try { UIManager.setLookAndFeel(info.getClassName()); SwingUtilities.updateComponentTreeUI(frame); } catch (Exception e) { System.err.println("unable to set UI " + e.getMessage()); } } } }); lafGroup.add(rb); } return menuBar; } /** * The renderer used for Dates in the TreeTable. The only thing it does * is to format a null date as '---'. */ private static class BookmarksDateRenderer extends DefaultTableCellRenderer { DateFormat formatter; public BookmarksDateRenderer() { super(); } public void setValue(Object value) { if (formatter == null) { formatter = DateFormat.getDateInstance(); } setText((value == null) ? "---" : formatter.format(value)); } } /** * The renderer used for Strings in the TreeTable. The only thing it does * is to format a null String as '---'. */ static class BookmarksStringRenderer extends DefaultTableCellRenderer { public BookmarksStringRenderer() { super(); } public void setValue(Object value) { setText((value == null) ? "---" : value.toString()); } } public static void main(String[] args) { if (args.length > 0) { // User is specifying the bookmark file to show. for (int counter = args.length - 1; counter >= 0; counter--) { new TreeTableExample3(args[counter]); } } else { // No file specified, see if the user has one in their home // directory. String path; try { path = System.getProperty("user.home"); if (path != null) { path += File.separator + ".netscape" + File.separator + "bookmarks.html"; File file = new File(path); if (!file.exists()) { path = null; } } } catch (Throwable th) { path = null; } if (path == null) { // None available, use a default. path = "bookmarks.html"; } new TreeTableExample3(path); } } }
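/*
 * A minimal, runnable sketch that isolates the look-and-feel switching pattern used in
 * createMenuBar() above: one radio button per installed LookAndFeelInfo, applied via
 * UIManager.setLookAndFeel and pushed to the live component tree with
 * SwingUtilities.updateComponentTreeUI. LafSwitcherSketch is a hypothetical name; only the
 * standard Swing calls shown are assumed.
 */
package com.example.sketch;

import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import javax.swing.ButtonGroup;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JMenu;
import javax.swing.JMenuBar;
import javax.swing.JRadioButtonMenuItem;
import javax.swing.SwingUtilities;
import javax.swing.UIManager;

public class LafSwitcherSketch {

    public static void main(String[] args) {
        SwingUtilities.invokeLater(new Runnable() {
            public void run() {
                final JFrame frame = new JFrame("Look and feel switcher");
                frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
                frame.add(new JLabel("Pick a look and feel from the Options menu"));

                JMenu optionsMenu = new JMenu("Options");
                ButtonGroup group = new ButtonGroup();
                for (final UIManager.LookAndFeelInfo laf : UIManager.getInstalledLookAndFeels()) {
                    JRadioButtonMenuItem item = new JRadioButtonMenuItem(laf.getName());
                    item.setSelected(UIManager.getLookAndFeel().getName().equals(laf.getName()));
                    item.addActionListener(new ActionListener() {
                        public void actionPerformed(ActionEvent e) {
                            try {
                                // Install the chosen LAF, then refresh the components already on screen.
                                UIManager.setLookAndFeel(laf.getClassName());
                                SwingUtilities.updateComponentTreeUI(frame);
                            } catch (Exception ex) {
                                System.err.println("unable to set UI " + ex.getMessage());
                            }
                        }
                    });
                    group.add(item);
                    optionsMenu.add(item);
                }

                JMenuBar menuBar = new JMenuBar();
                menuBar.add(optionsMenu);
                frame.setJMenuBar(menuBar);
                frame.pack();
                frame.setVisible(true);
            }
        });
    }
}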
/* * Zed Attack Proxy (ZAP) and its related class files. * * ZAP is an HTTP/HTTPS proxy for assessing web application security. * * Copyright 2012 psiinon@gmail.com * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.zaproxy.zap.extension.autoupdate; import java.awt.Component; import java.awt.Cursor; import java.awt.Desktop; import java.awt.EventQueue; import java.awt.GridBagLayout; import java.awt.HeadlessException; import java.awt.event.ActionEvent; import java.awt.event.KeyEvent; import java.awt.event.WindowEvent; import java.io.File; import java.net.URL; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Set; import javax.swing.AbstractAction; import javax.swing.BorderFactory; import javax.swing.Icon; import javax.swing.ImageIcon; import javax.swing.JButton; import javax.swing.JComponent; import javax.swing.JLabel; import javax.swing.JPanel; import javax.swing.JScrollPane; import javax.swing.JTabbedPane; import javax.swing.RowSorter; import javax.swing.SortOrder; import javax.swing.KeyStroke; import javax.swing.border.TitledBorder; import javax.swing.event.ListSelectionEvent; import javax.swing.event.ListSelectionListener; import javax.swing.event.TableModelEvent; import javax.swing.event.TableModelListener; import org.apache.log4j.Logger; import org.jdesktop.swingx.decorator.AbstractHighlighter; import org.jdesktop.swingx.decorator.ComponentAdapter; import org.jdesktop.swingx.decorator.CompoundHighlighter; import org.jdesktop.swingx.decorator.HighlightPredicate; import org.jdesktop.swingx.decorator.IconHighlighter; import org.jdesktop.swingx.renderer.DefaultTableRenderer; import org.jdesktop.swingx.renderer.IconAware; import org.jdesktop.swingx.renderer.IconValues; import org.jdesktop.swingx.renderer.MappedValue; import org.jdesktop.swingx.renderer.StringValues; import org.parosproxy.paros.Constant; import org.parosproxy.paros.extension.Extension; import org.parosproxy.paros.model.Model; import org.parosproxy.paros.view.AbstractFrame; import org.parosproxy.paros.view.View; import org.zaproxy.zap.control.AddOn; import org.zaproxy.zap.control.AddOnCollection; import org.zaproxy.zap.utils.DesktopUtils; import org.zaproxy.zap.utils.FontUtils; import org.zaproxy.zap.view.LayoutHelper; import org.zaproxy.zap.view.ZapTable; import org.zaproxy.zap.view.panels.TableFilterPanel; public class ManageAddOnsDialog extends AbstractFrame implements CheckForUpdateCallback { protected enum State {IDLE, DOWNLOADING_ZAP, DOWNLOADED_ZAP, DOWNLOADING_UPDATES, DOWNLOADED_UPDATES} static final Icon ICON_ADD_ON_ISSUES = new ImageIcon( InstalledAddOnsTableModel.class.getResource("/resource/icon/16/050.png")); static final Icon ICON_ADD_ON_EXTENSION_ISSUES = new ImageIcon( InstalledAddOnsTableModel.class.getResource("/resource/icon/fugue/information-white.png")); private static final Logger logger = Logger.getLogger(ManageAddOnsDialog.class); private static final long serialVersionUID = 1L; private JTabbedPane jTabbed = null; private JPanel topPanel = null; private JPanel 
installedPanel = null; private JPanel browsePanel = null; private JPanel corePanel = null; private JPanel installedAddOnsPanel = null; private JPanel installedAddOnsFilterPanel = null; private JPanel uninstalledAddOnsPanel = null; private JPanel uninstalledAddOnsFilterPanel = null; private JPanel retrievePanel = null; private JScrollPane marketPlaceScrollPane = null; private JButton addOnInfoButton = null; private JButton coreNotesButton = null; private JButton downloadZapButton = null; private JButton checkForUpdatesButton = null; private JButton updateButton = null; private JButton updateAllButton = null; private JButton uninstallButton = null; private JButton installAllButton; private JButton installButton = null; private JButton close1Button = null; private JButton close2Button = null; private JLabel downloadProgress = null; private JLabel updatesMessage = null; private ZapTable installedAddOnsTable = null; private ZapTable uninstalledAddOnsTable = null; //private ZapRelease latestRelease = null; private String currentVersion = null; private AddOnCollection latestInfo = null; private AddOnCollection prevInfo = null; private ExtensionAutoUpdate extension = null; private AddOnCollection installedAddOns; private final InstalledAddOnsTableModel installedAddOnsModel; private final UninstalledAddOnsTableModel uninstalledAddOnsModel; private State state = null; /** * @throws HeadlessException */ public ManageAddOnsDialog(ExtensionAutoUpdate ext, String currentVersion, AddOnCollection installedAddOns) throws HeadlessException { super(); this.extension = ext; this.currentVersion = currentVersion; this.installedAddOns = installedAddOns; installedAddOnsModel = new InstalledAddOnsTableModel(installedAddOns); uninstalledAddOnsModel = new UninstalledAddOnsTableModel(installedAddOns); initialize(); } /** * This method initializes this * */ private void initialize() { this.setTitle(Constant.messages.getString("cfu.manage.title")); //this.setContentPane(getJTabbed()); this.setContentPane(getTopPanel()); this.pack(); if (Model.getSingleton().getOptionsParam().getViewParam().getWmUiHandlingOption() == 0) { this.setSize(700, 500); } state = State.IDLE; // Handle escape key to close the dialog KeyStroke escape = KeyStroke.getKeyStroke(KeyEvent.VK_ESCAPE, 0, false); AbstractAction escapeAction = new AbstractAction() { private static final long serialVersionUID = 3516424501887406165L; @Override public void actionPerformed(ActionEvent e) { dispatchEvent(new WindowEvent(ManageAddOnsDialog.this, WindowEvent.WINDOW_CLOSING)); } }; getRootPane().getInputMap(JComponent.WHEN_IN_FOCUSED_WINDOW).put(escape, "ESCAPE"); getRootPane().getActionMap().put("ESCAPE",escapeAction); } private JPanel getTopPanel() { if (topPanel == null) { topPanel = new JPanel(); topPanel.setLayout(new GridBagLayout()); topPanel.add(getJTabbed(), LayoutHelper.getGBC(0, 0, 1, 1.0D, 1.0D)); topPanel.add(this.getUpdatesMessage(), LayoutHelper.getGBC(0, 2, 1, 1.0D)); } return topPanel; } private JTabbedPane getJTabbed() { if (jTabbed == null) { jTabbed = new JTabbedPane(); jTabbed.addTab(Constant.messages.getString("cfu.tab.installed"), this.getInstalledPanel()); jTabbed.addTab(Constant.messages.getString("cfu.tab.browse"), this.getBrowsePanel()); } return jTabbed; } protected void selectMarketplaceTab() { getJTabbed().setSelectedIndex(1); } private JPanel getInstalledPanel() { if (installedPanel == null) { installedPanel = new JPanel(); installedPanel.setLayout(new GridBagLayout()); installedPanel.add(getCorePanel(true), LayoutHelper.getGBC(0, 
0, 1, 1.0D, 0.0D)); installedPanel.add(getInstalledAddOnsPanel(), LayoutHelper.getGBC(0, 1, 1, 1.0D, 1.0D)); } return installedPanel; } private JPanel getBrowsePanel() { if (browsePanel == null) { browsePanel = new JPanel(); browsePanel.setLayout(new GridBagLayout()); browsePanel.add(getUninstalledAddOnsPanel(), LayoutHelper.getGBC(0, 0, 1, 1.0D, 1.0D)); } return browsePanel; } private JPanel getCorePanel(boolean update) { if (corePanel == null) { corePanel = new JPanel(); corePanel.setLayout(new GridBagLayout()); corePanel.setBorder( BorderFactory.createTitledBorder( null, Constant.messages.getString("cfu.label.zap.border"), TitledBorder.DEFAULT_JUSTIFICATION, javax.swing.border.TitledBorder.DEFAULT_POSITION, FontUtils.getFont(FontUtils.Size.standard), java.awt.Color.black)); if (latestInfo == null || this.latestInfo.getZapRelease() == null) { // Haven't checked for updates yet corePanel.add(new JLabel(this.currentVersion), LayoutHelper.getGBC(0, 0, 1, 0.0D)); corePanel.add(new JLabel(""), LayoutHelper.getGBC(1, 0, 1, 1.0D)); corePanel.add(this.getCheckForUpdatesButton(), LayoutHelper.getGBC(2, 0, 1, 0.0D)); } else if (this.latestInfo.getZapRelease().isNewerThan(this.currentVersion)) { corePanel.add(new JLabel(Constant.messages.getString("cfu.check.zap.newer")), LayoutHelper.getGBC(0, 0, 1, 0.0D)); corePanel.add(new JLabel(this.latestInfo.getZapRelease().getVersion()), LayoutHelper.getGBC(1, 0, 1, 0.1D)); corePanel.add(new JLabel(""), LayoutHelper.getGBC(2, 0, 1, 0.8D)); corePanel.add(this.getDownloadProgress(), LayoutHelper.getGBC(3, 0, 1, 0.2D)); corePanel.add(this.getCoreNotesButton(), LayoutHelper.getGBC(4, 0, 1, 0.0D)); corePanel.add(this.getDownloadZapButton(), LayoutHelper.getGBC(5, 0, 1, 0.0D)); } else { corePanel.add(new JLabel(this.currentVersion + " : " + Constant.messages.getString("cfu.check.zap.latest")), LayoutHelper.getGBC(0, 0, 1, 1.0D)); } } else if (update && latestInfo != null && this.latestInfo.getZapRelease() != null) { corePanel.removeAll(); if (this.latestInfo.getZapRelease().isNewerThan(this.currentVersion)) { corePanel.add(new JLabel(Constant.messages.getString("cfu.check.zap.newer")), LayoutHelper.getGBC(0, 0, 1, 0.0D)); corePanel.add(new JLabel(this.latestInfo.getZapRelease().getVersion()), LayoutHelper.getGBC(1, 0, 1, 0.1D)); corePanel.add(new JLabel(""), LayoutHelper.getGBC(2, 0, 1, 0.8D)); corePanel.add(this.getDownloadProgress(), LayoutHelper.getGBC(3, 0, 1, 0.2D)); corePanel.add(this.getCoreNotesButton(), LayoutHelper.getGBC(4, 0, 1, 0.0D)); corePanel.add(this.getDownloadZapButton(), LayoutHelper.getGBC(5, 0, 1, 0.0D)); } else { corePanel.add(new JLabel(this.currentVersion + " : " + Constant.messages.getString("cfu.check.zap.latest")), LayoutHelper.getGBC(0, 0, 1, 1.0D)); } installedPanel.validate(); } return corePanel; } private JPanel getInstalledAddOnsPanel() { if (installedAddOnsPanel == null) { installedAddOnsPanel = new JPanel(); installedAddOnsPanel.setLayout(new GridBagLayout()); installedAddOnsPanel.setBorder( BorderFactory.createTitledBorder( null, Constant.messages.getString("cfu.label.addons.border"), TitledBorder.DEFAULT_JUSTIFICATION, javax.swing.border.TitledBorder.DEFAULT_POSITION, FontUtils.getFont(FontUtils.Size.standard), java.awt.Color.black)); getInstalledAddOnsTable(); JScrollPane scrollPane = new JScrollPane(); scrollPane.setHorizontalScrollBarPolicy(javax.swing.JScrollPane.HORIZONTAL_SCROLLBAR_NEVER); scrollPane.setViewportView(getInstalledAddOnsTable()); installedAddOnsFilterPanel = new 
TableFilterPanel<>(getInstalledAddOnsTable()); int row = 0; installedAddOnsPanel.add(installedAddOnsFilterPanel, LayoutHelper.getGBC(0, row++, 5, 0.0D)); installedAddOnsPanel.add(scrollPane, LayoutHelper.getGBC(0, row++, 5, 1.0D, 1.0D)); installedAddOnsPanel.add(new JLabel(""), LayoutHelper.getGBC(0, row, 1, 1.0D)); installedAddOnsPanel.add(getUninstallButton(), LayoutHelper.getGBC(1, row, 1, 0.0D)); installedAddOnsPanel.add(getUpdateButton(), LayoutHelper.getGBC(2, row, 1, 0.0D)); installedAddOnsPanel.add(getUpdateAllButton(), LayoutHelper.getGBC(3, row, 1, 0.0D)); installedAddOnsPanel.add(getClose1Button(), LayoutHelper.getGBC(4, row, 1, 0.0D)); } return installedAddOnsPanel; } private JPanel getUninstalledAddOnsPanel() { if (uninstalledAddOnsPanel == null) { uninstalledAddOnsPanel = new JPanel(); uninstalledAddOnsPanel.setLayout(new GridBagLayout()); uninstalledAddOnsPanel.setBorder( BorderFactory.createTitledBorder( null, Constant.messages.getString("cfu.label.addons.border"), TitledBorder.DEFAULT_JUSTIFICATION, javax.swing.border.TitledBorder.DEFAULT_POSITION, FontUtils.getFont(FontUtils.Size.standard), java.awt.Color.black)); uninstalledAddOnsFilterPanel = new TableFilterPanel<>(getUninstalledAddOnsTable()); if (latestInfo == null) { // Not checked yet getUninstalledAddOnsTable(); // To initialise the table and model getMarketPlaceScrollPane().setViewportView(getRetrievePanel()); uninstalledAddOnsFilterPanel.setVisible(false); } else { getMarketPlaceScrollPane().setViewportView(getUninstalledAddOnsTable()); uninstalledAddOnsFilterPanel.setVisible(true); } int row = 0; int column = 0; uninstalledAddOnsPanel.add(uninstalledAddOnsFilterPanel, LayoutHelper.getGBC(column, row++, 5, 0.0D)); uninstalledAddOnsPanel.add(getMarketPlaceScrollPane(), LayoutHelper.getGBC(column, row++, 5, 1.0D, 1.0D)); uninstalledAddOnsPanel.add(new JLabel(""), LayoutHelper.getGBC(column++, row, 1, 1.0D)); if (Constant.isDevMode()) { uninstalledAddOnsPanel.add(getInstallAllButton(), LayoutHelper.getGBC(column++, row, 1, 0.0D)); } uninstalledAddOnsPanel.add(getInstallButton(), LayoutHelper.getGBC(column++, row, 1, 0.0D)); uninstalledAddOnsPanel.add(getAddOnInfoButton(), LayoutHelper.getGBC(column++, row, 1, 0.0D)); uninstalledAddOnsPanel.add(getClose2Button(), LayoutHelper.getGBC(column, row, 1, 0.0D)); } return uninstalledAddOnsPanel; } private JScrollPane getMarketPlaceScrollPane () { if (marketPlaceScrollPane == null) { marketPlaceScrollPane = new JScrollPane(); marketPlaceScrollPane.setHorizontalScrollBarPolicy(javax.swing.JScrollPane.HORIZONTAL_SCROLLBAR_NEVER); } return marketPlaceScrollPane; } private JPanel getRetrievePanel() { if (retrievePanel == null) { retrievePanel = new JPanel(); retrievePanel.setLayout(new GridBagLayout()); JButton retrieveButton = new JButton(); retrieveButton.setText(Constant.messages.getString("cfu.button.checkForUpdates")); retrieveButton.addActionListener(new java.awt.event.ActionListener() { @Override public void actionPerformed(java.awt.event.ActionEvent e) { checkForUpdates(); } }); retrievePanel.add(new JLabel(""), LayoutHelper.getGBC(0, 0, 1, 1.0D)); retrievePanel.add(retrieveButton, LayoutHelper.getGBC(1, 0, 1, 0.0D)); retrievePanel.add(new JLabel(""), LayoutHelper.getGBC(2, 0, 1, 1.0D)); retrievePanel.add(new JLabel(""), LayoutHelper.getGBC(0, 1, 3, 1.0D, 1.0D)); } return retrievePanel; } protected void setPreviousVersionInfo(AddOnCollection prevInfo) { this.prevInfo = prevInfo; } protected void setLatestVersionInfo(AddOnCollection latestInfo) { this.latestInfo = 
latestInfo; getCorePanel(true); if (latestInfo != null) { installedAddOnsModel.setAvailableAddOns(latestInfo); uninstalledAddOnsModel.setAddOnCollection(latestInfo); List<AddOn> addOnsNotInstalled = installedAddOnsModel.updateEntries(); uninstalledAddOnsModel.setAddOns(addOnsNotInstalled, prevInfo); } getMarketPlaceScrollPane().setViewportView(getUninstalledAddOnsTable()); uninstalledAddOnsFilterPanel.setVisible(true); } private ZapTable getInstalledAddOnsTable () { if (installedAddOnsTable == null) { installedAddOnsTable = createCustomZapTable(); installedAddOnsModel.addTableModelListener(new TableModelListener() { @Override public void tableChanged(TableModelEvent e) { getUpdateButton().setEnabled(installedAddOnsModel.canUpdateSelected()); getUpdateAllButton().setEnabled(installedAddOnsModel.getAllUpdates().size() > 0); getUninstallButton().setEnabled(installedAddOnsModel.canUninstallSelected()); }}); installedAddOnsTable.setModel(installedAddOnsModel); installedAddOnsTable.getColumnModel().getColumn(0).setMaxWidth(20);//icon installedAddOnsTable.getColumnExt(0).setSortable(false);//icon doesn't need to be sortable installedAddOnsTable.getColumnModel().getColumn(1).setPreferredWidth(200);//name installedAddOnsTable.getColumnModel().getColumn(2).setPreferredWidth(60);//version installedAddOnsTable.getColumnExt(2).setSortable(false);//version doesn't need to be sortable installedAddOnsTable.getColumnModel().getColumn(3).setPreferredWidth(400);//description installedAddOnsTable.getColumnExt(3).setSortable(false);//description doesn't need to be sortable installedAddOnsTable.getColumnModel().getColumn(4).setPreferredWidth(60);//update installedAddOnsTable.getColumnExt(4).setSortable(false);//update doesn't need to be sortable installedAddOnsTable.getColumnModel().getColumn(5).setPreferredWidth(40); installedAddOnsTable.getColumnExt(5).setSortable(false);//checkbox doesn't need to be sortable //Default sort by name (column 1) List<RowSorter.SortKey> sortKeys = new ArrayList<RowSorter.SortKey>(1); sortKeys.add(new RowSorter.SortKey(1, SortOrder.ASCENDING)); installedAddOnsTable.getRowSorter().setSortKeys(sortKeys); DefaultAddOnToolTipHighlighter toolTipHighlighter = new DefaultAddOnToolTipHighlighter( AddOnsTableModel.COLUMN_ADD_ON_WRAPPER); for (int i = 1; i < installedAddOnsTable.getColumnCount(); i++) { installedAddOnsTable.getColumnExt(i).addHighlighter(toolTipHighlighter); } installedAddOnsTable.getColumnExt(0).setCellRenderer( new DefaultTableRenderer(new MappedValue(StringValues.EMPTY, IconValues.NONE), JLabel.CENTER)); installedAddOnsTable.getColumnExt(0).setHighlighters( new CompoundHighlighter( new WarningRunningIssuesHighlighter(AddOnsTableModel.COLUMN_ADD_ON_WRAPPER), new WarningRunningIssuesToolTipHighlighter(AddOnsTableModel.COLUMN_ADD_ON_WRAPPER))); installedAddOnsTable.getColumnExt(3).setHighlighters( new CompoundHighlighter( new WarningUpdateIssuesHighlighter(AddOnsTableModel.COLUMN_ADD_ON_WRAPPER), new WarningUpdateIssuesToolTipHighlighter(AddOnsTableModel.COLUMN_ADD_ON_WRAPPER))); installedAddOnsTable.getColumnExt(4).addHighlighter( new DisableSelectionHighlighter(AddOnsTableModel.COLUMN_ADD_ON_WRAPPER)); } return installedAddOnsTable; } private static ZapTable createCustomZapTable() { ZapTable zapTable = new ZapTable() { private static final long serialVersionUID = 1L; @Override protected AutoScrollAction createAutoScrollAction() { return null; } }; zapTable.setAutoScrollOnNewValues(false); return zapTable; } private ZapTable getUninstalledAddOnsTable () { if 
(uninstalledAddOnsTable == null) { uninstalledAddOnsTable = createCustomZapTable(); uninstalledAddOnsModel.addTableModelListener(new TableModelListener() { @Override public void tableChanged(TableModelEvent e) { getInstallButton().setEnabled(uninstalledAddOnsModel.canIinstallSelected()); getInstallAllButton().setEnabled(uninstalledAddOnsModel.hasAvailableAddOns()); }}); uninstalledAddOnsTable.getSelectionModel().addListSelectionListener(new ListSelectionListener(){ @Override public void valueChanged(ListSelectionEvent e) { getAddOnInfoButton().setEnabled(false); if (DesktopUtils.canOpenUrlInBrowser() && getUninstalledAddOnsTable ().getSelectedRowCount() == 1) { //convertRowIndexToModel in-case they sorted AddOnWrapper ao = uninstalledAddOnsModel.getAddOnWrapper(getUninstalledAddOnsTable().convertRowIndexToModel(getUninstalledAddOnsTable().getSelectedRow())); if (ao != null && ao.getAddOn().getInfo() != null) { getAddOnInfoButton().setEnabled(true); } } }}); uninstalledAddOnsTable.setModel(uninstalledAddOnsModel); uninstalledAddOnsTable.getColumnModel().getColumn(0).setMaxWidth(20);//Icon uninstalledAddOnsTable.getColumnExt(0).setSortable(false); //Icon doesn't need sorting uninstalledAddOnsTable.getColumnModel().getColumn(1).setPreferredWidth(50);//Status uninstalledAddOnsTable.getColumnModel().getColumn(2).setPreferredWidth(150);//Name uninstalledAddOnsTable.getColumnModel().getColumn(3).setPreferredWidth(300);//Description uninstalledAddOnsTable.getColumnExt(3).setSortable(false);//Description doesn't need sorting uninstalledAddOnsTable.getColumnModel().getColumn(4).setPreferredWidth(60);//Update (version number) uninstalledAddOnsTable.getColumnExt(4).setSortable(false);//Update doesn't need sorting uninstalledAddOnsTable.getColumnModel().getColumn(5).setPreferredWidth(40);//Checkbox uninstalledAddOnsTable.getColumnExt(5).setSortable(false);//Checkbox doesn't need sorting //Default sort by status (column 1) descending (Release, Beta, Alpha), and name (column 2) ascending List<RowSorter.SortKey> sortKeys = new ArrayList<RowSorter.SortKey>(2); sortKeys.add(new RowSorter.SortKey(1, SortOrder.DESCENDING)); sortKeys.add(new RowSorter.SortKey(2, SortOrder.ASCENDING)); uninstalledAddOnsTable.getRowSorter().setSortKeys(sortKeys); DefaultAddOnToolTipHighlighter toolTipHighlighter = new DefaultAddOnToolTipHighlighter( UninstalledAddOnsTableModel.COLUMN_ADD_ON_WRAPPER); for (int i = 1; i < uninstalledAddOnsTable.getColumnCount(); i++) { uninstalledAddOnsTable.getColumnExt(i).addHighlighter(toolTipHighlighter); } uninstalledAddOnsTable.getColumnExt(0).setCellRenderer( new DefaultTableRenderer(new MappedValue(StringValues.EMPTY, IconValues.NONE), JLabel.CENTER)); uninstalledAddOnsTable.getColumnExt(0).setHighlighters( new CompoundHighlighter( new WarningRunningIssuesHighlighter(AddOnsTableModel.COLUMN_ADD_ON_WRAPPER), new WarningRunningIssuesToolTipHighlighter(UninstalledAddOnsTableModel.COLUMN_ADD_ON_WRAPPER))); uninstalledAddOnsTable.getColumnExt(5).addHighlighter( new DisableSelectionHighlighter(UninstalledAddOnsTableModel.COLUMN_ADD_ON_WRAPPER)); } return uninstalledAddOnsTable; } private String addOnToHtml(AddOn ao) { StringBuilder sb = new StringBuilder(); sb.append("<html>"); sb.append("<table>"); sb.append("<tr><td><i>"); sb.append(Constant.messages.getString("cfu.table.header.name")); sb.append("</i></td><td>"); sb.append(ao.getName()); sb.append("</td></tr>"); sb.append("<tr><td><i>"); sb.append(Constant.messages.getString("cfu.table.header.status")); sb.append("</i></td><td>"); 
sb.append(Constant.messages.getString("cfu.status." + ao.getStatus().name())); sb.append("</td></tr>"); sb.append("<tr><td><i>"); sb.append(Constant.messages.getString("cfu.table.header.id")); sb.append("</i></td><td>"); sb.append(ao.getId()); sb.append("</td></tr>"); sb.append("<tr><td><i>"); sb.append(Constant.messages.getString("cfu.table.header.desc")); sb.append("</i></td><td>"); sb.append(ao.getDescription()); sb.append("</td></tr>"); sb.append("<tr><td><i>"); sb.append(Constant.messages.getString("cfu.table.header.author")); sb.append("</i></td><td>"); sb.append(ao.getAuthor()); sb.append("</td></tr>"); sb.append("<tr><td><i>"); sb.append(Constant.messages.getString("cfu.table.header.changes")); sb.append("</i></td><td>"); sb.append(ao.getChanges()); sb.append("</td></tr>"); sb.append("<tr><td><i>"); sb.append(Constant.messages.getString("cfu.table.header.version")); sb.append("</i></td><td>"); sb.append(ao.getVersion()); sb.append("</td></tr>"); sb.append("<tr><td><i>"); sb.append(Constant.messages.getString("cfu.table.header.notbefore")); sb.append("</i></td><td>"); sb.append(ao.getNotBeforeVersion()); sb.append("</td></tr>"); sb.append("<tr><td><i>"); sb.append(Constant.messages.getString("cfu.table.header.notfrom")); sb.append("</i></td><td>"); sb.append(ao.getNotFromVersion()); sb.append("</td></tr>"); if (!ao.getIdsAddOnDependencies().isEmpty()) { sb.append("<tr><td><i>"); sb.append(Constant.messages.getString("cfu.table.header.dependencies")); sb.append("</i></td><td>"); for (String addOnId : ao.getIdsAddOnDependencies()) { AddOn dep = installedAddOns.getAddOn(addOnId); if (dep == null && latestInfo != null) { dep = latestInfo.getAddOn(addOnId); } if (dep != null) { sb.append(dep.getName()); } else { sb.append(addOnId); } sb.append("<br>"); } sb.append("</td></tr>"); } sb.append("</table>"); sb.append("</html>"); return sb.toString(); } private JLabel getUpdatesMessage() { if (this.updatesMessage == null) { this.updatesMessage = new JLabel(" "); } return this.updatesMessage; } private JButton getCoreNotesButton() { if (coreNotesButton == null) { coreNotesButton = new JButton(); coreNotesButton.setIcon(new ImageIcon(ManageAddOnsDialog.class.getResource("/resource/icon/16/022.png"))); // 'Text file' icon coreNotesButton.setToolTipText(Constant.messages.getString("cfu.button.zap.relnotes")); final ManageAddOnsDialog dialog = this; coreNotesButton.addActionListener(new java.awt.event.ActionListener() { @Override public void actionPerformed(java.awt.event.ActionEvent e) { URL url = latestInfo.getZapRelease().getReleaseNotesUrl(); if (url != null && DesktopUtils.canOpenUrlInBrowser()) { if (DesktopUtils.openUrlInBrowser(url.toString())) { // It worked :) return; } } StringBuilder sb = new StringBuilder(); sb.append("<html>"); sb.append(Constant.messages.getString("cfu.title.relnotes", latestInfo.getZapRelease().getVersion())); // Reformat the notes into html - the leading and trailing whitespace does need to be removed for some reason String []strs = latestInfo.getZapRelease().getReleaseNotes().split("\n"); for (String s : strs) { sb.append(s.replace("&lt;", "<").trim()); } sb.append("</html>"); View.getSingleton().showMessageDialog(dialog, sb.toString()); } }); } return coreNotesButton; } private JButton getClose1Button() { if (close1Button == null) { close1Button = new JButton(); close1Button.setText(Constant.messages.getString("all.button.close")); close1Button.addActionListener(new java.awt.event.ActionListener() { @Override public void 
actionPerformed(java.awt.event.ActionEvent e) { ManageAddOnsDialog.this.dispose(); } }); } return close1Button; } private JButton getClose2Button() { if (close2Button == null) { close2Button = new JButton(); close2Button.setText(Constant.messages.getString("all.button.close")); close2Button.addActionListener(new java.awt.event.ActionListener() { @Override public void actionPerformed(java.awt.event.ActionEvent e) { ManageAddOnsDialog.this.dispose(); } }); } return close2Button; } private JLabel getDownloadProgress() { if (downloadProgress == null) { downloadProgress = new JLabel(""); } return downloadProgress; } private JButton getCheckForUpdatesButton() { if (checkForUpdatesButton == null) { checkForUpdatesButton = new JButton(); checkForUpdatesButton.setText(Constant.messages.getString("cfu.button.checkForUpdates")); checkForUpdatesButton.addActionListener(new java.awt.event.ActionListener() { @Override public void actionPerformed(java.awt.event.ActionEvent e) { checkForUpdates(); } }); } return checkForUpdatesButton; } protected void checkForUpdates() { this.setCursor(new Cursor(Cursor.WAIT_CURSOR)); extension.getLatestVersionInfo(this); this.setCursor(Cursor.getDefaultCursor()); } private JButton getDownloadZapButton() { if (downloadZapButton == null) { downloadZapButton = new JButton(); if (Constant.isKali()) { getDownloadZapButton().setText(Constant.messages.getString("cfu.button.zap.options")); } else { downloadZapButton.setText(Constant.messages.getString("cfu.button.zap.download")); } downloadZapButton.addActionListener(new java.awt.event.ActionListener() { @Override public void actionPerformed(java.awt.event.ActionEvent e) { if (extension.downloadLatestRelease()) { setDownloadingZap(); } } }); } return downloadZapButton; } protected void setDownloadingZap() { downloadZapButton.setEnabled(false); getUpdateButton().setEnabled(false); // Makes things less complicated getUpdateAllButton().setEnabled(false); state = State.DOWNLOADING_ZAP; getUpdatesMessage().setText(Constant.messages.getString("cfu.check.zap.downloading")); } protected void setDownloadingUpdates() { if (EventQueue.isDispatchThread()) { this.getDownloadZapButton().setEnabled(false); // Makes things less complicated this.getUpdateButton().setEnabled(false); this.getUpdateAllButton().setEnabled(false); this.state = State.DOWNLOADING_UPDATES; this.getUpdatesMessage().setText(Constant.messages.getString("cfu.check.upd.downloading")); } else { EventQueue.invokeLater(new Runnable() { @Override public void run() { setDownloadingUpdates(); } }); } } /** * Notifies that the given {@code addOn} is being downloaded. * * @param addOn the add-on that is being downloaded * @since 2.4.0 */ public void notifyAddOnDownloading(AddOn addOn) { if (installedAddOnsModel.notifyAddOnDownloading(addOn)) { // It's an update... return; } uninstalledAddOnsModel.notifyAddOnDownloading(addOn); } /** * Notifies that the download of the add-on with the given {@code url} has failed. * <p> * The entry of the add-on is updated to report that the download failed. * * @param url the URL of the add-on that was being downloaded * @since 2.4.0 */ public void notifyAddOnDownloadFailed(String url) { if (installedAddOnsModel.notifyAddOnDownloadFailed(url)) { // It's an update... return; } uninstalledAddOnsModel.notifyAddOnDownloadFailed(url); } /** * Notifies that the given {@code addOn} was installed. The add-on is added to the table of installed add-ons or, if it is an * update, marked as updated, and, if available in the marketplace, removed from the table of available add-ons. * * @param addOn the add-on that was installed * @since 2.4.0 */ public void notifyAddOnInstalled(final AddOn addOn) { if (EventQueue.isDispatchThread()) { if (latestInfo != null && latestInfo.getAddOn(addOn.getId()) != null) { uninstalledAddOnsModel.removeAddOn(addOn); } installedAddOnsModel.addOrRefreshAddOn(addOn); } else { EventQueue.invokeLater(new Runnable() { @Override public void run() { notifyAddOnInstalled(addOn); } }); } } /** * Notifies that the given {@code addOn} was not successfully uninstalled. Add-ons that were not successfully uninstalled are * not re-selectable. * * @param addOn the add-on that was not successfully uninstalled * @since 2.4.0 */ public void notifyAddOnFailedUninstallation(final AddOn addOn) { if (EventQueue.isDispatchThread()) { installedAddOnsModel.notifyAddOnFailedUninstallation(addOn); } else { EventQueue.invokeLater(new Runnable() { @Override public void run() { notifyAddOnFailedUninstallation(addOn); } }); } } /** * Notifies that the given {@code addOn} was uninstalled. The add-on is removed from the table of installed add-ons and, if * available in the marketplace, added to the table of available add-ons. * * @param addOn the add-on that was uninstalled * @since 2.4.0 */ public void notifyAddOnUninstalled(final AddOn addOn) { if (EventQueue.isDispatchThread()) { installedAddOnsModel.removeAddOn(addOn); if (latestInfo != null) { AddOn availableAddOn = latestInfo.getAddOn(addOn.getId()); if (availableAddOn != null) { uninstalledAddOnsModel.addAddOn(latestInfo.getAddOn(addOn.getId())); } } } else { EventQueue.invokeLater(new Runnable() { @Override public void run() { notifyAddOnUninstalled(addOn); } }); } } private JButton getUpdateButton() { if (updateButton == null) { updateButton = new JButton(); updateButton.setText(Constant.messages.getString("cfu.button.addons.update")); updateButton.setEnabled(false); // Nothing will be selected initially updateButton.addActionListener(new java.awt.event.ActionListener() { @Override public void actionPerformed(java.awt.event.ActionEvent e) { processUpdates(installedAddOnsModel.getSelectedUpdates()); } }); } return updateButton; } private JButton getUpdateAllButton() { if (updateAllButton == null) { updateAllButton = new JButton(); updateAllButton.setText(Constant.messages.getString("cfu.button.addons.updateAll")); updateAllButton.setEnabled(false); // Nothing will be selected initially updateAllButton.addActionListener(new java.awt.event.ActionListener() { @Override public void actionPerformed(java.awt.event.ActionEvent e) { processUpdates(installedAddOnsModel.getAllUpdates()); } }); } return updateAllButton; } private void processUpdates(Set<AddOn> updatedAddOns) { if (updatedAddOns.isEmpty()) { return; } AddOnDependencyChecker calc = new AddOnDependencyChecker(installedAddOns, latestInfo); AddOnDependencyChecker.AddOnChangesResult result = calc.calculateUpdateChanges(updatedAddOns); if (!calc.confirmUpdateChanges(ManageAddOnsDialog.this, result)) { return; } extension.processAddOnChanges(ManageAddOnsDialog.this, result); } private JButton getUninstallButton() { if (uninstallButton == null) { uninstallButton = new JButton(); uninstallButton.setText(Constant.messages.getString("cfu.button.addons.uninstall")); uninstallButton.setEnabled(false); // Nothing will be selected initially uninstallButton.addActionListener(new java.awt.event.ActionListener()
{ @Override public void actionPerformed(java.awt.event.ActionEvent e) { Set<AddOn> selectedAddOns = installedAddOnsModel.getSelectedAddOns(); if (selectedAddOns.isEmpty()) { return; } Set<AddOn> addOnsBeingDownloaded = installedAddOnsModel.getDownloadingAddOns(); addOnsBeingDownloaded.addAll(uninstalledAddOnsModel.getDownloadingAddOns()); AddOnDependencyChecker calc = new AddOnDependencyChecker(installedAddOns, latestInfo); AddOnDependencyChecker.UninstallationResult changes = calc.calculateUninstallChanges(selectedAddOns); if (!calc.confirmUninstallChanges(ManageAddOnsDialog.this, changes, addOnsBeingDownloaded)) { return; } Set<AddOn> addOns = changes.getUninstallations(); Set<Extension> extensions = changes.getExtensions(); if (!extension.warnUnsavedResourcesOrActiveActions(ManageAddOnsDialog.this, addOns, extensions, false)) { return; } extension.uninstallAddOnsWithView(ManageAddOnsDialog.this, addOns, false, new HashSet<AddOn>()); } }); } return uninstallButton; } private JButton getInstallAllButton() { if (installAllButton == null) { installAllButton = new JButton(); installAllButton.setEnabled(false); installAllButton.setText(Constant.messages.getString("cfu.button.addons.installall")); installAllButton.addActionListener(e -> installAddOns(uninstalledAddOnsModel.getAvailableAddOns())); } return installAllButton; } private JButton getInstallButton() { if (installButton == null) { installButton = new JButton(); installButton.setText(Constant.messages.getString("cfu.button.addons.install")); installButton.setEnabled(false); // Nothing will be selected initially installButton.addActionListener(e -> installAddOns(uninstalledAddOnsModel.getSelectedAddOns())); } return installButton; } private void installAddOns(Set<AddOn> addOns) { if (addOns.isEmpty()) { return; } AddOnDependencyChecker calc = new AddOnDependencyChecker(installedAddOns, latestInfo); AddOnDependencyChecker.AddOnChangesResult changes = calc.calculateInstallChanges(addOns); if (!calc.confirmInstallChanges(ManageAddOnsDialog.this, changes)) { return; } extension.processAddOnChanges(ManageAddOnsDialog.this, changes); } private JButton getAddOnInfoButton() { if (addOnInfoButton == null) { addOnInfoButton = new JButton(); addOnInfoButton.setText(Constant.messages.getString("cfu.button.addons.info")); addOnInfoButton.setEnabled(false); // Nothing will be selected initially addOnInfoButton.addActionListener(new java.awt.event.ActionListener() { @Override public void actionPerformed(java.awt.event.ActionEvent e) { if (getUninstalledAddOnsTable().getSelectedRow() >= 0) { //convertRowIndexToModel in-case they sorted AddOnWrapper ao = uninstalledAddOnsModel.getAddOnWrapper(getUninstalledAddOnsTable().convertRowIndexToModel(getUninstalledAddOnsTable().getSelectedRow())); if (ao != null && ao.getAddOn().getInfo() != null) { DesktopUtils.openUrlInBrowser(ao.getAddOn().getInfo().toString()); } } } }); } return addOnInfoButton; } public void showProgress() { if (this.state.equals(State.DOWNLOADING_UPDATES)) { // Updates installedAddOnsModel.updateDownloadsProgresses(extension); // New addons uninstalledAddOnsModel.updateDownloadsProgresses(extension); if (extension.getCurrentDownloadCount() == 0) { this.state = State.DOWNLOADED_UPDATES; this.getDownloadZapButton().setEnabled(true); this.getUpdatesMessage().setText(Constant.messages.getString("cfu.check.upd.downloaded")); } } else if (this.state.equals(State.DOWNLOADING_ZAP)) { try { int progress = extension.getDownloadProgressPercent(this.latestInfo.getZapRelease().getUrl()); if (progress 
> 0) { this.getDownloadProgress().setText(progress + "%"); if (progress >= 100) { this.zapDownloadComplete(); } } } catch (Exception e) { logger.debug("Error on " + this.latestInfo.getZapRelease().getUrl(), e); this.getDownloadProgress().setText(Constant.messages.getString("cfu.table.label.failed")); } } } private void zapDownloadComplete () { if (this.state.equals(State.DOWNLOADED_ZAP)) { // Prevent re-entry return; } this.state = State.DOWNLOADED_ZAP; File f = new File(Constant.FOLDER_LOCAL_PLUGIN, latestInfo.getZapRelease().getFileName()); if (Desktop.isDesktopSupported()) { extension.promptToLaunchReleaseAndClose(this.latestInfo.getZapRelease().getVersion(), f); } else { View.getSingleton().showWarningDialog(this, Constant.messages.getString("cfu.warn.nolaunch", this.latestInfo.getZapRelease().getVersion(), f.getAbsolutePath())); } // Let people download updates now this.getUpdateButton().setEnabled(true); this.getUpdateAllButton().setEnabled(true); this.getUpdatesMessage().setText(Constant.messages.getString("cfu.check.zap.downloaded", f.getAbsolutePath())); } @Override public void gotLatestData(AddOnCollection aoc) { // Callback logger.debug("gotLatestData(AddOnCollection " + aoc); if (aoc != null) { setLatestVersionInfo(aoc); } else { View.getSingleton().showWarningDialog(this, Constant.messages.getString("cfu.check.failed")); } } @Override public void insecureUrl(String url, Exception cause) { logger.error("Failed to get check for updates on " + url, cause); View.getSingleton().showWarningDialog(this, Constant.messages.getString("cfu.warn.badurl")); } private static class DisableSelectionHighlighter extends AbstractHighlighter { public DisableSelectionHighlighter(final int columnIndex) { setHighlightPredicate(new HighlightPredicate() { @Override public boolean isHighlighted(final Component renderer, final ComponentAdapter adapter) { AddOn.InstallationStatus status = ((AddOnWrapper) adapter.getValue(columnIndex)).getInstallationStatus(); return AddOn.InstallationStatus.UNINSTALLATION_FAILED == status || AddOn.InstallationStatus.SOFT_UNINSTALLATION_FAILED == status || AddOn.InstallationStatus.DOWNLOADING == status; } }); } @Override protected Component doHighlight(Component renderer, ComponentAdapter adapter) { renderer.setEnabled(false); return renderer; } } private static class WarningRunningIssuesHighlighter extends IconHighlighter { private final int columnIndex; public WarningRunningIssuesHighlighter(int columnIndex) { super(); this.columnIndex = columnIndex; setHighlightPredicate(new HighlightPredicate.EqualsHighlightPredicate(Boolean.TRUE)); } public Icon getIcon(ComponentAdapter adapter) { AddOnWrapper aow = (AddOnWrapper) adapter.getValue(columnIndex); if (aow.isAddOnRunningIssues()) { return ICON_ADD_ON_ISSUES; } return ICON_ADD_ON_EXTENSION_ISSUES; } @Override protected Component doHighlight(Component component, ComponentAdapter adapter) { if (component instanceof IconAware) { ((IconAware) component).setIcon(getIcon(adapter)); } else if (component instanceof JLabel) { ((JLabel) component).setIcon(getIcon(adapter)); } return component; } } private static class WarningUpdateIssuesHighlighter extends IconHighlighter { private final int columnIndex; public WarningUpdateIssuesHighlighter(int columnIndex) { super(); this.columnIndex = columnIndex; setHighlightPredicate(new HighlightPredicate() { @Override public boolean isHighlighted(final Component renderer, final ComponentAdapter adapter) { AddOnWrapper aow = (AddOnWrapper) 
adapter.getValue(WarningUpdateIssuesHighlighter.this.columnIndex); if (AddOnWrapper.Status.newVersion == aow.getStatus()) { return aow.hasUpdateIssues(); } return false; } }); } public Icon getIcon(ComponentAdapter adapter) { AddOnWrapper aow = (AddOnWrapper) adapter.getValue(columnIndex); if (aow.isAddOnUpdateIssues()) { return ICON_ADD_ON_ISSUES; } return ICON_ADD_ON_EXTENSION_ISSUES; } @Override protected Component doHighlight(Component component, ComponentAdapter adapter) { if (component instanceof IconAware) { ((IconAware) component).setIcon(getIcon(adapter)); } else if (component instanceof JLabel) { ((JLabel) component).setIcon(getIcon(adapter)); } return component; } } private class DefaultAddOnToolTipHighlighter extends AbstractHighlighter { private final int column; public DefaultAddOnToolTipHighlighter(int column) { this.column = column; } @Override protected Component doHighlight(Component component, ComponentAdapter adapter) { ((JComponent) component).setToolTipText(getToolTip((AddOnWrapper) adapter.getValue(column))); return component; } protected String getToolTip(AddOnWrapper aow) { if (AddOn.InstallationStatus.UNINSTALLATION_FAILED == aow.getInstallationStatus() || AddOn.InstallationStatus.SOFT_UNINSTALLATION_FAILED == aow.getInstallationStatus()) { return addOnToHtml(aow.getAddOn()); } AddOn addOn = (aow.getAddOnUpdate() != null) ? aow.getAddOnUpdate() : aow.getAddOn(); return addOnToHtml(addOn); } } private class WarningRunningIssuesToolTipHighlighter extends DefaultAddOnToolTipHighlighter { public WarningRunningIssuesToolTipHighlighter(int column) { super(column); } @Override protected String getToolTip(AddOnWrapper aow) { if (aow.hasRunningIssues()) { return aow.getRunningIssues(); } return super.getToolTip(aow); } } private class WarningUpdateIssuesToolTipHighlighter extends DefaultAddOnToolTipHighlighter { public WarningUpdateIssuesToolTipHighlighter(int column) { super(column); } @Override protected String getToolTip(AddOnWrapper aow) { if (aow.hasUpdateIssues()) { return aow.getUpdateIssues(); } return super.getToolTip(aow); } } }
/*******************************************************************************
 *
 * This file is part of iBioSim. Please visit <http://www.async.ece.utah.edu/ibiosim>
 * for the latest version of iBioSim.
 *
 * Copyright (C) 2017 University of Utah
 *
 * This library is free software; you can redistribute it and/or modify it
 * under the terms of the Apache License. A copy of the license agreement is provided
 * in the file named "LICENSE.txt" included with this software distribution
 * and also available online at <http://www.async.ece.utah.edu/ibiosim/License>.
 *
 *******************************************************************************/
package edu.utah.ece.async.lema.verification.lpn.properties;

import java.awt.List;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.util.Properties;

import org.antlr.runtime.*;
import org.antlr.runtime.tree.*;

import edu.utah.ece.async.ibiosim.dataModels.util.exceptions.BioSimException;
import edu.utah.ece.async.lema.verification.lpn.LPN;
import edu.utah.ece.async.lema.verification.lpn.Translator;
import edu.utah.ece.async.lema.verification.lpn.Variable;

/**
 *
 *
 * @author
 * @author Chris Myers
 * @author <a href="http://www.async.ece.utah.edu/ibiosim#Credits"> iBioSim Contributors </a>
 * @version %I%
 */
public class BuildProperty {

    //public static JFrame frame;
    static int numPlaces = 0;
    static int numTransitions = 0;
    static int numFailTransitions = 0;
    static int numFailPlaces = 0;
    static int numStartPlaces = 0;
    static int numEndPlaces = 0;
    static String pFirst = "p0";
    static String pLast = "";
    static boolean loop = false;
    static List list = new List();

    public static void buildProperty(String propFileName) throws IOException, RecognitionException, BioSimException {
        //String propertyId = JOptionPane.showInputDialog(frame, "Enter the SVA property name:", "Model ID", JOptionPane.PLAIN_MESSAGE);
        //System.out.println(propertyId);
        //if (propertyId!=null){
        //String property = JOptionPane.showInputDialog(frame, "Enter the SVA property:", "Model", JOptionPane.PLAIN_MESSAGE);
        //CharStream charStream = new ANTLRStringStream(" wait(omega > 2.2, 20);\r\n" +
        //"assert(abc, 20); ");
        numPlaces = 0;
        numTransitions = 0;
        numFailTransitions = 0;
        numFailPlaces = 0;
        numStartPlaces = 0;
        numEndPlaces = 0;
        LPN lpn = new LPN();
        //lpn.load(propFileName);
        String lpnFileString = propFileName.substring(0, propFileName.length()-4);
        String lpnFileName = lpnFileString.concat("lpn");
        File lpnFile = new File(lpnFileName);
        lpnFile.createNewFile();
        //String[] lpnPath = lpnFileName.split(separator);
        //System.out.println("No of places : "+numPlaces);
        BufferedReader input = new BufferedReader(new FileReader(propFileName));
        String line = input.readLine();
        //StringBuffer sb2 = new StringBuffer(line);
        StringBuffer sb = new StringBuffer();
        //LhpnFile lpn = new LhpnFile();
        // Append every line of the property file, stopping before the null returned at end-of-file.
        while (line != null) {
            sb.append(line);
            line = input.readLine();
        }
        input.close();
        String property = sb.toString();
        //System.out.println("property: "+property+"\n");
        CharStream charStream = new ANTLRStringStream(property);
        PropertyLexer lexer = new PropertyLexer(charStream);
        TokenStream tokenStream = new CommonTokenStream(lexer);
        PropertyParser parser = new PropertyParser(tokenStream);
        PropertyParser.program_return program;
        try {
            program = parser.program();
        } catch (Exception e) {
// TODO Auto-generated catch block e.printStackTrace(); //JOptionPane.showMessageDialog(Gui.frame, "Error parsing property file, check console.", "Parse Error", JOptionPane.ERROR_MESSAGE); return; } if (parser.getNumberOfSyntaxErrors()>0) { throw new BioSimException("Error parsing property file, check console.", "Parse Error"); } //System.out.println("tree: "+((Tree)program.tree).toStringTree()+"\n"); CommonTree r0 = program.tree; //System.out.println("parent :"+program.start.getText()); //int number = r0.getChildCount(); //System.out.println("NUMBER : "+number+"\n"); //printTree(r0, number); generateFile(r0, lpn,lpnFileName); } public static void generateFile(CommonTree r0, LPN lpn, String lpnFileName) throws BioSimException{ LPN lpnFinal = new LPN(); File lpnFile = new File(".lpn"); try { lpnFile.createNewFile(); } catch (IOException e) { e.printStackTrace(); } try { lpnFinal = generateLPN(r0, lpn, false, null); if(loop){ lpnFinal.addTransition("t" + numTransitions); lpnFinal.changeDelay("t" + numTransitions, "0"); numTransitions++; lpnFinal.addMovement(pLast,"t" +(numTransitions-1)); lpnFinal.addMovement("t" +(numTransitions-1), pFirst); loop=false; } } catch (IOException e) { e.printStackTrace(); } lpnFinal.save(lpnFileName); Translator t1 = new Translator(); t1.convertLPN2SBML(lpnFileName, ""); t1.setFilename(lpnFileName.replace(".lpn", ".xml")); t1.outputSBML(); } public static void printTree(CommonTree t, int number) { if ( t != null ) { StringBuffer sb = new StringBuffer(number); for ( int i = 0; i < number; i++ ) sb = sb.append(" "); for ( int i = 0; i < t.getChildCount(); i++ ) { System.out.println(sb.toString() + t.getChild(i).toString()); printTree((CommonTree)t.getChild(i), number+1); } } } public static LPN generateLPN(CommonTree t, LPN lpn2, boolean recursiveCall, String stayConditions) throws IOException { String enablingCond=""; String delay=""; String varType = " "; String varName = ""; LPN lpnObj= lpn2; if ( t != null ) { int childCount=0; if(recursiveCall){ childCount=1; } else{ childCount= t.getChildCount(); } //System.out.println("child count is : "+t.getChildCount()); for(int i=0;i<childCount;i++){ //System.out.println("child is : "+t.getChild(i)); } for(int i=0;i<childCount;i++){ CommonTree switchCaseTree= new CommonTree(); if(recursiveCall){ //System.out.println("Start of switch statement in recursive call:"+t); switchCaseTree=t; } else { //System.out.println("Start of switch statement not in recursive call:"+t.getChild(i)); switchCaseTree=(CommonTree)t.getChild(i); } switch(switchCaseTree.getType()) { case edu.utah.ece.async.lema.verification.lpn.properties.PropertyLexer.SENALWAYS : int senalways_pFirst; if (numPlaces == 0) { senalways_pFirst = 0; } else{ senalways_pFirst = (numPlaces-1); } // Create a variable for the sensitivity list. String[] sensitivityList = null; String[] sensitivityVariables = null; // Check if senalways has any children. if(switchCaseTree.getChildCount() > 0){ // Check if the zero-th child is a sensitivity list. CommonTree theList = (CommonTree) switchCaseTree.getChild(0); if(theList.getType() == edu.utah.ece.async.lema.verification.lpn.properties.PropertyLexer.LPARA){ sensitivityList = new String[theList.getChildCount()]; sensitivityVariables = new String[theList.getChildCount()]; // Loop through the list gathering the elements. 
int elementCount = 0; for(Object obj : theList.getChildren()){ CommonTree listElement = (CommonTree) obj; sensitivityList[elementCount] = generateExpression(listElement); String storedVariableType = lpnObj.getVariable( sensitivityList[elementCount]).getType(); String storedVariable = "_" + sensitivityList[elementCount]; // Add the stay condition. if(stayConditions != null & elementCount > 0){ stayConditions += "&"; }else{ stayConditions = ""; } stayConditions += "(" + sensitivityList[elementCount] + "=" + storedVariable + ")"; sensitivityVariables[elementCount] = storedVariable; if(storedVariableType == "boolean"){ // lpnObj.addBoolean(sensitivityVariables[elementCount], "0"); lpnObj.addInput(sensitivityVariables[elementCount], storedVariableType, "0"); } else if (storedVariableType == "integer"){ lpnObj.addInteger(sensitivityVariables[elementCount], "0"); } else if (storedVariableType == "discrete"){ lpnObj.addInteger(sensitivityVariables[elementCount], "0"); } else if (storedVariableType == "continuous"){ Properties initCond = new Properties(); initCond.put("value", "0"); initCond.put("rate", "0"); lpnObj.addContinuousInput(sensitivityVariables[elementCount], initCond); } elementCount++; } // Remove the sensitivity list. switchCaseTree.deleteChild(0); if(numPlaces == 0){ lpnObj.addPlace("p" + 0, true); numPlaces++; } // Create the transition that stores the variables in the the list. lpnObj.addTransition("t" + numTransitions); lpnObj.changeDelay("t" + numTransitions, "0"); numTransitions++; lpnObj.addPlace("p" + numPlaces, false); numPlaces++; lpnObj.addMovement("p" + senalways_pFirst, "t" + (numTransitions-1)); lpnObj.addMovement("t" + (numTransitions-1), "p" + (numPlaces-1)); for(int k=0; k<sensitivityVariables.length; k++){ lpnObj.addIntAssign("t" + (numTransitions-1), sensitivityVariables[k], sensitivityList[k]); } senalways_pFirst = (numPlaces-1); } } // System.out.println("senalways child count is: " + senalways_count); lpnObj = generateLPN(switchCaseTree, lpnObj, false, stayConditions); // String senalways_pLast = "p" + (numPlaces-1); int senalways_pLast = (numPlaces -1); // loop=true; // if(loop){ lpnObj.addTransition("t" + numTransitions); lpnObj.changeDelay("t" + numTransitions, "0"); if(stayConditions != null){ lpnObj.addEnabling("t" + numTransitions, stayConditions); } numTransitions++; // lpnObj.addMovement(pLast,"t" +(numTransitions-1)); lpnObj.addMovement("p" + senalways_pLast,"t" +(numTransitions-1)); // lpnObj.addMovement("t" +(numTransitions-1), pFirst); lpnObj.addMovement("t" +(numTransitions-1), "p" + senalways_pFirst); // loop=false; // } pLast = "p" + (numPlaces); lpnObj.addPlace("p"+numPlaces, false); numPlaces++; if(sensitivityList != null && sensitivityVariables != null){ // Create the enabling condition. 
enablingCond = "~(" + sensitivityVariables[0] + " = " + sensitivityList[0] + ")"; for(int k=1; k<sensitivityVariables.length; k++){ enablingCond += "& ~(" + sensitivityVariables[k] + " = " + sensitivityList[k] + ")"; } for(int k=0; k <= senalways_pLast - senalways_pFirst; k++){ lpnObj.addTransition("t" + numTransitions); // lpnObj.addEnabling("t" + numTransitions, // "~(" + sensitivityVariables[0] + // "=" + sensitivityList[0] + ")"); // enablingCond = "~( " + sensitivityVariables[0] + " = " + sensitivityList[0] + " )"; // enablingCond = "senalways2a1"; // enablingCond = sensitivityVariables[0]; lpnObj.addEnabling("t" + numTransitions, enablingCond); lpnObj.changeDelay("t" + numTransitions, "0"); lpnObj.addMovement("p" + (senalways_pFirst+k), "t" + numTransitions); lpnObj.addMovement("t" + numTransitions, pLast); numTransitions++; } } break; case edu.utah.ece.async.lema.verification.lpn.properties.PropertyLexer.ALWAYS : //System.out.println("child count of always : "+switchCaseTree.getChildCount()); // if(numPlaces==0) pFirst="p0"; // else pFirst = "p"+(numPlaces-1); String always_pFirst; if (numPlaces == 0){ always_pFirst = "p0"; } else{ always_pFirst = "p"+(numPlaces-1); } //System.out.println("pFirst is :"+pFirst); //for(int q=0; q<switchCaseTree.getChildCount();q++){ lpnObj = generateLPN(switchCaseTree, lpnObj, false, stayConditions); // } String always_pLast = "p"+(numPlaces-1); // pLast="p"+(numPlaces-1); loop=true; if(loop){ lpnObj.addTransition("t" + numTransitions); lpnObj.changeDelay("t" + numTransitions, "0"); numTransitions++; // lpnObj.addMovement(pLast,"t" +(numTransitions-1)); lpnObj.addMovement(always_pLast,"t" +(numTransitions-1)); // lpnObj.addMovement("t" +(numTransitions-1), pFirst); lpnObj.addMovement("t" +(numTransitions-1), always_pFirst); loop=false; } pLast = "p" + (numPlaces); lpnObj.addPlace("p"+numPlaces, false); numPlaces++; break; case edu.utah.ece.async.lema.verification.lpn.properties.PropertyLexer.RCURL : pLast= "p"+(numPlaces-1); //loop=false; //System.out.println("pLast is :"+pLast); break; case edu.utah.ece.async.lema.verification.lpn.properties.PropertyLexer.BOOLEAN : varType = "boolean"; varName = generateExpression((CommonTree)switchCaseTree.getChild(0)); lpnObj.addInput(varName, varType, "0"); break; case edu.utah.ece.async.lema.verification.lpn.properties.PropertyLexer.REAL : varType = "continuous"; varName = generateExpression((CommonTree)switchCaseTree.getChild(0)); // lpnObj.addInput(varName, varType); lpnObj.addInput(varName, varType, "0"); Variable newVar = lpnObj.getVariable(varName); newVar.addInitRate("0"); break; case edu.utah.ece.async.lema.verification.lpn.properties.PropertyLexer.INTEGER : varType = "integer"; varName = generateExpression((CommonTree)switchCaseTree.getChild(0)); lpnObj.addInput(varName, varType, "0"); break; case edu.utah.ece.async.lema.verification.lpn.properties.PropertyLexer.ASSERT : //System.out.println("Assert statement "); enablingCond= generateExpression((CommonTree)switchCaseTree.getChild(0)); //System.out.println("Assert statement enabling cond :"+enablingCond); delay= generateExpression((CommonTree)switchCaseTree.getChild(1)); if(numPlaces==0){ lpnObj.addPlace("p"+numPlaces, true); numPlaces++; } lpnObj.addTransition("t" + numTransitions); if(stayConditions != null){ lpnObj.addEnabling("t" +numTransitions, "(" + enablingCond + ")&" + stayConditions); }else{ lpnObj.addEnabling("t" +numTransitions, enablingCond); } lpnObj.changeDelay("t" +numTransitions, delay); numTransitions++; 
lpnObj.addMovement("p"+(numPlaces-1), "t" +(numTransitions-1)); lpnObj.addTransition("tFail" + numFailTransitions); lpnObj.changeDelay("tFail" + numFailTransitions, "0"); lpnObj.getTransition("tFail" + numFailTransitions).setFail(true); if(stayConditions != null){ lpnObj.addEnabling("tFail" +numFailTransitions, "~("+enablingCond+")&" + stayConditions); }else{ lpnObj.addEnabling("tFail" +numFailTransitions, "~("+enablingCond+")"); } numFailTransitions++; lpnObj.addMovement("p"+(numPlaces-1), "tFail" +(numFailTransitions-1)); lpnObj.addPlace("pFail"+numFailPlaces, false); numFailPlaces++; lpnObj.addMovement( "tFail" +(numFailTransitions-1),"pFail"+(numFailPlaces-1)); lpnObj.addPlace("p"+numPlaces, false); numPlaces++; lpnObj.addMovement( "t" +(numTransitions-1),"p"+(numPlaces-1)); break; case edu.utah.ece.async.lema.verification.lpn.properties.PropertyLexer.ASSERT_STABLE : break; case edu.utah.ece.async.lema.verification.lpn.properties.PropertyLexer.WAIT_STABLE : //System.out.println("child is :"+(CommonTree)switchCaseTree.getChild(0)); enablingCond= generateExpression((CommonTree)switchCaseTree.getChild(0)); delay= generateExpression((CommonTree)switchCaseTree.getChild(1)); if(numPlaces==0){ lpnObj.addPlace("p"+numPlaces, true); numPlaces++; } lpnObj.addTransition("t" + numTransitions); lpnObj.addEnabling("t" +numTransitions, enablingCond); lpnObj.changeDelay("t" +numTransitions, delay); numTransitions++; lpnObj.addMovement("p"+(numPlaces-1), "t" +(numTransitions-1)); lpnObj.addPlace("p"+numPlaces, false); numPlaces++; lpnObj.addMovement( "t" +(numTransitions-1),"p"+(numPlaces-1)); lpnObj.getTransition("t" +(numTransitions-1)).setPersistent(true); break; case edu.utah.ece.async.lema.verification.lpn.properties.PropertyLexer.ASSERT_UNTIL : String string1 = generateExpression((CommonTree)switchCaseTree.getChild(0)); String string2 = generateExpression((CommonTree)switchCaseTree.getChild(1)); enablingCond= string2; if(numPlaces==0){ lpnObj.addPlace("p"+numPlaces, true); numPlaces++; } lpnObj.addTransition("t" + numTransitions); lpnObj.addEnabling("t" +numTransitions, enablingCond); lpnObj.changeDelay("t" + numTransitions, "0"); numTransitions++; lpnObj.addMovement("p"+(numPlaces-1), "t" +(numTransitions-1)); lpnObj.addTransition("tFail" + numFailTransitions); lpnObj.changeDelay("tFail" + numFailTransitions, "0"); lpnObj.getTransition("tFail" + numFailTransitions).setFail(true); numFailTransitions++; enablingCond = "~("+string1+") & "+"~("+string2+")"; lpnObj.addEnabling("tFail" +(numFailTransitions-1), enablingCond); lpnObj.addMovement("p"+(numPlaces-1), "tFail" +(numFailTransitions-1)); lpnObj.addPlace("pFail"+numFailPlaces, false); numFailPlaces++; lpnObj.addMovement( "tFail" +(numFailTransitions-1),"pFail"+(numFailPlaces-1)); lpnObj.addPlace("p"+numPlaces, false); numPlaces++; lpnObj.addMovement( "t" +(numTransitions-1),"p"+(numPlaces-1)); break; case edu.utah.ece.async.lema.verification.lpn.properties.PropertyLexer.ID : //System.out.println("Property name "); break; case edu.utah.ece.async.lema.verification.lpn.properties.PropertyLexer.INT : break; case edu.utah.ece.async.lema.verification.lpn.properties.PropertyLexer.NOT : break; case edu.utah.ece.async.lema.verification.lpn.properties.PropertyLexer.PLUS : break; case edu.utah.ece.async.lema.verification.lpn.properties.PropertyLexer.MINUS : break; case edu.utah.ece.async.lema.verification.lpn.properties.PropertyLexer.MULT : break; case edu.utah.ece.async.lema.verification.lpn.properties.PropertyLexer.DIV : break; case 
edu.utah.ece.async.lema.verification.lpn.properties.PropertyLexer.MOD : break; case edu.utah.ece.async.lema.verification.lpn.properties.PropertyLexer.EQUAL : break; case edu.utah.ece.async.lema.verification.lpn.properties.PropertyLexer.NOT_EQUAL : break; case edu.utah.ece.async.lema.verification.lpn.properties.PropertyLexer.GET : break; case edu.utah.ece.async.lema.verification.lpn.properties.PropertyLexer.LET : break; case edu.utah.ece.async.lema.verification.lpn.properties.PropertyLexer.GETEQ : break; case edu.utah.ece.async.lema.verification.lpn.properties.PropertyLexer.LETEQ : break; case edu.utah.ece.async.lema.verification.lpn.properties.PropertyLexer.AND : break; case edu.utah.ece.async.lema.verification.lpn.properties.PropertyLexer.OR : break; case edu.utah.ece.async.lema.verification.lpn.properties.PropertyLexer.SAMEAS : break; case edu.utah.ece.async.lema.verification.lpn.properties.PropertyLexer.WAIT : //System.out.println("wait statement: "); int count = switchCaseTree.getChildCount(); if (count==1){ enablingCond= generateExpression((CommonTree)switchCaseTree.getChild(0)); if(numPlaces==0){ lpnObj.addPlace("p"+numPlaces, true); numPlaces++; } lpnObj.addTransition("t" + numTransitions); lpnObj.changeDelay("t" + numTransitions, "0");; if(stayConditions != null){ lpnObj.addEnabling("t" +numTransitions, "(" + enablingCond + ")&" + stayConditions); }else{ lpnObj.addEnabling("t" +numTransitions, enablingCond); } numTransitions++; lpnObj.addMovement("p"+(numPlaces-1), "t" +(numTransitions-1)); lpnObj.addPlace("p"+numPlaces, false); numPlaces++; lpnObj.addMovement( "t" +(numTransitions-1),"p"+(numPlaces-1)); } else if(count==2){ enablingCond= generateExpression((CommonTree)switchCaseTree.getChild(0)); delay= generateExpression((CommonTree)switchCaseTree.getChild(1)); if(numPlaces==0){ lpnObj.addPlace("p"+numPlaces, true); numPlaces++; } lpnObj.addTransition("t" + numTransitions); lpnObj.changeDelay("t" + numTransitions, "0"); if(stayConditions != null){ lpnObj.addEnabling("t" +numTransitions, "(" + enablingCond + ")&" + stayConditions); }else{ lpnObj.addEnabling("t" +numTransitions, enablingCond); } numTransitions++; lpnObj.addMovement("p"+(numPlaces-1), "t" +(numTransitions-1)); lpnObj.addTransition("tFail" + numFailTransitions); lpnObj.getTransition("tFail" + numFailTransitions).setFail(true); if(stayConditions != null){ lpnObj.addEnabling("tFail" +numFailTransitions, "~("+enablingCond+")&" + stayConditions); }else{ lpnObj.addEnabling("tFail" +numFailTransitions, "~("+enablingCond+")"); } lpnObj.changeDelay("tFail" +numFailTransitions, delay); numFailTransitions++; lpnObj.addMovement("p"+(numPlaces-1), "tFail" +(numFailTransitions-1)); lpnObj.addPlace("pFail"+numFailPlaces, false); numFailPlaces++; lpnObj.addMovement( "tFail" +(numFailTransitions-1),"pFail"+(numFailPlaces-1)); lpnObj.addPlace("p"+numPlaces, false); numPlaces++; lpnObj.addMovement( "t" +(numTransitions-1),"p"+(numPlaces-1)); } break; case edu.utah.ece.async.lema.verification.lpn.properties.PropertyLexer.WAIT_DELAY: enablingCond = "t"; delay = generateExpression((CommonTree)switchCaseTree.getChild(0)); if(numPlaces==0){ lpnObj.addPlace("p"+numPlaces, true); numPlaces++; } lpnObj.addTransition("t" + numTransitions); lpnObj.addEnabling("t" +numTransitions, enablingCond); lpnObj.changeDelay("t" +numTransitions, delay); numTransitions++; lpnObj.addMovement("p"+(numPlaces-1), "t" +(numTransitions-1)); lpnObj.addPlace("p"+numPlaces, false); numPlaces++; lpnObj.addMovement( "t" +(numTransitions-1),"p"+(numPlaces-1)); break; 
case edu.utah.ece.async.lema.verification.lpn.properties.PropertyLexer.IF : boolean elsePartExists = false; //System.out.println("IF statement"); if(list.getItemCount()!=0){ list.removeAll(); } String condition= generateExpression((CommonTree)switchCaseTree.getChild(0)); list.add(condition); for(int j=0;j<switchCaseTree.getChildCount();j++){ if(switchCaseTree.getChild(j).getType()==edu.utah.ece.async.lema.verification.lpn.properties.PropertyLexer.ELSEIF){ condition= generateExpression((CommonTree)switchCaseTree.getChild(j).getChild(0)); list.add(condition); } } if(numPlaces==0){ lpnObj.addPlace("pStart"+numStartPlaces, true); pFirst= "pStart"+numStartPlaces; numStartPlaces++; lpnObj.addPlace("pEnd"+numEndPlaces, false); numEndPlaces++; } else{ pFirst= "p0"; lpnObj.addTransition("t" + numTransitions); lpnObj.changeDelay("t" + numTransitions, "0"); numTransitions++; lpnObj.addMovement("p"+(numPlaces-1), "t" +(numTransitions-1)); lpnObj.addPlace("pStart"+numStartPlaces, false); numStartPlaces++; lpnObj.addMovement( "t" +(numTransitions-1),"pStart"+(numStartPlaces-1)); lpnObj.addPlace("pEnd"+numEndPlaces, false); numEndPlaces++; } for(int x=0;x<list.getItemCount();x++){ //System.out.println("list is : "+list.getItem(x)+"\n"); } for(int j=0;j<switchCaseTree.getChildCount();j++){ if(j==0){ enablingCond= generateExpression((CommonTree)switchCaseTree.getChild(0)); lpnObj.addTransition("t" + numTransitions); lpnObj.changeDelay("t" + numTransitions, "0"); StringBuffer sb = new StringBuffer(); String newEnablingCond1= ""; for(int m=0;m<list.getItemCount();m++){ if(list.getItem(m).toString().equalsIgnoreCase(enablingCond)){ if(m==(list.getItemCount()-1)){ newEnablingCond1 = "("+list.getItem(m)+")"; } else{ newEnablingCond1 = "("+list.getItem(m)+")&"; } } else{ if(m==(list.getItemCount()-1)){ newEnablingCond1 = "~("+list.getItem(m)+")"; } else{ newEnablingCond1 = "~("+list.getItem(m)+")&"; } } sb.append(newEnablingCond1); } //String newEnablingCond = sb.toString(); //System.out.println("newEnablinCondition : "+newEnablingCond+"\n"); lpnObj.addEnabling("t" +numTransitions, enablingCond); numTransitions++; lpnObj.addMovement("pStart"+(numStartPlaces-1), "t" +(numTransitions-1)); lpnObj.addPlace("p"+numPlaces, false); numPlaces++; lpnObj.addMovement("t" +(numTransitions-1), "p"+(numPlaces-1)); } else if(!(switchCaseTree.getChild(j).getType()==edu.utah.ece.async.lema.verification.lpn.properties.PropertyLexer.ELSEIF) & !(switchCaseTree.getChild(j).getType()==edu.utah.ece.async.lema.verification.lpn.properties.PropertyLexer.ELSE)){ lpnObj=generateLPN((CommonTree)switchCaseTree.getChild(j), lpnObj, true, stayConditions); } } lpnObj.addTransition("t" + numTransitions); lpnObj.changeDelay("t" + numTransitions, "0"); numTransitions++; lpnObj.addMovement("p"+(numPlaces-1),"t" +(numTransitions-1)); lpnObj.addMovement("t" +(numTransitions-1), "pEnd"+(numEndPlaces-1)); for(int j=0;j<switchCaseTree.getChildCount();j++){ if(switchCaseTree.getChild(j).getType()==edu.utah.ece.async.lema.verification.lpn.properties.PropertyLexer.ELSEIF){ lpnObj=generateLPN((CommonTree)switchCaseTree.getChild(j), lpnObj, true, stayConditions); } } for(int j=0;j<switchCaseTree.getChildCount();j++){ if(switchCaseTree.getChild(j).getType()==edu.utah.ece.async.lema.verification.lpn.properties.PropertyLexer.ELSE){ elsePartExists = true; lpnObj=generateLPN((CommonTree)switchCaseTree.getChild(j), lpnObj, true, stayConditions); } } if(!elsePartExists){ String newEnablingCond1 = ""; StringBuffer sb = new StringBuffer(); for(int 
m=0;m<list.getItemCount();m++){ if(m==(list.getItemCount()-1)){ newEnablingCond1 = "~("+list.getItem(m)+")"; } else{ newEnablingCond1 = "~("+list.getItem(m)+")&"; } sb.append(newEnablingCond1); } String newEnablingCond = sb.toString(); //System.out.println(" condition is :"+newEnablingCond); lpnObj.addTransition("t" + numTransitions); lpnObj.addEnabling("t" +numTransitions, newEnablingCond); numTransitions++; lpnObj.addMovement("pStart"+(numStartPlaces-1),"t" +(numTransitions-1)); lpnObj.addMovement("t" +(numTransitions-1),"pEnd"+(numEndPlaces-1)); } lpnObj.addTransition("t" + numTransitions); lpnObj.changeDelay("t" + numTransitions, "0"); numTransitions++; lpnObj.addMovement("pEnd"+(numEndPlaces-1),"t" +(numTransitions-1)); lpnObj.addPlace("p"+numPlaces, false); numPlaces++; lpnObj.addMovement("t" +(numTransitions-1),"p"+(numPlaces-1)); break; case edu.utah.ece.async.lema.verification.lpn.properties.PropertyLexer.END : break; case edu.utah.ece.async.lema.verification.lpn.properties.PropertyLexer.ELSEIF : //System.out.println("ELSEIF "); for(int j=0;j<switchCaseTree.getChildCount();j++){ if(j==0){ enablingCond= generateExpression((CommonTree)switchCaseTree.getChild(0)); //System.out.println("enabling condition :"+enablingCond); if(numPlaces==0){ lpnObj.addPlace("p"+numPlaces, true); numPlaces++; } lpnObj.addTransition("t" + numTransitions); lpnObj.changeDelay("t" + numTransitions, "0"); StringBuffer sb2 = new StringBuffer(); String newEnablingCondition1= ""; int counter=-1; for(int m=0;m<list.getItemCount();m++){ //System.out.println("item :"+list.getItem(m).toString()); if(list.getItem(m).toString().equalsIgnoreCase(enablingCond)){ counter=m; } } //System.out.println("counter is : "+counter); for(int m=0;m<=counter;m++){ //System.out.println("item :"+list.getItem(m).toString()); if(list.getItem(m).toString().equalsIgnoreCase(enablingCond)){ newEnablingCondition1 = "("+list.getItem(m)+")"; } else{ if(m==(list.getItemCount()-1)){ newEnablingCondition1 = "~("+list.getItem(m)+")"; } else{ newEnablingCondition1 = "~("+list.getItem(m)+")&"; } } sb2.append(newEnablingCondition1); } String newEnablingCondition = sb2.toString(); //System.out.println("newEnablinCondition in ELSEIF : "+newEnablingCondition+"\n"); lpnObj.addEnabling("t" +numTransitions, newEnablingCondition); numTransitions++; lpnObj.addMovement("pStart"+(numStartPlaces-1), "t" +(numTransitions-1)); lpnObj.addPlace("p"+numPlaces, false); numPlaces++; lpnObj.addMovement("t" +(numTransitions-1), "p"+(numPlaces-1)); } else if(switchCaseTree.getChild(j).getType()==edu.utah.ece.async.lema.verification.lpn.properties.PropertyLexer.IF){ lpnObj=generateLPN((CommonTree)switchCaseTree.getChild(j), lpnObj, true, stayConditions); } else{ lpnObj=generateLPN((CommonTree)switchCaseTree.getChild(j), lpnObj, true, stayConditions); } } lpnObj.addTransition("t" + numTransitions); lpnObj.changeDelay("t" + numTransitions, "0"); numTransitions++; lpnObj.addMovement("p"+(numPlaces-1),"t" +(numTransitions-1)); lpnObj.addMovement("t" +(numTransitions-1), "pEnd"+(numEndPlaces-1)); break; case edu.utah.ece.async.lema.verification.lpn.properties.PropertyLexer.ELSE : //System.out.println("ELSE "); StringBuffer sb3 = new StringBuffer(); String newEnablingCondition2= ""; for(int m=0;m<list.getItemCount();m++){ if(m==(list.getItemCount()-1)){ newEnablingCondition2 = "~("+list.getItem(m)+")"; } else{ newEnablingCondition2 = "~("+list.getItem(m)+")&"; } sb3.append(newEnablingCondition2); } String newEnablingCond2 = sb3.toString(); 
//System.out.println("newEnablinCondition in ELSE : "+newEnablingCond2+"\n"); lpnObj.addTransition("t" + numTransitions); lpnObj.changeDelay("t" + numTransitions, "0"); lpnObj.addEnabling("t" +numTransitions, newEnablingCond2); numTransitions++; lpnObj.addMovement("pStart"+(numStartPlaces-1), "t" +(numTransitions-1)); lpnObj.addPlace("p"+numPlaces, false); numPlaces++; lpnObj.addMovement("t" +(numTransitions-1), "p"+(numPlaces-1)); for(int j=0;j<switchCaseTree.getChildCount();j++){ lpnObj=generateLPN((CommonTree)switchCaseTree.getChild(j), lpnObj, true, stayConditions); //enablingCond= generateExpression((CommonTree)switchCaseTree.getChild(0)); if(numPlaces==0){ lpnObj.addPlace("p"+numPlaces, true); numPlaces++; } } lpnObj.addTransition("t" + numTransitions); lpnObj.changeDelay("t" + numTransitions, "0"); numTransitions++; lpnObj.addMovement("p"+(numPlaces-1),"t" +(numTransitions-1)); lpnObj.addMovement("t" +(numTransitions-1), "pEnd"+(numEndPlaces-1)); break; case edu.utah.ece.async.lema.verification.lpn.properties.PropertyLexer.WAIT_POSEDGE : enablingCond= generateExpression((CommonTree)switchCaseTree.getChild(0)); //System.out.println(""); enablingCond= generateExpression((CommonTree)switchCaseTree.getChild(0)); if(numPlaces==0){ lpnObj.addPlace("p"+numPlaces, true); numPlaces++; } lpnObj.addTransition("t" + numTransitions); lpnObj.changeDelay("t" + numTransitions, "0"); if (stayConditions!=null) { lpnObj.addEnabling("t" +numTransitions, "~("+enablingCond+")" + "&" + stayConditions); } else { lpnObj.addEnabling("t" +numTransitions, "~("+enablingCond+")"); } numTransitions++; lpnObj.addMovement("p"+(numPlaces-1), "t" +(numTransitions-1)); lpnObj.addPlace("p"+numPlaces, false); numPlaces++; lpnObj.addMovement( "t" +(numTransitions-1),"p"+(numPlaces-1)); lpnObj.addTransition("t" + numTransitions); lpnObj.changeDelay("t" + numTransitions, "0"); if (stayConditions!=null) { lpnObj.addEnabling("t" +numTransitions, "(" + enablingCond + ")" + "&" + stayConditions); } else { lpnObj.addEnabling("t" +numTransitions, "(" + enablingCond + ")"); } numTransitions++; lpnObj.addMovement("p"+(numPlaces-1), "t" +(numTransitions-1)); lpnObj.addPlace("p"+numPlaces, false); numPlaces++; lpnObj.addMovement( "t" +(numTransitions-1),"p"+(numPlaces-1)); break; default : break; } } } return lpnObj; } public static String generateExpression(CommonTree newChild) { String result = ""; String string1= ""; String string2=""; if ( newChild != null ) { switch (newChild.getType()) { case edu.utah.ece.async.lema.verification.lpn.properties.PropertyLexer.WAIT: break; case edu.utah.ece.async.lema.verification.lpn.properties.PropertyLexer.IF: break; case edu.utah.ece.async.lema.verification.lpn.properties.PropertyLexer.ID : result= newChild.toString(); //System.out.println("String in ID : "+result); break; case edu.utah.ece.async.lema.verification.lpn.properties.PropertyLexer.FLOAT: result=newChild.toString(); break; case edu.utah.ece.async.lema.verification.lpn.properties.PropertyLexer.INT : result=newChild.toString(); //System.out.println("String in INT :"+result); break; case edu.utah.ece.async.lema.verification.lpn.properties.PropertyLexer.STRING : result=newChild.toString(); break; case edu.utah.ece.async.lema.verification.lpn.properties.PropertyLexer.WAIT_POSEDGE : result=generateExpression((CommonTree)newChild.getChild(0)); break; case edu.utah.ece.async.lema.verification.lpn.properties.PropertyLexer.GET : string1= generateExpression((CommonTree)newChild.getChild(0)); string2= 
generateExpression((CommonTree)newChild.getChild(1)); result= (string1 + ">" +string2); //System.out.println("String in GET :"+result); break; case edu.utah.ece.async.lema.verification.lpn.properties.PropertyLexer.AND : string1= generateExpression((CommonTree)newChild.getChild(0)); string2= generateExpression((CommonTree)newChild.getChild(1)); result= ("("+string1 + ")&(" +string2+")"); //result= (string1 + "&" +string2); break; case edu.utah.ece.async.lema.verification.lpn.properties.PropertyLexer.DIV : string1= generateExpression((CommonTree)newChild.getChild(0)); string2= generateExpression((CommonTree)newChild.getChild(1)); result= (string1 + "/" +string2); //System.out.println("result2 :"+result); break; case edu.utah.ece.async.lema.verification.lpn.properties.PropertyLexer.EQUAL : string1= generateExpression((CommonTree)newChild.getChild(0)); string2= generateExpression((CommonTree)newChild.getChild(1)); result= (string1 + "=" +string2); //System.out.println("result2 :"+result); break; case edu.utah.ece.async.lema.verification.lpn.properties.PropertyLexer.GETEQ : string1= generateExpression((CommonTree)newChild.getChild(0)); string2= generateExpression((CommonTree)newChild.getChild(1)); result= (string1 + ">=" +string2); break; case edu.utah.ece.async.lema.verification.lpn.properties.PropertyLexer.LET : string1= generateExpression((CommonTree)newChild.getChild(0)); string2= generateExpression((CommonTree)newChild.getChild(1)); result= (string1 + "<" +string2); //System.out.println("result2 :"+result); break; case edu.utah.ece.async.lema.verification.lpn.properties.PropertyLexer.LETEQ : string1= generateExpression((CommonTree)newChild.getChild(0)); string2= generateExpression((CommonTree)newChild.getChild(1)); result= (string1 + "<=" +string2); //System.out.println("result2 :"+result); break; case edu.utah.ece.async.lema.verification.lpn.properties.PropertyLexer.MINUS : string1= generateExpression((CommonTree)newChild.getChild(0)); string2= generateExpression((CommonTree)newChild.getChild(1)); result= (string1 + "-" +string2); //System.out.println("result2 :"+result); break; case edu.utah.ece.async.lema.verification.lpn.properties.PropertyLexer.MOD : string1= generateExpression((CommonTree)newChild.getChild(0)); string2= generateExpression((CommonTree)newChild.getChild(1)); result= (string1 + "%" +string2); //System.out.println("result2 :"+result); break; case edu.utah.ece.async.lema.verification.lpn.properties.PropertyLexer.MULT : string1= generateExpression((CommonTree)newChild.getChild(0)); string2= generateExpression((CommonTree)newChild.getChild(1)); result= (string1 + "*" +string2); //System.out.println("result2 :"+result); break; case edu.utah.ece.async.lema.verification.lpn.properties.PropertyLexer.NOT : string1= generateExpression((CommonTree)newChild.getChild(0)); result= ("~(" +string1+")"); //result= "~"+string1; //System.out.println("String in NOT :"+result); break; case edu.utah.ece.async.lema.verification.lpn.properties.PropertyLexer.NOT_EQUAL : string1= generateExpression((CommonTree)newChild.getChild(0)); string2= generateExpression((CommonTree)newChild.getChild(1)); result= (string1 + "!=" +string2); //System.out.println("result2 :"+result); break; case edu.utah.ece.async.lema.verification.lpn.properties.PropertyLexer.OR : string1= generateExpression((CommonTree)newChild.getChild(0)); string2= generateExpression((CommonTree)newChild.getChild(1)); result= (string1 + "|" +string2); //System.out.println("result2 :"+result); break; case 
edu.utah.ece.async.lema.verification.lpn.properties.PropertyLexer.PLUS : string1= generateExpression((CommonTree)newChild.getChild(0)); string2= generateExpression((CommonTree)newChild.getChild(1)); result= (string1 + " + " +string2); //System.out.println("result2 :"+result); break; case edu.utah.ece.async.lema.verification.lpn.properties.PropertyLexer.SAMEAS : string1= generateExpression((CommonTree)newChild.getChild(0)); string2= generateExpression((CommonTree)newChild.getChild(1)); result= (string1 + "=" +string2); //System.out.println("String in SAMEAS :"+result); break; case edu.utah.ece.async.lema.verification.lpn.properties.PropertyLexer.LPARA : string1= generateExpression((CommonTree)newChild.getChild(0)); string2= generateExpression((CommonTree)newChild.getChild(1)); result= "("; break; case edu.utah.ece.async.lema.verification.lpn.properties.PropertyLexer.RPARA : string1= generateExpression((CommonTree)newChild.getChild(0)); string2= generateExpression((CommonTree)newChild.getChild(1)); result= ")"; //System.out.println("String in SAMEAS :"+result); break; case edu.utah.ece.async.lema.verification.lpn.properties.PropertyLexer.UNIFORM : result= newChild.toString(); //System.out.println("String in UNIFORM :"+result); break; default : break; } } return result; } }
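A minimal usage sketch of BuildProperty, assuming a hypothetical property file /tmp/example.prop (the class name and path below are illustrative and not part of the original sources; the code above simply swaps the last four characters of the input name for "lpn"): buildProperty parses the SVA-style property, saves the generated LPN next to the input, and then emits an SBML translation via Translator.

// Illustrative sketch only; BuildPropertyExample and the file path are hypothetical.
import java.io.IOException;

import org.antlr.runtime.RecognitionException;

import edu.utah.ece.async.ibiosim.dataModels.util.exceptions.BioSimException;
import edu.utah.ece.async.lema.verification.lpn.properties.BuildProperty;

public class BuildPropertyExample {
    public static void main(String[] args) {
        try {
            // "/tmp/example.prop" -> "/tmp/example.lpn" (saved LPN) and "/tmp/example.xml" (SBML output).
            BuildProperty.buildProperty("/tmp/example.prop");
        } catch (IOException e) {
            e.printStackTrace();
        } catch (RecognitionException e) {
            e.printStackTrace();
        } catch (BioSimException e) {
            e.printStackTrace();
        }
    }
}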
/* * Copyright 1999-2015 dangdang.com. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * </p> */ package com.dangdang.ddframe.job.internal.sharding; import com.dangdang.ddframe.job.api.JobConfiguration; import com.dangdang.ddframe.job.fixture.TestJob; import com.dangdang.ddframe.job.internal.config.ConfigurationService; import com.dangdang.ddframe.job.internal.election.LeaderElectionService; import com.dangdang.ddframe.job.internal.env.LocalHostService; import com.dangdang.ddframe.job.internal.execution.ExecutionService; import com.dangdang.ddframe.job.internal.server.ServerService; import com.dangdang.ddframe.job.internal.storage.JobNodeStorage; import com.dangdang.ddframe.job.internal.storage.TransactionExecutionCallback; import com.dangdang.ddframe.job.plugin.sharding.strategy.AverageAllocationJobShardingStrategy; import org.apache.curator.framework.api.transaction.CuratorTransactionBridge; import org.apache.curator.framework.api.transaction.CuratorTransactionFinal; import org.apache.curator.framework.api.transaction.TransactionCreateBuilder; import org.apache.curator.framework.api.transaction.TransactionDeleteBuilder; import org.junit.Before; import org.junit.Test; import org.mockito.Mock; import org.mockito.MockitoAnnotations; import org.unitils.util.ReflectionUtils; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import static org.hamcrest.CoreMatchers.is; import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; import static org.mockito.Mockito.any; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; /** * * @author xiong.j support jdk1.6 * */ public final class ShardingServiceTest { @Mock private JobNodeStorage jobNodeStorage; @Mock private LocalHostService localHostService; @Mock private LeaderElectionService leaderElectionService; @Mock private ConfigurationService configService; @Mock private ExecutionService executionService; @Mock private ServerService serverService; private final JobConfiguration jobConfig = new JobConfiguration("testJob", TestJob.class, 3, "0/1 * * * * ?"); private final ShardingService shardingService = new ShardingService(null, jobConfig); @Before public void setUp() throws NoSuchFieldException { MockitoAnnotations.initMocks(this); ReflectionUtils.setFieldValue(shardingService, "jobNodeStorage", jobNodeStorage); ReflectionUtils.setFieldValue(shardingService, "localHostService", localHostService); ReflectionUtils.setFieldValue(shardingService, "leaderElectionService", leaderElectionService); ReflectionUtils.setFieldValue(shardingService, "configService", configService); ReflectionUtils.setFieldValue(shardingService, "executionService", executionService); ReflectionUtils.setFieldValue(shardingService, "serverService", serverService); when(localHostService.getIp()).thenReturn("mockedIP"); when(localHostService.getHostName()).thenReturn("mockedHostName"); 
when(jobNodeStorage.getJobConfiguration()).thenReturn(jobConfig); } @Test public void assertSetReshardingFlag() { shardingService.setReshardingFlag(); verify(jobNodeStorage).createJobNodeIfNeeded("leader/sharding/necessary"); } @Test public void assertIsNeedSharding() { when(jobNodeStorage.isJobNodeExisted("leader/sharding/necessary")).thenReturn(true); assertTrue(shardingService.isNeedSharding()); verify(jobNodeStorage).isJobNodeExisted("leader/sharding/necessary"); } @Test public void assertShardingWhenUnnecessary() { when(jobNodeStorage.isJobNodeExisted("leader/sharding/necessary")).thenReturn(false); shardingService.shardingIfNecessary(); verify(jobNodeStorage).isJobNodeExisted("leader/sharding/necessary"); } @Test public void assertShardingWhenIsNotLeaderAndIsShardingProcessing() { when(jobNodeStorage.isJobNodeExisted("leader/sharding/necessary")).thenReturn(true, true, false, false); when(leaderElectionService.isLeader()).thenReturn(false); when(jobNodeStorage.isJobNodeExisted("leader/sharding/processing")).thenReturn(true, false); shardingService.shardingIfNecessary(); verify(jobNodeStorage, times(4)).isJobNodeExisted("leader/sharding/necessary"); verify(jobNodeStorage, times(2)).isJobNodeExisted("leader/sharding/processing"); } @Test public void assertShardingNecessaryWhenMonitorExecutionEnabled() { when(jobNodeStorage.isJobNodeExisted("leader/sharding/necessary")).thenReturn(true); when(leaderElectionService.isLeader()).thenReturn(true); when(configService.isMonitorExecution()).thenReturn(true); when(serverService.getAllServers()).thenReturn(Arrays.asList("ip1", "ip2")); when(executionService.hasRunningItems()).thenReturn(true, false); when(configService.getJobShardingStrategyClass()).thenReturn(AverageAllocationJobShardingStrategy.class.getCanonicalName()); when(configService.getShardingTotalCount()).thenReturn(3); when(configService.getShardingItemParameters()).thenReturn(Collections.<Integer, String>emptyMap()); shardingService.shardingIfNecessary(); verify(jobNodeStorage).isJobNodeExisted("leader/sharding/necessary"); verify(leaderElectionService).isLeader(); verify(configService).isMonitorExecution(); verify(executionService, times(2)).hasRunningItems(); verify(jobNodeStorage).removeJobNodeIfExisted("servers/ip1/sharding"); verify(jobNodeStorage).removeJobNodeIfExisted("servers/ip2/sharding"); verify(jobNodeStorage).fillEphemeralJobNode("leader/sharding/processing", ""); verify(configService).getJobShardingStrategyClass(); verify(configService).getShardingTotalCount(); verify(configService).getShardingItemParameters(); verify(jobNodeStorage).executeInTransaction(any(TransactionExecutionCallback.class)); } @Test public void assertShardingNecessaryWhenMonitorExecutionDisabled() throws Exception { when(jobNodeStorage.isJobNodeExisted("leader/sharding/necessary")).thenReturn(true); when(leaderElectionService.isLeader()).thenReturn(true); when(configService.isMonitorExecution()).thenReturn(false); when(serverService.getAllServers()).thenReturn(Arrays.asList("ip1", "ip2")); when(configService.getJobShardingStrategyClass()).thenReturn(AverageAllocationJobShardingStrategy.class.getCanonicalName()); when(configService.getShardingTotalCount()).thenReturn(3); when(configService.getShardingItemParameters()).thenReturn(Collections.<Integer, String>emptyMap()); shardingService.shardingIfNecessary(); verify(jobNodeStorage).isJobNodeExisted("leader/sharding/necessary"); verify(leaderElectionService).isLeader(); verify(configService).isMonitorExecution(); 
verify(jobNodeStorage).removeJobNodeIfExisted("servers/ip1/sharding"); verify(jobNodeStorage).removeJobNodeIfExisted("servers/ip2/sharding"); verify(jobNodeStorage).fillEphemeralJobNode("leader/sharding/processing", ""); verify(configService).getJobShardingStrategyClass(); verify(configService).getShardingTotalCount(); verify(configService).getShardingItemParameters(); verify(jobNodeStorage).executeInTransaction(any(TransactionExecutionCallback.class)); } @Test public void assertGetLocalHostShardingItemsWhenNodeExisted() { when(jobNodeStorage.isJobNodeExisted("servers/mockedIP/sharding")).thenReturn(true); when(jobNodeStorage.getJobNodeDataDirectly("servers/mockedIP/sharding")).thenReturn("0,1,2"); assertThat(shardingService.getLocalHostShardingItems(), is(Arrays.asList(0, 1, 2))); verify(jobNodeStorage).isJobNodeExisted("servers/mockedIP/sharding"); verify(jobNodeStorage).getJobNodeDataDirectly("servers/mockedIP/sharding"); } @Test public void assertGetLocalHostShardingWhenNodeNotExisted() { when(jobNodeStorage.isJobNodeExisted("servers/mockedIP/sharding")).thenReturn(false); assertThat(shardingService.getLocalHostShardingItems(), is(Collections.EMPTY_LIST)); verify(jobNodeStorage).isJobNodeExisted("servers/mockedIP/sharding"); } @Test public void assertPersistShardingInfoTransactionExecutionCallback() throws Exception { CuratorTransactionFinal curatorTransactionFinal = mock(CuratorTransactionFinal.class); TransactionCreateBuilder transactionCreateBuilder = mock(TransactionCreateBuilder.class); TransactionDeleteBuilder transactionDeleteBuilder = mock(TransactionDeleteBuilder.class); CuratorTransactionBridge curatorTransactionBridge = mock(CuratorTransactionBridge.class); when(curatorTransactionFinal.create()).thenReturn(transactionCreateBuilder); when(transactionCreateBuilder.forPath("/testJob/servers/host0/sharding", "0,1,2".getBytes())).thenReturn(curatorTransactionBridge); when(curatorTransactionBridge.and()).thenReturn(curatorTransactionFinal); when(curatorTransactionFinal.delete()).thenReturn(transactionDeleteBuilder); when(transactionDeleteBuilder.forPath("/testJob/leader/sharding/necessary")).thenReturn(curatorTransactionBridge); when(curatorTransactionBridge.and()).thenReturn(curatorTransactionFinal); when(curatorTransactionFinal.delete()).thenReturn(transactionDeleteBuilder); when(transactionDeleteBuilder.forPath("/testJob/leader/sharding/processing")).thenReturn(curatorTransactionBridge); when(curatorTransactionBridge.and()).thenReturn(curatorTransactionFinal); Map<String, List<Integer>> shardingItems = new HashMap<String, List<Integer>>(1); shardingItems.put("host0", Arrays.asList(0, 1, 2)); ShardingService.PersistShardingInfoTransactionExecutionCallback actual = shardingService.new PersistShardingInfoTransactionExecutionCallback(shardingItems); actual.execute(curatorTransactionFinal); verify(curatorTransactionFinal).create(); verify(transactionCreateBuilder).forPath("/testJob/servers/host0/sharding", "0,1,2".getBytes()); verify(curatorTransactionFinal, times(2)).delete(); verify(transactionDeleteBuilder).forPath("/testJob/leader/sharding/necessary"); verify(transactionDeleteBuilder).forPath("/testJob/leader/sharding/processing"); verify(curatorTransactionBridge, times(3)).and(); } }
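For reference, a small self-contained sketch of the behaviour the tests above assert for getLocalHostShardingItems(): the node servers/<ip>/sharding holds a comma-separated list of item indices such as "0,1,2", and a missing node maps to an empty list. The helper class below is hypothetical and not part of elastic-job; it only mirrors the mapping checked by assertGetLocalHostShardingItemsWhenNodeExisted.

// Hypothetical helper (illustrative only): "0,1,2" -> [0, 1, 2], null/empty -> [].
import java.util.ArrayList;
import java.util.List;

final class ShardingItemsParser {

    static List<Integer> parse(String nodeData) {
        List<Integer> result = new ArrayList<Integer>();
        if (nodeData == null || nodeData.length() == 0) {
            return result;
        }
        for (String each : nodeData.split(",")) {
            result.add(Integer.parseInt(each.trim()));
        }
        return result;
    }

    private ShardingItemsParser() {
    }
}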
package com.kd.klink.ars.util;

import java.text.DecimalFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.GregorianCalendar;

import com.remedy.arsys.api.*;

import org.apache.log4j.*;

/**
 * This is a utility class used for operations common to Ars as a datasource.
 */
public class ArsUtil {
    private static final Logger logger = Logger.getLogger(ArsUtil.class.getName());

    private static final DecimalFormat twoDigitFormatter = new DecimalFormat("00");
    private static final SimpleDateFormat iso8601DateFormatter = new SimpleDateFormat("yyyy-MM-dd");
    private static final SimpleDateFormat iso8601DateTimeFormatter = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss");

    /** Converts a string in ISO8601 Format into the string corresponding to an Ars internal date representation */
    public static String convertIso8601FormatToArsDate(String dateString) throws ParseException {
        int years, months, days;
        DateInfo dateInfo = new DateInfo();
        try {
            years = Integer.parseInt(dateString.substring(0,4));
        } catch (NumberFormatException e) {
            throw new ParseException("Unparseable date: " + dateString, 0);
        }
        try {
            months = Integer.parseInt(dateString.substring(5,7));
        } catch (NumberFormatException e) {
            throw new ParseException("Unparseable date: " + dateString, 5);
        }
        try {
            days = Integer.parseInt(dateString.substring(8));
        } catch (NumberFormatException e) {
            throw new ParseException("Unparseable date: " + dateString, 8);
        }
        dateInfo.setDate(years, months, days);
        return String.valueOf(dateInfo.getValue());
    }

    /** Converts a string in ISO8601 Format into the string corresponding to an Ars internal time representation */
    public static String convertIso8601FormatToArsTime(String timeString) throws ParseException {
        Date date = iso8601DateTimeFormatter.parse(timeString);
        int secondsSinceEpoch = (int)(date.getTime() / 1000);
        return String.valueOf(secondsSinceEpoch);
    }

    /** Converts a string in ISO8601 Format into the string corresponding to an Ars internal time of day representation */
    public static String convertIso8601FormatToArsTimeOfDay(String timeOfDayString) throws ParseException {
        int hours, minutes, seconds;
        int total = 0;
        try {
            hours = Integer.parseInt(timeOfDayString.substring(0,2));
        } catch (NumberFormatException e) {
            throw new ParseException("Unparseable time: " + timeOfDayString, 0);
        }
        try {
            minutes = Integer.parseInt(timeOfDayString.substring(3,5));
        } catch (NumberFormatException e) {
            throw new ParseException("Unparseable time: " + timeOfDayString, 3);
        }
        try {
            seconds = Integer.parseInt(timeOfDayString.substring(6));
        } catch (NumberFormatException e) {
            throw new ParseException("Unparseable time: " + timeOfDayString, 6);
        }
        total = hours*3600 + minutes*60 + seconds;
        return String.valueOf(total);
    }

    /** Converts a date string in Ars internal format into the corresponding iso8601 string. */
    public static String convertArsDateToIso8601Format(DateInfo dateInfo) {
        Date date = new Date(dateInfo.GetDate().getTimeInMillis());
        return iso8601DateFormatter.format(date);
    }

    /** Converts a time string in Ars internal format into the corresponding iso8601 string. */
    public static String convertArsTimeToIso8601Format(String time) {
        Date date = new Date(Long.parseLong(time) * 1000);
        return iso8601DateTimeFormatter.format(date);
    }

    /** Converts a time of day string in Ars internal format into the corresponding iso8601 string.
     */
    public static String convertArsTimeOfDayToIso8601Format(String timeOfDay) {
        int time = Integer.parseInt(timeOfDay);
        int hours, minutes, seconds;
        for (hours = 0; time >= 3600; hours++) { time -= 3600; }
        for (minutes = 0; time >= 60; minutes++) { time -= 60; }
        seconds = time;
        return twoDigitFormatter.format(hours) + ":" + twoDigitFormatter.format(minutes) + ":" + twoDigitFormatter.format(seconds);
    }

    /** Returns the string value associated with the internal integer representation of an attachment type. */
    public static String decodeAttachmentType(int attachmentType) {
        java.lang.String attachmentTypeString;
        switch (attachmentType) {
            case Constants.AR_ATTACH_FIELD_TYPE_EMBED: attachmentTypeString = "EMBEDDED"; break;
            case Constants.AR_ATTACH_FIELD_TYPE_LINK: attachmentTypeString = "LINK"; break;
            default:
                logger.debug("Unknown AttachmentType: " + attachmentType);
                attachmentTypeString = "UNKNOWN_TYPE";
                break;
        }
        return attachmentTypeString;
    }

    /** Returns the internal integer representation associated with the string value of an attachment type. */
    public static int encodeAttachmentType(String attachmentType) {
        int attachmentTypeInt;
        if (attachmentType.equals("EMBEDDED")) { attachmentTypeInt = Constants.AR_ATTACH_FIELD_TYPE_EMBED; }
        else if (attachmentType.equals("LINK")) { attachmentTypeInt = Constants.AR_ATTACH_FIELD_TYPE_LINK; }
        else {
            logger.debug("Unknown AttachmentType: " + attachmentType);
            attachmentTypeInt = 0;
        }
        return attachmentTypeInt;
    }

    /** Returns the string value associated with the internal integer representation of a permission type. */
    public static String decodeFieldPermissionType(int permissionType) {
        java.lang.String permissionTypeString;
        switch (permissionType) {
            case Constants.AR_PERMISSIONS_CHANGE: permissionTypeString = "CHANGE"; break;
            case Constants.AR_PERMISSIONS_NONE: permissionTypeString = "NONE"; break;
            case Constants.AR_PERMISSIONS_VIEW: permissionTypeString = "VIEW"; break;
            default:
                logger.debug("Unknown FieldPermissionType: " + permissionType);
                permissionTypeString = "UNKNOWN_TYPE";
                break;
        }
        return permissionTypeString;
    }

    /** Returns the internal integer representation associated with the string value of a permission type. */
    public static int encodeFieldPermissionType(String permissionType) {
        int permissionTypeInt;
        // Mirrors decodeFieldPermissionType: permission strings map to the AR_PERMISSIONS_* constants.
        if (permissionType.equals("CHANGE")) { permissionTypeInt = Constants.AR_PERMISSIONS_CHANGE; }
        else if (permissionType.equals("NONE")) { permissionTypeInt = Constants.AR_PERMISSIONS_NONE; }
        else if (permissionType.equals("VIEW")) { permissionTypeInt = Constants.AR_PERMISSIONS_VIEW; }
        else {
            logger.debug("Unknown FieldPermissionType: " + permissionType);
            permissionTypeInt = 0;
        }
        return permissionTypeInt;
    }

    /** Returns the string value associated with the internal integer representation of a schema permission type. */
    public static String decodeSchemaPermissionType(int permissionType) {
        java.lang.String permissionTypeString;
        switch (permissionType) {
            case Constants.AR_PERMISSIONS_HIDDEN: permissionTypeString = "HIDDEN"; break;
            case Constants.AR_PERMISSIONS_NONE: permissionTypeString = "NONE"; break;
            case Constants.AR_PERMISSIONS_VISIBLE: permissionTypeString = "VISIBLE"; break;
            default:
                logger.debug("Unknown SchemaPermissionType: " + permissionType);
                permissionTypeString = "UNKNOWN_TYPE";
                break;
        }
        return permissionTypeString;
    }

    /** Returns the internal integer representation associated with the string value of a schema permission type. */
    public static int encodeSchemaPermissionType(String permissionType) {
        int permissionTypeInt;
        // Mirrors decodeSchemaPermissionType: permission strings map to the AR_PERMISSIONS_* constants.
        if (permissionType.equals("HIDDEN")) { permissionTypeInt = Constants.AR_PERMISSIONS_HIDDEN; }
        else if (permissionType.equals("NONE")) { permissionTypeInt = Constants.AR_PERMISSIONS_NONE; }
        else if (permissionType.equals("VISIBLE")) { permissionTypeInt = Constants.AR_PERMISSIONS_VISIBLE; }
        else {
            logger.debug("Unknown SchemaPermissionType: " + permissionType);
            permissionTypeInt = 0;
        }
        return permissionTypeInt;
    }

    /** Returns the string value associated with the internal integer representation of a message type. */
    public static String decodeMessageType(int messageType) {
        java.lang.String messageTypeString;
        switch (messageType) {
            case Constants.AR_RETURN_OK: messageTypeString = "OK"; break;
            case Constants.AR_RETURN_WARNING: messageTypeString = "WARNING"; break;
            case Constants.AR_RETURN_ERROR: messageTypeString = "ERROR"; break;
            case Constants.AR_RETURN_FATAL: messageTypeString = "FATAL"; break;
            case Constants.AR_RETURN_BAD_STATUS: messageTypeString = "BAD_STATUS"; break;
            case Constants.AR_RETURN_PROMPT: messageTypeString = "PROMPT"; break;
            case Constants.AR_RETURN_ACCESSIBLE: messageTypeString = "ACCESSIBLE"; break;
            default:
                logger.debug("Unknown MessageType: " + messageType);
                messageTypeString = "UNKNOWN_TYPE";
                break;
        }
        return messageTypeString;
    }

    /** Returns the internal integer representation associated with the string value of a message type. */
    public static int encodeMessageType(String messageType) {
        int messageTypeInt;
        if (messageType.equals("OK")) { messageTypeInt = Constants.AR_RETURN_OK; }
        else if (messageType.equals("WARNING")) { messageTypeInt = Constants.AR_RETURN_WARNING; }
        else if (messageType.equals("ERROR")) { messageTypeInt = Constants.AR_RETURN_ERROR; }
        else if (messageType.equals("FATAL")) { messageTypeInt = Constants.AR_RETURN_FATAL; }
        else if (messageType.equals("BAD_STATUS")) { messageTypeInt = Constants.AR_RETURN_BAD_STATUS; }
        else if (messageType.equals("PROMPT")) { messageTypeInt = Constants.AR_RETURN_PROMPT; }
        else if (messageType.equals("ACCESSIBLE")) { messageTypeInt = Constants.AR_RETURN_ACCESSIBLE; }
        else {
            logger.debug("Unknown MessageType: " + messageType);
            messageTypeInt = 0;
        }
        return messageTypeInt;
    }

    public static String decodeFieldOption(int fieldOption) {
        java.lang.String fieldOptionString;
        switch (fieldOption) {
            case 1: fieldOptionString = "REQUIRED"; break;
            case 2: fieldOptionString = "OPTIONAL"; break;
            case 3: fieldOptionString = "SYSTEM"; break;
            case 4: fieldOptionString = "DISPLAY_ONLY"; break;
            default:
                logger.debug("Unknown FieldOption: " + fieldOption);
                fieldOptionString = "UNKNOWN_OPTION";
                break;
        }
        return fieldOptionString;
    }

    public static int encodeFieldOption(String fieldOption) {
        int fieldOptionInt;
        if (fieldOption.equals("REQUIRED")) { fieldOptionInt = 1; }
        else if (fieldOption.equals("OPTIONAL")) { fieldOptionInt = 2; }
        else if (fieldOption.equals("SYSTEM")) { fieldOptionInt = 3; }
        else if (fieldOption.equals("DISPLAY_ONLY")) { fieldOptionInt = 4; }
        else {
            logger.debug("Unknown FieldOption: " + fieldOption);
            fieldOptionInt = 0;
        }
        return fieldOptionInt;
    }

    /**
     * Returns the string representation of a Schema type integer constant or
     * "Unknown Type" if the type in integer form is unrecognized.
* * 1 = BASE, 2 = JOIN, 3 = VIEW, 4 = DISPLAY-ONLY */ public static String decodeSchemaType(int schemaType) { java.lang.String schemaTypeString; switch (schemaType) { case 1: schemaTypeString = "BASE"; break; case 2: schemaTypeString = "JOIN"; break; case 3: schemaTypeString = "VIEW"; break; case 4: schemaTypeString = "DISPLAY_ONLY"; break; default: logger.debug("Unknown SchemaType: " + schemaType); schemaTypeString = "UNKNOWN_TYPE"; break; } return schemaTypeString; } /** * Returns the integer representation of a Schema type string constant or * 0 if the type in string form is unrecognized. * * 1 = BASE, 2 = JOIN, 3 = VIEW, 4 = DISPLAY-ONLY, 0 = Unknown Type */ public static int encodeSchemaType(String schemaType) { int schemaTypeInt; if (schemaType.equals("BASE")) { schemaTypeInt = 1; } else if (schemaType.equals("JOIN")) { schemaTypeInt = 2; } else if (schemaType.equals("VIEW")) { schemaTypeInt = 3; } else if (schemaType.equals("DISPLAY_ONLY")) { schemaTypeInt = 4; } else { logger.debug("Unknown SchemaType: " + schemaType); schemaTypeInt = 0; } return schemaTypeInt; } /** * Returns the string representation of a field type integer constant (as * defined in com.remedy.arsys.api.Constants). Valid strings include: * 'ATTACHMENT', 'ATTACHMENT_POOL', 'COLUMN', 'CONTROL', 'DATA', 'PAGE', * 'PAGE_HOLDER', 'TABLE', and 'TRIM'. */ public static String decodeFieldType(int fieldType) { java.lang.String fieldTypeString; switch (fieldType) { case Constants.AR_FIELD_TYPE_ATTACH: fieldTypeString = "ATTACHMENT"; break; case Constants.AR_FIELD_TYPE_ATTACH_POOL: fieldTypeString = "ATTACHMENT_POOL"; break; case Constants.AR_FIELD_TYPE_COLUMN: fieldTypeString = "COLUMN"; break; case Constants.AR_FIELD_TYPE_CONTROL: fieldTypeString = "CONTROL"; break; case Constants.AR_FIELD_TYPE_DATA: fieldTypeString = "DATA"; break; case Constants.AR_FIELD_TYPE_PAGE: fieldTypeString = "PAGE"; break; case Constants.AR_FIELD_TYPE_PAGE_HOLDER: fieldTypeString = "PAGE_HOLDER"; break; case Constants.AR_FIELD_TYPE_TABLE: fieldTypeString = "TABLE"; break; case Constants.AR_FIELD_TYPE_TRIM: fieldTypeString = "TRIM"; break; case 0: fieldTypeString = "DATA_VISUALIZATION"; break; default: logger.debug("Unknown FieldType: " + fieldType); fieldTypeString = "UNKNOWN_TYPE"; break; } return fieldTypeString; } /** * Returns the integer representation of a field type string constant (as * defined in com.remedy.arsys.api.Constants). Valid strings include: * 'ATTACHMENT', 'ATTACHMENT_POOL', 'COLUMN', 'CONTROL', 'DATA', 'PAGE', * 'PAGE_HOLDER', 'TABLE', and 'TRIM'. 
*/ public static int encodeFieldType(String fieldType) { int fieldTypeInt; if (fieldType.equals("ATTACHMENT")) { fieldTypeInt = Constants.AR_FIELD_TYPE_ATTACH; } else if (fieldType.equals("ATTACHMENT_POOL")) { fieldTypeInt = Constants.AR_FIELD_TYPE_ATTACH_POOL; } else if (fieldType.equals("COLUMN")) { fieldTypeInt = Constants.AR_FIELD_TYPE_COLUMN; } else if (fieldType.equals("CONTROL")) { fieldTypeInt = Constants.AR_FIELD_TYPE_CONTROL; } else if (fieldType.equals("DATA")) { fieldTypeInt = Constants.AR_FIELD_TYPE_DATA; } else if (fieldType.equals("DATA_VISUALIZATION")) { fieldTypeInt = 0; } else if (fieldType.equals("PAGE")) { fieldTypeInt = Constants.AR_FIELD_TYPE_PAGE; } else if (fieldType.equals("PAGE_HOLDER")) { fieldTypeInt = Constants.AR_FIELD_TYPE_PAGE_HOLDER; } else if (fieldType.equals("TABLE")) { fieldTypeInt = Constants.AR_FIELD_TYPE_TABLE; } else if (fieldType.equals("TRIM")) { fieldTypeInt = Constants.AR_FIELD_TYPE_TRIM; } else { logger.debug("Unknown FieldType: " + fieldType); fieldTypeInt = Constants.AR_DATA_TYPE_NULL; } return fieldTypeInt; } /** */ public static String decodeDataType(int dataType) { java.lang.String dataTypeString; switch (dataType) { case Constants.AR_DATA_TYPE_ATTACH: dataTypeString = "ATTACHMENT"; break; case Constants.AR_DATA_TYPE_ATTACH_POOL: dataTypeString = "ATTACHMENT_POOL"; break; case Constants.AR_DATA_TYPE_BITMASK: dataTypeString = "BITMASK"; break; case Constants.AR_DATA_TYPE_BYTES: dataTypeString = "BYTES"; break; case Constants.AR_DATA_TYPE_CHAR: dataTypeString = "CHAR"; break; case Constants.AR_DATA_TYPE_COLUMN: dataTypeString = "COLUMN"; break; case Constants.AR_DATA_TYPE_CONTROL: dataTypeString = "CONTROL"; break; case Constants.AR_DATA_TYPE_COORDS: dataTypeString = "COORDS"; break; case Constants.AR_DATA_TYPE_CURRENCY: dataTypeString = "CURRENCY"; break; case Constants.AR_DATA_TYPE_DATE: dataTypeString = "DATE"; break; case Constants.AR_DATA_TYPE_DECIMAL: dataTypeString = "DECIMAL"; break; case Constants.AR_DATA_TYPE_DIARY: dataTypeString = "DIARY"; break; case Constants.AR_DATA_TYPE_DISPLAY: dataTypeString = "DISPLAY"; break; case Constants.AR_DATA_TYPE_ENUM: dataTypeString = "ENUM"; break; case Constants.AR_DATA_TYPE_INTEGER: dataTypeString = "INTEGER"; break; case Constants.AR_DATA_TYPE_JOIN: dataTypeString = "JOIN"; break; case Constants.AR_DATA_TYPE_KEYWORD: dataTypeString = "KEYWORD"; break; case Constants.AR_DATA_TYPE_NULL: dataTypeString = "NULL"; break; case Constants.AR_DATA_TYPE_PAGE: dataTypeString = "PAGE"; break; case Constants.AR_DATA_TYPE_PAGE_HOLDER: dataTypeString = "PAGE_HOLDER"; break; case Constants.AR_DATA_TYPE_REAL: dataTypeString = "REAL"; break; case Constants.AR_DATA_TYPE_TABLE: dataTypeString = "TABLE"; break; case Constants.AR_DATA_TYPE_TIME: dataTypeString = "TIME"; break; case Constants.AR_DATA_TYPE_TIME_OF_DAY: dataTypeString = "TIME_OF_DAY"; break; case Constants.AR_DATA_TYPE_TRIM: dataTypeString = "TRIM"; break; case Constants.AR_DATA_TYPE_ULONG: dataTypeString = "ULONG"; break; case Constants.AR_DATA_TYPE_VIEW: dataTypeString = "VIEW"; break; default: logger.debug("Unknown DataType: " + dataType); dataTypeString = "UNKNOWN_TYPE"; break; } return dataTypeString; } /** */ public static int encodeDataType(String dataType) { int dataTypeInt; if (dataType.equals("ATTACHMENT")) { dataTypeInt = Constants.AR_DATA_TYPE_ATTACH; } else if (dataType.equals("ATTACHMENT_POOL")) { dataTypeInt = Constants.AR_DATA_TYPE_ATTACH_POOL; } else if (dataType.equals("BITMASK")) { dataTypeInt = Constants.AR_DATA_TYPE_BITMASK; } 
else if (dataType.equals("BYTES")) { dataTypeInt = Constants.AR_DATA_TYPE_BYTES; } else if (dataType.equals("CHAR")) { dataTypeInt = Constants.AR_DATA_TYPE_CHAR; } else if (dataType.equals("COLUMN")) { dataTypeInt = Constants.AR_DATA_TYPE_COLUMN; } else if (dataType.equals("CONTROL")) { dataTypeInt = Constants.AR_DATA_TYPE_CONTROL; } else if (dataType.equals("COORDS")) { dataTypeInt = Constants.AR_DATA_TYPE_COORDS; } else if (dataType.equals("CURRENCY")) { dataTypeInt = Constants.AR_DATA_TYPE_CURRENCY; } else if (dataType.equals("DATE")) { dataTypeInt = Constants.AR_DATA_TYPE_DATE; } else if (dataType.equals("DECIMAL")) { dataTypeInt = Constants.AR_DATA_TYPE_DECIMAL; } else if (dataType.equals("DIARY")) { dataTypeInt = Constants.AR_DATA_TYPE_DIARY; } else if (dataType.equals("DISPLAY")) { dataTypeInt = Constants.AR_DATA_TYPE_DISPLAY; } else if (dataType.equals("ENUM")) { dataTypeInt = Constants.AR_DATA_TYPE_ENUM; } else if (dataType.equals("INTEGER")) { dataTypeInt = Constants.AR_DATA_TYPE_INTEGER; } else if (dataType.equals("JOIN")) { dataTypeInt = Constants.AR_DATA_TYPE_JOIN; } else if (dataType.equals("KEYWORD")) { dataTypeInt = Constants.AR_DATA_TYPE_KEYWORD; } else if (dataType.equals("PAGE")) { dataTypeInt = Constants.AR_DATA_TYPE_PAGE; } else if (dataType.equals("PAGE_HOLDER")) { dataTypeInt = Constants.AR_DATA_TYPE_PAGE_HOLDER; } else if (dataType.equals("REAL")) { dataTypeInt = Constants.AR_DATA_TYPE_REAL; } else if (dataType.equals("TABLE")) { dataTypeInt = Constants.AR_DATA_TYPE_TABLE; } else if (dataType.equals("TIME")) { dataTypeInt = Constants.AR_DATA_TYPE_TIME; } else if (dataType.equals("TIME_OF_DAY")) { dataTypeInt = Constants.AR_DATA_TYPE_TIME_OF_DAY; } else if (dataType.equals("TRIM")) { dataTypeInt = Constants.AR_DATA_TYPE_TRIM; } else if (dataType.equals("ULONG")) { dataTypeInt = Constants.AR_DATA_TYPE_ULONG; } else if (dataType.equals("VIEW")) { dataTypeInt = Constants.AR_DATA_TYPE_VIEW; } else { logger.debug("Unknown DataType: " + dataType); dataTypeInt = Constants.AR_DATA_TYPE_NULL; } return dataTypeInt; } }
package wah.giovann.csvhandler; import wah.giovann.csvhandler.error.CSVIntegrityException; import wah.giovann.csvhandler.error.ValueConversionException; import java.util.*; /** * The <code>CSVRecord</code> class represents a row of data in a .csv file. <code>CSVRecord</code>s are not designed * to be instantiated manually. An instance of this class * can only be created in the context of first creating a <code>CSVArray</code> object, and then accessing its <code>CSVRecord</code>s * as you would the members of any other <code>List</code> object. All data in a <code>CSVRecord</code> are stored as <code>String</code>s. * @author Giovann Wah * @version 1.0 */ public class CSVRecord { private ArrayList<String> data; private CSVHeader sharedHeader; protected CSVRecord(CSVHeader h) { this.sharedHeader = h; this.data = new ArrayList<>(); for (int i = 0; i < h.totalColumns(); i++){ this.data.add(""); } } protected CSVRecord(CSVHeader h, ArrayList<String> d) { if (h.totalColumns() == d.size()) { this.sharedHeader = h; this.data = new ArrayList<>(d); } else { List<Object> headerAndData = new ArrayList<>(); headerAndData.add(h.getColumnsList()); headerAndData.add(d); throw new CSVIntegrityException(CSVIntegrityException.HEADER_AND_RECORD_DATA_INCOMPATABLE, headerAndData); } } protected CSVRecord(CSVRecord other) { this.sharedHeader = other.sharedHeader; this.data = new ArrayList<>(other.data); } /** * Returns true if one of the data items in this instance is mapped to the header column <code>name</code>. * @param name The name of the header to check for. * @return True if the header contains this value */ public boolean containsHeaderColumn(String name){ return this.sharedHeader.containsColumn(name); } /** * Returns true if this <code>CSVRecord</code> contains this <code>String</code> value at any index. * @param value The <code>String</code> to search for. * @return True if this object contains the value. */ public boolean containsValue(String value){ return data.contains(value); } /** * Returns the <code>String</code> value at the specified column. * @param columnName The name of the column to get the value from. * @return The <code>String</code> value at the specified column. */ public String get(String columnName) { if (!this.sharedHeader.containsColumn(columnName)) { throw new ArrayIndexOutOfBoundsException("No column named \""+columnName+"\" exists in this CSVRecord."); } return this.get(this.sharedHeader.indexOfColumn(columnName)); } /** * Returns the <code>String</code> value at the specified column. * @param column The index of the column to get the value from. * @return The <code>String</code> value at the specified column. */ public String get(int column) { return data.get(column); } /** * Returns the name of the column at the specified index. * @param index The column index * @return The name of the column at that index. */ public String getColumnName(int index) { return this.sharedHeader.getColumnName(index); } /** * Returns the total number of values in this object. * @return The number of values in this <code>CSVRecord.</code> */ public int size(){ return this.data.size(); } /** * Checks if this record is empty or not. A record is considered empty if its values are all null or the empty string. * @return <code>true</code> if this CSVRecord is empty. */ public boolean isEmpty() { if (this.data.isEmpty()) return true; else { for (String s : this.data){ if (s != null && !s.equals("")) return false; } return true; } } /** * Swaps the values at the specified indices. 
* @param index1 The first index. * @param index2 The second index. */ public void swapValues(int index1, int index2){ if (index1 >= 0 && index1 < this.data.size() && index2 >= 0 && index2 < this.data.size()) { String temp = this.data.get(index1); this.data.set(index1, this.data.get(index2)); this.data.set(index2, temp); } else { ArrayList<Integer> indices = new ArrayList<>(); indices.add(index1); indices.add(index2); throw new CSVIntegrityException(CSVIntegrityException.INVALID_VALUE_SWAP, indices); } } /** * Sets the value at the specified column to the specified value. * @param column The column whose value will be set. * @param value The new value. */ public void set (String column, Object value) { if (this.sharedHeader.containsColumn(column)) { this.data.set(this.sharedHeader.indexOfColumn(column), value.toString()); } else { throw new CSVIntegrityException(CSVIntegrityException.INVALID_CSVRECORD_ADD, column); } } /** * Sets the value at the specified column to the specified value. * @param columnIndex The column index whose value will be set. * @param value The new value. */ public void set (int columnIndex, Object value) { if (columnIndex < this.data.size()) this.data.set(columnIndex, value.toString()); else throw new CSVIntegrityException(CSVIntegrityException.INVALID_CSVRECORD_ADD, columnIndex); } /** * Returns a list of all the values in this <code>CSVRecord</code>. * @return A <code>List</code> containing this record's values. */ public List getValues() { return new ArrayList(this.data); } /** * Returns a list of all the column names in this record. * @return A <code>List</code> containing this record's header values. */ public List getHeaderList() { return this.sharedHeader.getColumnsList(); } /** * Returns a <code>String</code> representation of this record's header. * @return A <code>String</code> representing the header. */ public String getHeaderString() { return this.sharedHeader.toString(); } /** * Sets all values of this <code>CSVRecord</code> to the empty string. */ public void clearAll() { for (int i = 0; i < this.data.size(); i++){ clear(i); } } /** * Sets the value at the specified column to the empty string. * @param columnNum The index of the column to clear. */ public void clear(int columnNum) { this.set(columnNum, ""); } /** * Sets the value at the specified column to the empty string. * @param column The name of the column to clear. */ public void clear(String column) { this.set(column, ""); } /** * Returns the value at the specified column as a <code>double</code>. * @param column The column name whose value should be returned. * @return <code>double</code> value at the column. */ public double getDouble(String column){ String obj = this.get(column); if (obj != null){ return new Double(obj).doubleValue(); } else { throw new ValueConversionException(ValueConversionException.DOUBLE, column, this); } } /** * Returns the value at the specified column as a <code>double</code>. * @param column The column index whose value should be returned. * @return <code>double</code> value at the column. */ public double getDouble(int column){ String obj = this.get(column); if (obj != null){ return new Double(obj).doubleValue(); } else { throw new ValueConversionException(ValueConversionException.DOUBLE, column, this); } } /** * Returns the value at the specified column as a <code>float</code>. * @param column The column name whose value should be returned. * @return <code>float</code> value at the column. 
*/ public float getFloat(String column) { String obj = this.get(column); if (obj != null) { return new Float(obj).floatValue(); } else { throw new ValueConversionException(ValueConversionException.FLOAT, column, this); } } /** * Returns the value at the specified column as a <code>float</code>. * @param column The column index whose value should be returned. * @return <code>float</code> value at the column. */ public float getFloat(int column) { String obj = this.get(column); if (obj != null) { return new Float(obj).floatValue(); } else { throw new ValueConversionException(ValueConversionException.FLOAT, column, this); } } /** * Returns the value at the specified column as a <code>long</code>. * @param column The column name whose value should be returned. * @return <code>long</code> value at the column. */ public long getLong(String column) { String obj = this.get(column); if (obj != null) { return new Long(obj).longValue(); } else { throw new ValueConversionException(ValueConversionException.LONG, column, this); } } /** * Returns the value at the specified column as a <code>long</code>. * @param column The column index whose value should be returned. * @return <code>long</code> value at the column. */ public long getLong(int column) { String obj = this.get(column); if (obj != null) { return new Long(obj).longValue(); } else { throw new ValueConversionException(ValueConversionException.LONG, column, this); } } /** * Returns the value at the specified column as an <code>int</code>. * @param column The column name whose value should be returned. * @return <code>int</code> value at the column. */ public int getInt(String column) { String obj = this.get(column); if (obj != null) { return new Integer(obj).intValue(); } else { throw new ValueConversionException(ValueConversionException.INT, column, this); } } /** * Returns the value at the specified column as an <code>int</code>. * @param column The column index whose value should be returned. * @return <code>int</code> value at the column. */ public int getInt(int column) { String obj = this.get(column); if (obj != null) { return new Integer(obj).intValue(); } else { throw new ValueConversionException(ValueConversionException.INT, column, this); } } /** * Returns the value at the specified column as a <code>char</code>. * @param column The column name whose value should be returned. * @return <code>char</code> value at the column. */ public char getChar(String column) { String obj = this.get(column); if (obj != null && obj.length() == 1) { return obj.charAt(0); } else { throw new ValueConversionException(ValueConversionException.CHAR, column, this); } } /** * Returns the value at the specified column as a <code>char</code>. * @param column The column index whose value should be returned. * @return <code>char</code> value at the column. */ public char getChar(int column) { String obj = this.get(column); if (obj != null && obj.length() == 1) { return obj.charAt(0); } else { throw new ValueConversionException(ValueConversionException.CHAR, column, this); } } /** * Returns the value at the specified column as a <code>short</code>. * @param column The column name whose value should be returned. * @return <code>short</code> value at the column. 
*/ public short getShort(String column) { String obj = this.get(column); if (obj != null) { return new Short(obj).shortValue(); } else { throw new ValueConversionException(ValueConversionException.SHORT, column, this); } } /** * Returns the value at the specified column as a <code>short</code>. * @param column The column index whose value should be returned. * @return <code>short</code> value at the column. */ public short getShort(int column) { String obj = this.get(column); if (obj != null) { return new Short(obj).shortValue(); } else { throw new ValueConversionException(ValueConversionException.SHORT, column, this); } } /** * Returns the value at the specified column as a <code>byte</code>. * @param column The column name whose value should be returned. * @return <code>byte</code> value at the column. */ public byte getByte(String column) { String obj = this.get(column); if (obj != null) { return new Byte(obj).byteValue(); } else { throw new ValueConversionException(ValueConversionException.BYTE, column, this); } } /** * Returns the value at the specified column as a <code>byte</code>. * @param column The column index whose value should be returned. * @return <code>byte</code> value at the column. */ public byte getByte(int column) { String obj = this.get(column); if (obj != null) { return new Byte(obj).byteValue(); } else { throw new ValueConversionException(ValueConversionException.BYTE, column, this); } } /** * Returns the value at the specified column as a <code>boolean</code>. * @param column The column name whose value should be returned. * @return <code>boolean</code> value at the column. */ public boolean getBoolean(String column) { String obj = this.get(column); if (obj != null) { return new Boolean(obj).booleanValue(); } else { throw new ValueConversionException(ValueConversionException.BOOLEAN, column, this); } } /** * Returns the value at the specified column as a <code>boolean</code>. * @param column The column index whose value should be returned. * @return <code>boolean</code> value at the column. */ public boolean getBoolean(int column) { String obj = this.get(column); if (obj != null) { return new Boolean(obj).booleanValue(); } else { throw new ValueConversionException(ValueConversionException.BOOLEAN, column, this); } } /** * Returns a <code>String</code> representation of this <code>CSVRecord</code>, delimited by the <code>char</code> * value passed as argument. * @param delimiter The delimiter to use in the representation. 
* @return A <code>String</code> representation of this <code>CSVRecord</code> */ public String getRecordString(char delimiter) { StringBuilder sb = new StringBuilder(); for (int i = 0; i < this.data.size(); i++){ sb.append(this.data.get(i)); if (i < this.data.size()-1) sb.append(delimiter); } return sb.toString(); } @Override public String toString(){ StringBuilder sb = new StringBuilder(); for (String s : this.data) { sb.append('['); sb.append(s); sb.append(']'); } return sb.toString(); } @Override public boolean equals(Object other) { if (!(other instanceof CSVRecord)) return false; CSVRecord o = (CSVRecord) other; if (!o.sharedHeader.equals(this.sharedHeader)) return false; else { for (int i = 0; i < this.data.size(); i++) { if (!this.get(i).equals(o.get(i))) return false; } return true; } } protected String remove(int columnNum) { if (columnNum < this.sharedHeader.totalColumns()) return this.data.remove(columnNum); else return null; } protected String remove(String column){ if (this.sharedHeader.containsColumn(column)) return this.data.remove(this.sharedHeader.indexOfColumn(column)); else return null; } protected void setSharedHeader(CSVHeader header) { this.sharedHeader = header; } protected void insert(int index, String value) { this.data.add(index, value); } }
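/*
 * Illustrative sketch (not part of the library): CSVRecord instances come from a CSVArray rather
 * than being constructed directly, so this sketch only shows how the accessors above behave once
 * a record is in hand. The column names "name" and "age" are hypothetical example data; values
 * are stored as Strings and converted on demand by the typed getters.
 */
package wah.giovann.csvhandler;

class CSVRecordUsageSketch {
    static void bumpAge(CSVRecord rec) {
        String name = rec.get("name");                // raw String value, looked up by column name
        int age = rec.getInt("age");                  // parsed to int on demand
        rec.set("age", age + 1);                      // stored back as the String form of the int
        System.out.println(name + " -> " + rec.getRecordString(',')); // e.g. Alice -> Alice,31
    }
}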
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hdfs.web; import static org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod.KERBEROS; import static org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod.SIMPLE; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.mockito.Matchers.*; import static org.mockito.Mockito.*; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.net.HttpURLConnection; import java.net.InetSocketAddress; import java.net.URI; import java.net.URL; import java.net.URLConnection; import java.security.PrivilegedExceptionAction; import java.util.Map; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hdfs.DFSConfigKeys; import org.apache.hadoop.hdfs.DFSUtil; import org.apache.hadoop.hdfs.HdfsConfiguration; import org.apache.hadoop.hdfs.MiniDFSCluster; import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier; import org.apache.hadoop.hdfs.web.resources.*; import org.apache.hadoop.http.HttpConfig; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.io.Text; import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.security.SecurityUtil; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.authentication.client.ConnectionConfigurator; import org.apache.hadoop.security.ssl.KeyStoreTestUtil; import org.apache.hadoop.security.token.SecretManager.InvalidToken; import org.apache.hadoop.security.token.Token; import org.junit.Assert; import org.junit.BeforeClass; import org.junit.Test; import org.mockito.internal.util.reflection.Whitebox; public class TestWebHdfsTokens { private static Configuration conf; URI uri = null; @BeforeClass public static void setUp() { conf = new Configuration(); SecurityUtil.setAuthenticationMethod(KERBEROS, conf); UserGroupInformation.setConfiguration(conf); UserGroupInformation.setLoginUser( UserGroupInformation.createUserForTesting( "LoginUser", new String[]{"supergroup"})); } private WebHdfsFileSystem spyWebhdfsInSecureSetup() throws IOException { WebHdfsFileSystem fsOrig = new WebHdfsFileSystem(); fsOrig.initialize(URI.create("webhdfs://127.0.0.1:0"), conf); WebHdfsFileSystem fs = spy(fsOrig); return fs; } @Test(timeout = 5000) public void testTokenForNonTokenOp() throws IOException { WebHdfsFileSystem fs = spyWebhdfsInSecureSetup(); Token<?> token = mock(Token.class); doReturn(token).when(fs).getDelegationToken(null); // should get/set/renew token fs.toUrl(GetOpParam.Op.OPEN, null); verify(fs).getDelegationToken(); verify(fs).getDelegationToken(null); 
verify(fs).setDelegationToken(token); reset(fs); // should return prior token fs.toUrl(GetOpParam.Op.OPEN, null); verify(fs).getDelegationToken(); verify(fs, never()).getDelegationToken(null); verify(fs, never()).setDelegationToken(token); } @Test(timeout = 5000) public void testNoTokenForGetToken() throws IOException { checkNoTokenForOperation(GetOpParam.Op.GETDELEGATIONTOKEN); } @Test(timeout = 5000) public void testNoTokenForRenewToken() throws IOException { checkNoTokenForOperation(PutOpParam.Op.RENEWDELEGATIONTOKEN); } @Test(timeout = 5000) public void testNoTokenForCancelToken() throws IOException { checkNoTokenForOperation(PutOpParam.Op.CANCELDELEGATIONTOKEN); } private void checkNoTokenForOperation(HttpOpParam.Op op) throws IOException { WebHdfsFileSystem fs = spyWebhdfsInSecureSetup(); doReturn(null).when(fs).getDelegationToken(null); fs.initialize(URI.create("webhdfs://127.0.0.1:0"), conf); // do not get a token! fs.toUrl(op, null); verify(fs, never()).getDelegationToken(); verify(fs, never()).getDelegationToken(null); verify(fs, never()).setDelegationToken((Token<?>)any(Token.class)); } @Test(timeout = 1000) public void testGetOpRequireAuth() { for (HttpOpParam.Op op : GetOpParam.Op.values()) { boolean expect = (op == GetOpParam.Op.GETDELEGATIONTOKEN); assertEquals(expect, op.getRequireAuth()); } } @Test(timeout = 1000) public void testPutOpRequireAuth() { for (HttpOpParam.Op op : PutOpParam.Op.values()) { boolean expect = (op == PutOpParam.Op.RENEWDELEGATIONTOKEN || op == PutOpParam.Op.CANCELDELEGATIONTOKEN); assertEquals(expect, op.getRequireAuth()); } } @Test(timeout = 1000) public void testPostOpRequireAuth() { for (HttpOpParam.Op op : PostOpParam.Op.values()) { assertFalse(op.getRequireAuth()); } } @Test(timeout = 1000) public void testDeleteOpRequireAuth() { for (HttpOpParam.Op op : DeleteOpParam.Op.values()) { assertFalse(op.getRequireAuth()); } } @SuppressWarnings("unchecked") // for any(Token.class) @Test public void testLazyTokenFetchForWebhdfs() throws Exception { MiniDFSCluster cluster = null; WebHdfsFileSystem fs = null; try { final Configuration clusterConf = new HdfsConfiguration(conf); SecurityUtil.setAuthenticationMethod(SIMPLE, clusterConf); clusterConf.setBoolean(DFSConfigKeys .DFS_NAMENODE_DELEGATION_TOKEN_ALWAYS_USE_KEY, true); // trick the NN into thinking security is enabled w/o it trying // to login from a keytab UserGroupInformation.setConfiguration(clusterConf); cluster = new MiniDFSCluster.Builder(clusterConf).numDataNodes(1).build(); cluster.waitActive(); SecurityUtil.setAuthenticationMethod(KERBEROS, clusterConf); UserGroupInformation.setConfiguration(clusterConf); uri = DFSUtil.createUri( "webhdfs", cluster.getNameNode().getHttpAddress()); validateLazyTokenFetch(clusterConf); } finally { IOUtils.cleanup(null, fs); if (cluster != null) { cluster.shutdown(); } } } @SuppressWarnings("unchecked") // for any(Token.class) @Test public void testLazyTokenFetchForSWebhdfs() throws Exception { MiniDFSCluster cluster = null; SWebHdfsFileSystem fs = null; try { final Configuration clusterConf = new HdfsConfiguration(conf); SecurityUtil.setAuthenticationMethod(SIMPLE, clusterConf); clusterConf.setBoolean(DFSConfigKeys .DFS_NAMENODE_DELEGATION_TOKEN_ALWAYS_USE_KEY, true); String BASEDIR = System.getProperty("test.build.dir", "target/test-dir") + "/" + TestWebHdfsTokens.class.getSimpleName(); String keystoresDir; String sslConfDir; clusterConf.set(DFSConfigKeys.DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTPS_ONLY.name()); 
clusterConf.set(DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0"); clusterConf.set(DFSConfigKeys.DFS_DATANODE_HTTPS_ADDRESS_KEY, "localhost:0"); File base = new File(BASEDIR); FileUtil.fullyDelete(base); base.mkdirs(); keystoresDir = new File(BASEDIR).getAbsolutePath(); sslConfDir = KeyStoreTestUtil.getClasspathDir(TestWebHdfsTokens.class); KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, clusterConf, false); // trick the NN into thinking security is enabled w/o it trying // to login from a keytab UserGroupInformation.setConfiguration(clusterConf); cluster = new MiniDFSCluster.Builder(clusterConf).numDataNodes(1).build(); cluster.waitActive(); InetSocketAddress addr = cluster.getNameNode().getHttpsAddress(); String nnAddr = NetUtils.getHostPortString(addr); clusterConf.set(DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY, nnAddr); SecurityUtil.setAuthenticationMethod(KERBEROS, clusterConf); UserGroupInformation.setConfiguration(clusterConf); uri = DFSUtil.createUri( "swebhdfs", cluster.getNameNode().getHttpsAddress()); validateLazyTokenFetch(clusterConf); } finally { IOUtils.cleanup(null, fs); if (cluster != null) { cluster.shutdown(); } } } @Test public void testSetTokenServiceAndKind() throws Exception { MiniDFSCluster cluster = null; try { final Configuration clusterConf = new HdfsConfiguration(conf); SecurityUtil.setAuthenticationMethod(SIMPLE, clusterConf); clusterConf.setBoolean(DFSConfigKeys .DFS_NAMENODE_DELEGATION_TOKEN_ALWAYS_USE_KEY, true); // trick the NN into thinking security is enabled w/o it trying // to login from a keytab UserGroupInformation.setConfiguration(clusterConf); cluster = new MiniDFSCluster.Builder(clusterConf).numDataNodes(0).build(); cluster.waitActive(); SecurityUtil.setAuthenticationMethod(KERBEROS, clusterConf); final WebHdfsFileSystem fs = WebHdfsTestUtil.getWebHdfsFileSystem (clusterConf, "webhdfs"); Whitebox.setInternalState(fs, "canRefreshDelegationToken", true); URLConnectionFactory factory = new URLConnectionFactory(new ConnectionConfigurator() { @Override public HttpURLConnection configure(HttpURLConnection conn) throws IOException { return conn; } }) { @Override public URLConnection openConnection(URL url) throws IOException { return super.openConnection(new URL(url + "&service=foo&kind=bar")); } }; Whitebox.setInternalState(fs, "connectionFactory", factory); Token<?> token1 = fs.getDelegationToken(); Assert.assertEquals(new Text("bar"), token1.getKind()); final HttpOpParam.Op op = GetOpParam.Op.GETDELEGATIONTOKEN; Token<DelegationTokenIdentifier> token2 = fs.new FsPathResponseRunner<Token<DelegationTokenIdentifier>>( op, null, new RenewerParam(null)) { @Override Token<DelegationTokenIdentifier> decodeResponse(Map<?, ?> json) throws IOException { return JsonUtilClient.toDelegationToken(json); } }.run(); Assert.assertEquals(new Text("bar"), token2.getKind()); Assert.assertEquals(new Text("foo"), token2.getService()); } finally { if (cluster != null) { cluster.shutdown(); } } } @SuppressWarnings("unchecked") private void validateLazyTokenFetch(final Configuration clusterConf) throws Exception{ final String testUser = "DummyUser"; UserGroupInformation ugi = UserGroupInformation.createUserForTesting( testUser, new String[]{"supergroup"}); WebHdfsFileSystem fs = ugi.doAs(new PrivilegedExceptionAction<WebHdfsFileSystem>() { @Override public WebHdfsFileSystem run() throws IOException { return spy((WebHdfsFileSystem) FileSystem.newInstance(uri, clusterConf)); } }); // verify token ops don't get a token 
Assert.assertNull(fs.getRenewToken()); Token<?> token = fs.getDelegationToken(null); fs.renewDelegationToken(token); fs.cancelDelegationToken(token); verify(fs, never()).getDelegationToken(); verify(fs, never()).replaceExpiredDelegationToken(); verify(fs, never()).setDelegationToken(any(Token.class)); Assert.assertNull(fs.getRenewToken()); reset(fs); // verify first non-token op gets a token final Path p = new Path("/f"); fs.create(p, (short)1).close(); verify(fs, times(1)).getDelegationToken(); verify(fs, never()).replaceExpiredDelegationToken(); verify(fs, times(1)).getDelegationToken(anyString()); verify(fs, times(1)).setDelegationToken(any(Token.class)); token = fs.getRenewToken(); Assert.assertNotNull(token); Assert.assertEquals(testUser, getTokenOwner(token)); Assert.assertEquals(fs.getTokenKind(), token.getKind()); reset(fs); // verify prior token is reused fs.getFileStatus(p); verify(fs, times(1)).getDelegationToken(); verify(fs, never()).replaceExpiredDelegationToken(); verify(fs, never()).getDelegationToken(anyString()); verify(fs, never()).setDelegationToken(any(Token.class)); Token<?> token2 = fs.getRenewToken(); Assert.assertNotNull(token2); Assert.assertEquals(fs.getTokenKind(), token.getKind()); Assert.assertSame(token, token2); reset(fs); // verify renew of expired token fails w/o getting a new token token = fs.getRenewToken(); fs.cancelDelegationToken(token); try { fs.renewDelegationToken(token); Assert.fail("should have failed"); } catch (InvalidToken it) { } catch (Exception ex) { Assert.fail("wrong exception:"+ex); } verify(fs, never()).getDelegationToken(); verify(fs, never()).replaceExpiredDelegationToken(); verify(fs, never()).getDelegationToken(anyString()); verify(fs, never()).setDelegationToken(any(Token.class)); token2 = fs.getRenewToken(); Assert.assertNotNull(token2); Assert.assertEquals(fs.getTokenKind(), token.getKind()); Assert.assertSame(token, token2); reset(fs); // verify cancel of expired token fails w/o getting a new token try { fs.cancelDelegationToken(token); Assert.fail("should have failed"); } catch (InvalidToken it) { } catch (Exception ex) { Assert.fail("wrong exception:"+ex); } verify(fs, never()).getDelegationToken(); verify(fs, never()).replaceExpiredDelegationToken(); verify(fs, never()).getDelegationToken(anyString()); verify(fs, never()).setDelegationToken(any(Token.class)); token2 = fs.getRenewToken(); Assert.assertNotNull(token2); Assert.assertEquals(fs.getTokenKind(), token.getKind()); Assert.assertSame(token, token2); reset(fs); // verify an expired token is replaced with a new token fs.open(p).close(); verify(fs, times(2)).getDelegationToken(); // first bad, then good verify(fs, times(1)).replaceExpiredDelegationToken(); verify(fs, times(1)).getDelegationToken(null); verify(fs, times(1)).setDelegationToken(any(Token.class)); token2 = fs.getRenewToken(); Assert.assertNotNull(token2); Assert.assertNotSame(token, token2); Assert.assertEquals(fs.getTokenKind(), token.getKind()); Assert.assertEquals(testUser, getTokenOwner(token2)); reset(fs); // verify with open because it's a little different in how it // opens connections fs.cancelDelegationToken(fs.getRenewToken()); InputStream is = fs.open(p); is.read(); is.close(); verify(fs, times(2)).getDelegationToken(); // first bad, then good verify(fs, times(1)).replaceExpiredDelegationToken(); verify(fs, times(1)).getDelegationToken(null); verify(fs, times(1)).setDelegationToken(any(Token.class)); token2 = fs.getRenewToken(); Assert.assertNotNull(token2); Assert.assertNotSame(token, token2); 
Assert.assertEquals(fs.getTokenKind(), token.getKind()); Assert.assertEquals(testUser, getTokenOwner(token2)); reset(fs); // verify fs close cancels the token fs.close(); verify(fs, never()).getDelegationToken(); verify(fs, never()).replaceExpiredDelegationToken(); verify(fs, never()).getDelegationToken(anyString()); verify(fs, never()).setDelegationToken(any(Token.class)); verify(fs, times(1)).cancelDelegationToken(eq(token2)); // add a token to ugi for a new fs, verify it uses that token token = fs.getDelegationToken(null); ugi.addToken(token); fs = ugi.doAs(new PrivilegedExceptionAction<WebHdfsFileSystem>() { @Override public WebHdfsFileSystem run() throws IOException { return spy((WebHdfsFileSystem) FileSystem.newInstance(uri, clusterConf)); } }); Assert.assertNull(fs.getRenewToken()); fs.getFileStatus(new Path("/")); verify(fs, times(1)).getDelegationToken(); verify(fs, never()).replaceExpiredDelegationToken(); verify(fs, never()).getDelegationToken(anyString()); verify(fs, times(1)).setDelegationToken(eq(token)); token2 = fs.getRenewToken(); Assert.assertNotNull(token2); Assert.assertEquals(fs.getTokenKind(), token.getKind()); Assert.assertSame(token, token2); reset(fs); // verify it reuses the prior ugi token fs.getFileStatus(new Path("/")); verify(fs, times(1)).getDelegationToken(); verify(fs, never()).replaceExpiredDelegationToken(); verify(fs, never()).getDelegationToken(anyString()); verify(fs, never()).setDelegationToken(any(Token.class)); token2 = fs.getRenewToken(); Assert.assertNotNull(token2); Assert.assertEquals(fs.getTokenKind(), token.getKind()); Assert.assertSame(token, token2); reset(fs); // verify an expired ugi token is NOT replaced with a new token fs.cancelDelegationToken(token); for (int i=0; i<2; i++) { try { fs.getFileStatus(new Path("/")); Assert.fail("didn't fail"); } catch (InvalidToken it) { } catch (Exception ex) { Assert.fail("wrong exception:"+ex); } verify(fs, times(1)).getDelegationToken(); verify(fs, times(1)).replaceExpiredDelegationToken(); verify(fs, never()).getDelegationToken(anyString()); verify(fs, never()).setDelegationToken(any(Token.class)); token2 = fs.getRenewToken(); Assert.assertNotNull(token2); Assert.assertEquals(fs.getTokenKind(), token.getKind()); Assert.assertSame(token, token2); reset(fs); } // verify fs close does NOT cancel the ugi token fs.close(); verify(fs, never()).getDelegationToken(); verify(fs, never()).replaceExpiredDelegationToken(); verify(fs, never()).getDelegationToken(anyString()); verify(fs, never()).setDelegationToken(any(Token.class)); verify(fs, never()).cancelDelegationToken(any(Token.class)); } private String getTokenOwner(Token<?> token) throws IOException { // webhdfs doesn't register properly with the class loader @SuppressWarnings({ "rawtypes", "unchecked" }) Token<?> clone = new Token(token); clone.setKind(DelegationTokenIdentifier.HDFS_DELEGATION_KIND); return clone.decodeIdentifier().getUser().getUserName(); } }
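/*
 * Illustrative sketch (not part of the Hadoop test above): validateLazyTokenFetch leans on
 * Mockito's spy/verify/reset cycle to count delegation-token calls between phases. A minimal,
 * standalone demonstration of that pattern on a plain List follows; the class name is
 * hypothetical and exists only for this demonstration.
 */
import static org.mockito.Mockito.*;

import java.util.ArrayList;
import java.util.List;

class SpyVerifyDemo {
    public static void main(String[] args) {
        List<String> spied = spy(new ArrayList<String>()); // real object wrapped so calls are recorded
        spied.add("token");                                // real behaviour still executes
        verify(spied, times(1)).add("token");              // the interaction was recorded exactly once
        verify(spied, never()).clear();                    // absent interactions can be asserted too
        reset(spied);                                      // forget the recorded history between phases
        verify(spied, never()).add(anyString());           // counts start from zero again
    }
}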
/** * Copyright (C) 2013 - present by OpenGamma Inc. and the OpenGamma group of companies * * Please see distribution for license. */ package com.opengamma.web.spring; import java.util.HashMap; import java.util.List; import java.util.Map; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.InitializingBean; import com.opengamma.engine.function.config.AbstractFunctionConfigurationBean; import com.opengamma.engine.function.config.CombiningFunctionConfigurationSource; import com.opengamma.engine.function.config.FunctionConfiguration; import com.opengamma.engine.function.config.FunctionConfigurationSource; import com.opengamma.engine.function.config.ParameterizedFunctionConfiguration; import com.opengamma.engine.value.SurfaceAndCubePropertyNames; import com.opengamma.engine.value.ValuePropertyNames; import com.opengamma.financial.analytics.CurrencyPairsDefaults; import com.opengamma.financial.analytics.model.bond.BondFunctions; import com.opengamma.financial.analytics.model.credit.CreditFunctions; import com.opengamma.financial.analytics.model.curve.CurveFunctions; import com.opengamma.financial.analytics.model.curve.forward.ForwardFunctions; import com.opengamma.financial.analytics.model.equity.option.OptionFunctions; import com.opengamma.financial.analytics.model.equity.portfoliotheory.PortfolioTheoryFunctions; import com.opengamma.financial.analytics.model.future.FutureFunctions; import com.opengamma.financial.analytics.model.futureoption.FutureOptionFunctions; import com.opengamma.financial.analytics.model.irfutureoption.IRFutureOptionFunctions; import com.opengamma.financial.analytics.model.multicurve.MultiCurvePricingFunctions; import com.opengamma.financial.analytics.model.pnl.PNLFunctions; import com.opengamma.financial.analytics.model.sensitivities.SensitivitiesFunctions; import com.opengamma.financial.analytics.model.var.VaRFunctions; import com.opengamma.financial.analytics.model.volatility.local.defaultproperties.LocalVolatilitySurfaceDefaults; import com.opengamma.financial.currency.CurrencyPairs; import com.opengamma.lambdava.functions.Function1; import com.opengamma.util.SingletonFactoryBean; import com.opengamma.util.tuple.Pair; import com.opengamma.util.tuple.Pairs; import com.opengamma.web.spring.defaults.GeneralLocalVolatilitySurfaceDefaults; /** * Constructs a standard function repository. * <p> * A sub-class should provide installation specific details relating to the data providers used. */ @SuppressWarnings("deprecation") public abstract class StandardFunctionConfiguration extends AbstractFunctionConfigurationBean { private static final Logger LOGGER = LoggerFactory.getLogger(StandardFunctionConfiguration.class); /** * Holds one or more values referenced by a hierarchical key. */ public static class Value { private final Map<String, String> _values = new HashMap<>(); public void set(final String key, final String value) { _values.put(key, value); } // TODO: allow wildcard matches, e.g. */discounting public String get(final String key) { final String value = _values.get(key); if (value != null) { return value; } final int separator = key.lastIndexOf('/'); if (separator == -1) { return _values.get(null); } return get(key.substring(0, separator)); } } /** * Constants for a particular currency. 
*/ public static class CurrencyInfo { /** The currency string */ private final String _currency; /** Usually the default value of the {@link ValuePropertyNames#CURVE_CONSTRUCTION_CONFIG} property */ private final Value _curveConfiguration = new Value(); /** Usually the default value of the {@link ValuePropertyNames#CURVE} property */ private final Value _curveName = new Value(); /** Usually the default value of the {@link ValuePropertyNames#CURVE_CALCULATION_METHOD} property */ private final Value _curveCalculationMethodName = new Value(); /** Usually the default value of the {@link ValuePropertyNames#SURFACE} property */ private final Value _surfaceName = new Value(); /** Usually the default value of the {@link ValuePropertyNames#CUBE} property */ private final Value _cubeName = new Value(); /** The forward curve name */ private final Value _forwardCurveName = new Value(); /** The forward curve calculation method */ private final Value _forwardCurveCalculationMethod = new Value(); /** The surface calculation method */ private final Value _surfaceCalculationMethod = new Value(); /** Usually the default value of the {@link SurfaceAndCubePropertyNames#PROPERTY_CUBE_DEFINITION} property */ private final Value _cubeDefinitionName = new Value(); /** Usually the default value of the {@link SurfaceAndCubePropertyNames#PROPERTY_CUBE_SPECIIFICATION} property */ private final Value _cubeSpecificationName = new Value(); /** Usually the default value of the {@link SurfaceAndCubePropertyNames#PROPERTY_SURFACE_DEFINITION} property */ private final Value _surfaceDefinitionName = new Value(); /** Usually the default value of the {@link SurfaceAndCubePropertyNames#PROPERTY_SURFACE_SPECIIFICATION} property */ private final Value _surfaceSpecificationName = new Value(); public CurrencyInfo(final String currency) { _currency = currency; } public String getCurrency() { return _currency; } public void setCurveConfiguration(final String key, final String curveConfiguration) { _curveConfiguration.set(key, curveConfiguration); } public String getCurveConfiguration(final String key) { return _curveConfiguration.get(key); } public void setCurveName(final String key, final String curveName) { _curveName.set(key, curveName); } public String getCurveName(final String key) { return _curveName.get(key); } public void setCurveCalculationMethodName(final String key, final String curveCalculationMethodName) { _curveCalculationMethodName.set(key, curveCalculationMethodName); } public String getCurveCalculationMethodName(final String key) { return _curveCalculationMethodName.get(key); } public void setSurfaceName(final String key, final String surfaceName) { _surfaceName.set(key, surfaceName); } public String getSurfaceName(final String key) { return _surfaceName.get(key); } public void setCubeName(final String key, final String cubeName) { _cubeName.set(key, cubeName); } public String getCubeName(final String key) { return _cubeName.get(key); } public void setForwardCurveName(final String key, final String forwardCurveName) { _forwardCurveName.set(key, forwardCurveName); } public String getForwardCurveName(final String key) { return _forwardCurveName.get(key); } public void setForwardCurveCalculationMethod(final String key, final String forwardCurveCalculationMethod) { _forwardCurveCalculationMethod.set(key, forwardCurveCalculationMethod); } public String getForwardCurveCalculationMethod(final String key) { return _forwardCurveCalculationMethod.get(key); } public void setSurfaceCalculationMethod(final String key, final 
String surfaceCalculationMethod) { _surfaceCalculationMethod.set(key, surfaceCalculationMethod); } public String getSurfaceCalculationMethod(final String key) { return _surfaceCalculationMethod.get(key); } /** * Gets the cube definition name for a key. * * @param key * The key * @return The cube definition name */ public String getCubeDefinitionName(final String key) { return _cubeDefinitionName.get(key); } /** * Sets a cube definition name for a key. * * @param key * The key * @param cubeDefinitionName * The cube definition name */ public void setCubeDefinitionName(final String key, final String cubeDefinitionName) { _cubeDefinitionName.set(key, cubeDefinitionName); } /** * Gets the cube specification name for a key. * * @param key * The key * @return The cube specification name */ public String getCubeSpecificationName(final String key) { return _cubeSpecificationName.get(key); } /** * Sets a cube specification name for a key. * * @param key * The key * @param cubeSpecificationName * The cube specification name */ public void setCubeSpecificationName(final String key, final String cubeSpecificationName) { _cubeSpecificationName.set(key, cubeSpecificationName); } /** * Gets the surface definition name for a key. * * @param key * The key * @return The surface definition name */ public String getSurfaceDefinitionName(final String key) { return _surfaceDefinitionName.get(key); } /** * Sets a surface definition name for a key. * * @param key * The key * @param surfaceDefinitionName * The surface definition name */ public void setSurfaceDefinitionName(final String key, final String surfaceDefinitionName) { _surfaceDefinitionName.set(key, surfaceDefinitionName); } /** * Gets the surface specification name for a key. * * @param key * The key * @return The surface specification name */ public String getSurfaceSpecificationName(final String key) { return _surfaceSpecificationName.get(key); } /** * Sets a surface specification name for a key. * * @param key * The key * @param surfaceSpecificationName * The surface specification name */ public void setSurfaceSpecificationName(final String key, final String surfaceSpecificationName) { _surfaceSpecificationName.set(key, surfaceSpecificationName); } } /** * Constants for a particular currency pair. 
*/ public static class CurrencyPairInfo { private final Pair<String, String> _currencies; private final Value _curveName = new Value(); private final Value _curveCalculationMethod = new Value(); private final Value _surfaceName = new Value(); private final Value _forwardCurveName = new Value(); public CurrencyPairInfo(final Pair<String, String> currencies) { _currencies = currencies; } public Pair<String, String> getCurrencies() { return _currencies; } public void setCurveName(final String key, final String curveName) { _curveName.set(key, curveName); } public String getCurveName(final String key) { return _curveName.get(key); } public void setCurveCalculationMethod(final String key, final String curveCalculationMethod) { _curveCalculationMethod.set(key, curveCalculationMethod); } public String getCurveCalculationMethod(final String key) { return _curveCalculationMethod.get(key); } public void setSurfaceName(final String key, final String surfaceName) { _surfaceName.set(key, surfaceName); } public String getSurfaceName(final String key) { return _surfaceName.get(key); } public String getForwardCurveName(final String key) { return _forwardCurveName.get(key); } public void setForwardCurveName(final String key, final String forwardCurveName) { _forwardCurveName.set(key, forwardCurveName); } } private final Map<String, CurrencyInfo> _perCurrencyInfo = new HashMap<>(); private final Map<Pair<String, String>, CurrencyPairInfo> _perCurrencyPairInfo = new HashMap<>(); private String _mark2MarketField; private String _costOfCarryField; private double _absoluteTolerance; private double _relativeTolerance; private int _maxIterations; public StandardFunctionConfiguration() { setDefaultCurrencyInfo(); setDefaultCurrencyPairInfo(); } public void setPerCurrencyInfo(final Map<String, CurrencyInfo> perCurrencyInfo) { _perCurrencyInfo.clear(); _perCurrencyInfo.putAll(perCurrencyInfo); } public Map<String, CurrencyInfo> getPerCurrencyInfo() { return _perCurrencyInfo; } public void setCurrencyInfo(final String currency, final CurrencyInfo info) { _perCurrencyInfo.put(currency, info); } public CurrencyInfo getCurrencyInfo(final String currency) { return _perCurrencyInfo.get(currency); } protected <T> Map<String, T> getCurrencyInfo(final Function1<CurrencyInfo, T> filter) { final Map<String, T> result = new HashMap<>(); for (final Map.Entry<String, CurrencyInfo> e : getPerCurrencyInfo().entrySet()) { final T entry = filter.execute(e.getValue()); if (entry instanceof InitializingBean) { try { ((InitializingBean) entry).afterPropertiesSet(); } catch (final Exception ex) { LOGGER.debug("Skipping {}", e.getKey()); LOGGER.trace("Caught exception", ex); continue; } } if (entry != null) { result.put(e.getKey(), entry); } } return result; } public void setPerCurrencyPairInfo(final Map<Pair<String, String>, CurrencyPairInfo> perCurrencyPairInfo) { _perCurrencyPairInfo.clear(); _perCurrencyPairInfo.putAll(perCurrencyPairInfo); } public Map<Pair<String, String>, CurrencyPairInfo> getPerCurrencyPairInfo() { return _perCurrencyPairInfo; } public void setCurrencyPairInfo(final Pair<String, String> currencyPair, final CurrencyPairInfo info) { _perCurrencyPairInfo.put(currencyPair, info); } public CurrencyPairInfo getCurrencyPairInfo(final Pair<String, String> currencyPair) { return _perCurrencyPairInfo.get(currencyPair); } protected <T> Map<Pair<String, String>, T> getCurrencyPairInfo(final Function1<CurrencyPairInfo, T> filter) { final Map<Pair<String, String>, T> result = new HashMap<>(); for (final Map.Entry<Pair<String, 
String>, CurrencyPairInfo> e : getPerCurrencyPairInfo().entrySet()) { final T entry = filter.execute(e.getValue()); if (entry instanceof InitializingBean) { try { ((InitializingBean) entry).afterPropertiesSet(); } catch (final Exception ex) { LOGGER.debug("Skipping {}", e.getKey()); LOGGER.trace("Caught exception", ex); continue; } } if (entry != null) { result.put(e.getKey(), entry); } } return result; } public void setMark2MarketField(final String mark2MarketField) { _mark2MarketField = mark2MarketField; } public String getMark2MarketField() { return _mark2MarketField; } public void setCostOfCarryField(final String costOfCarryField) { _costOfCarryField = costOfCarryField; } public String getCostOfCarryField() { return _costOfCarryField; } /** * Sets the absolute tolerance for the curve root-finder. * * @param absoluteTolerance * The absolute tolerance, greater than zero */ public void setAbsoluteTolerance(final double absoluteTolerance) { _absoluteTolerance = absoluteTolerance; } /** * Gets the absolute tolerance for the curve root-finder. * * @return The absolute tolerance */ public double getAbsoluteTolerance() { return _absoluteTolerance; } /** * Sets the relative tolerance for the curve root-finder. * * @param relativeTolerance * The relative tolerance, greater than zero */ public void setRelativeTolerance(final double relativeTolerance) { _relativeTolerance = relativeTolerance; } /** * Gets the relative tolerance for the curve root-finder. * * @return The relative tolerance */ public double getRelativeTolerance() { return _relativeTolerance; } /** * Sets the maximum number of iterations for the curve root-finder. * * @param maxIterations * The maximum iterations, greater than zero */ public void setMaximumIterations(final int maxIterations) { _maxIterations = maxIterations; } /** * Gets the maximum number of iterations for the curve root-finder. 
* * @return The maximum iterations */ public int getMaximumIterations() { return _maxIterations; } protected CurrencyInfo defaultCurrencyInfo(final String currency) { return new CurrencyInfo(currency); } protected CurrencyInfo arsCurrencyInfo() { return defaultCurrencyInfo("ARS"); } protected CurrencyInfo audCurrencyInfo() { return defaultCurrencyInfo("AUD"); } protected CurrencyInfo brlCurrencyInfo() { return defaultCurrencyInfo("BRL"); } protected CurrencyInfo cadCurrencyInfo() { return defaultCurrencyInfo("CAD"); } protected CurrencyInfo chfCurrencyInfo() { return defaultCurrencyInfo("CHF"); } protected CurrencyInfo cnyCurrencyInfo() { return defaultCurrencyInfo("CNY"); } protected CurrencyInfo czkCurrencyInfo() { return defaultCurrencyInfo("CZK"); } protected CurrencyInfo egpCurrencyInfo() { return defaultCurrencyInfo("EGP"); } protected CurrencyInfo eurCurrencyInfo() { return defaultCurrencyInfo("EUR"); } protected CurrencyInfo gbpCurrencyInfo() { return defaultCurrencyInfo("GBP"); } protected CurrencyInfo hkdCurrencyInfo() { return defaultCurrencyInfo("HKD"); } protected CurrencyInfo hufCurrencyInfo() { return defaultCurrencyInfo("HUF"); } protected CurrencyInfo idrCurrencyInfo() { return defaultCurrencyInfo("IDR"); } protected CurrencyInfo ilsCurrencyInfo() { return defaultCurrencyInfo("ILS"); } protected CurrencyInfo inrCurrencyInfo() { return defaultCurrencyInfo("INR"); } protected CurrencyInfo jpyCurrencyInfo() { return defaultCurrencyInfo("JPY"); } protected CurrencyInfo krwCurrencyInfo() { return defaultCurrencyInfo("KRW"); } protected CurrencyInfo mxnCurrencyInfo() { return defaultCurrencyInfo("MXN"); } protected CurrencyInfo myrCurrencyInfo() { return defaultCurrencyInfo("MYR"); } protected CurrencyInfo nokCurrencyInfo() { return defaultCurrencyInfo("NOK"); } protected CurrencyInfo nzdCurrencyInfo() { return defaultCurrencyInfo("NZD"); } protected CurrencyInfo phpCurrencyInfo() { return defaultCurrencyInfo("PHP"); } protected CurrencyInfo plnCurrencyInfo() { return defaultCurrencyInfo("PLN"); } protected CurrencyInfo rubCurrencyInfo() { return defaultCurrencyInfo("RUB"); } protected CurrencyInfo sekCurrencyInfo() { return defaultCurrencyInfo("SEK"); } protected CurrencyInfo sgdCurrencyInfo() { return defaultCurrencyInfo("SGD"); } protected CurrencyInfo tryCurrencyInfo() { return defaultCurrencyInfo("TRY"); } protected CurrencyInfo twdCurrencyInfo() { return defaultCurrencyInfo("TWD"); } protected CurrencyInfo usdCurrencyInfo() { return defaultCurrencyInfo("USD"); } protected CurrencyInfo zarCurrencyInfo() { return defaultCurrencyInfo("ZAR"); } protected void setDefaultCurrencyInfo() { setCurrencyInfo("ARS", arsCurrencyInfo()); setCurrencyInfo("AUD", audCurrencyInfo()); setCurrencyInfo("BRL", brlCurrencyInfo()); setCurrencyInfo("CAD", cadCurrencyInfo()); setCurrencyInfo("CHF", chfCurrencyInfo()); setCurrencyInfo("CNY", cnyCurrencyInfo()); setCurrencyInfo("CZK", czkCurrencyInfo()); setCurrencyInfo("EGP", egpCurrencyInfo()); setCurrencyInfo("EUR", eurCurrencyInfo()); setCurrencyInfo("GBP", gbpCurrencyInfo()); setCurrencyInfo("HKD", hkdCurrencyInfo()); setCurrencyInfo("HUF", hufCurrencyInfo()); setCurrencyInfo("IDR", idrCurrencyInfo()); setCurrencyInfo("ILS", ilsCurrencyInfo()); setCurrencyInfo("INR", inrCurrencyInfo()); setCurrencyInfo("JPY", jpyCurrencyInfo()); setCurrencyInfo("KRW", krwCurrencyInfo()); setCurrencyInfo("MXN", mxnCurrencyInfo()); setCurrencyInfo("MYR", myrCurrencyInfo()); setCurrencyInfo("NOK", nokCurrencyInfo()); setCurrencyInfo("NZD", nzdCurrencyInfo()); 
setCurrencyInfo("PHP", phpCurrencyInfo()); setCurrencyInfo("PLN", plnCurrencyInfo()); setCurrencyInfo("RUB", rubCurrencyInfo()); setCurrencyInfo("SEK", sekCurrencyInfo()); setCurrencyInfo("SGD", sgdCurrencyInfo()); setCurrencyInfo("TRY", tryCurrencyInfo()); setCurrencyInfo("TWD", twdCurrencyInfo()); setCurrencyInfo("USD", usdCurrencyInfo()); setCurrencyInfo("ZAR", zarCurrencyInfo()); } protected CurrencyPairInfo defaultCurrencyPairInfo(final String c1, final String c2) { return new CurrencyPairInfo(Pairs.of(c1, c2)); } protected CurrencyPairInfo audKrwCurrencyPairInfo() { return defaultCurrencyPairInfo("AUD", "KRW"); } protected CurrencyPairInfo chfJpyCurrencyPairInfo() { return defaultCurrencyPairInfo("CHF", "JPY"); } protected CurrencyPairInfo eurBrlCurrencyPairInfo() { return defaultCurrencyPairInfo("EUR", "BRL"); } protected CurrencyPairInfo eurChfCurrencyPairInfo() { return defaultCurrencyPairInfo("EUR", "CHF"); } protected CurrencyPairInfo eurGbpCurrencyPairInfo() { return defaultCurrencyPairInfo("EUR", "GBP"); } protected CurrencyPairInfo eurJpyCurrencyPairInfo() { return defaultCurrencyPairInfo("EUR", "JPY"); } protected CurrencyPairInfo eurTryCurrencyPairInfo() { return defaultCurrencyPairInfo("EUR", "TRY"); } protected CurrencyPairInfo jpyKrwCurrencyPairInfo() { return defaultCurrencyPairInfo("JPY", "KRW"); } protected CurrencyPairInfo sekJpyCurrencyPairInfo() { return defaultCurrencyPairInfo("SEK", "JPY"); } protected CurrencyPairInfo usdAudCurrencyPairInfo() { return defaultCurrencyPairInfo("USD", "AUD"); } protected CurrencyPairInfo usdBrlCurrencyPairInfo() { return defaultCurrencyPairInfo("USD", "BRL"); } protected CurrencyPairInfo usdCadCurrencyPairInfo() { return defaultCurrencyPairInfo("USD", "CAD"); } protected CurrencyPairInfo usdChfCurrencyPairInfo() { return defaultCurrencyPairInfo("USD", "CHF"); } protected CurrencyPairInfo usdCnyCurrencyPairInfo() { return defaultCurrencyPairInfo("USD", "CNY"); } protected CurrencyPairInfo usdEurCurrencyPairInfo() { return defaultCurrencyPairInfo("USD", "EUR"); } protected CurrencyPairInfo usdGbpCurrencyPairInfo() { return defaultCurrencyPairInfo("USD", "GBP"); } protected CurrencyPairInfo usdHkdCurrencyPairInfo() { return defaultCurrencyPairInfo("USD", "HKD"); } protected CurrencyPairInfo usdHufCurrencyPairInfo() { return defaultCurrencyPairInfo("USD", "HUF"); } protected CurrencyPairInfo usdInrCurrencyPairInfo() { return defaultCurrencyPairInfo("USD", "INR"); } protected CurrencyPairInfo usdJpyCurrencyPairInfo() { return defaultCurrencyPairInfo("USD", "JPY"); } protected CurrencyPairInfo usdKrwCurrencyPairInfo() { return defaultCurrencyPairInfo("USD", "KRW"); } protected CurrencyPairInfo usdMxnCurrencyPairInfo() { return defaultCurrencyPairInfo("USD", "MXN"); } protected CurrencyPairInfo usdNokCurrencyPairInfo() { return defaultCurrencyPairInfo("USD", "NOK"); } protected CurrencyPairInfo usdNzdCurrencyPairInfo() { return defaultCurrencyPairInfo("USD", "NZD"); } protected CurrencyPairInfo usdSgdCurrencyPairInfo() { return defaultCurrencyPairInfo("USD", "SGD"); } protected CurrencyPairInfo usdZarCurrencyPairInfo() { return defaultCurrencyPairInfo("USD", "ZAR"); } protected void setDefaultCurrencyPairInfo() { setCurrencyPairInfo(Pairs.of("AUD", "KRW"), audKrwCurrencyPairInfo()); setCurrencyPairInfo(Pairs.of("CHF", "JPY"), chfJpyCurrencyPairInfo()); setCurrencyPairInfo(Pairs.of("EUR", "BRL"), eurBrlCurrencyPairInfo()); setCurrencyPairInfo(Pairs.of("EUR", "CHF"), eurChfCurrencyPairInfo()); setCurrencyPairInfo(Pairs.of("EUR", "GBP"), 
eurGbpCurrencyPairInfo()); setCurrencyPairInfo(Pairs.of("EUR", "JPY"), eurJpyCurrencyPairInfo()); setCurrencyPairInfo(Pairs.of("EUR", "TRY"), eurTryCurrencyPairInfo()); setCurrencyPairInfo(Pairs.of("JPY", "KRW"), jpyKrwCurrencyPairInfo()); setCurrencyPairInfo(Pairs.of("SEK", "JPY"), sekJpyCurrencyPairInfo()); setCurrencyPairInfo(Pairs.of("USD", "AUD"), usdAudCurrencyPairInfo()); setCurrencyPairInfo(Pairs.of("USD", "BRL"), usdBrlCurrencyPairInfo()); setCurrencyPairInfo(Pairs.of("USD", "CAD"), usdCadCurrencyPairInfo()); setCurrencyPairInfo(Pairs.of("USD", "CHF"), usdChfCurrencyPairInfo()); setCurrencyPairInfo(Pairs.of("USD", "CNY"), usdCnyCurrencyPairInfo()); setCurrencyPairInfo(Pairs.of("USD", "EUR"), usdEurCurrencyPairInfo()); setCurrencyPairInfo(Pairs.of("USD", "GBP"), usdGbpCurrencyPairInfo()); setCurrencyPairInfo(Pairs.of("USD", "HKD"), usdHkdCurrencyPairInfo()); setCurrencyPairInfo(Pairs.of("USD", "HUF"), usdHufCurrencyPairInfo()); setCurrencyPairInfo(Pairs.of("USD", "INR"), usdInrCurrencyPairInfo()); setCurrencyPairInfo(Pairs.of("USD", "JPY"), usdJpyCurrencyPairInfo()); setCurrencyPairInfo(Pairs.of("USD", "KRW"), usdKrwCurrencyPairInfo()); setCurrencyPairInfo(Pairs.of("USD", "MXN"), usdMxnCurrencyPairInfo()); setCurrencyPairInfo(Pairs.of("USD", "NOK"), usdNokCurrencyPairInfo()); setCurrencyPairInfo(Pairs.of("USD", "NZD"), usdNzdCurrencyPairInfo()); setCurrencyPairInfo(Pairs.of("USD", "SGD"), usdSgdCurrencyPairInfo()); setCurrencyPairInfo(Pairs.of("USD", "ZAR"), usdZarCurrencyPairInfo()); } protected void addCurrencyConversionFunctions(final List<FunctionConfiguration> functionConfigs) { functionConfigs.add(functionConfiguration(CurrencyPairsDefaults.class, CurrencyPairs.DEFAULT_CURRENCY_PAIRS)); } protected void addLocalVolatilitySurfaceDefaults(final List<FunctionConfiguration> functionConfigs) { functionConfigs.add(new ParameterizedFunctionConfiguration(LocalVolatilitySurfaceDefaults.class.getName(), GeneralLocalVolatilitySurfaceDefaults.getLocalVolatilitySurfaceDefaults())); } @Override protected void addAllConfigurations(final List<FunctionConfiguration> functions) { addCurrencyConversionFunctions(functions); addLocalVolatilitySurfaceDefaults(functions); } protected FunctionConfigurationSource getRepository(final SingletonFactoryBean<FunctionConfigurationSource> defaults) { try { defaults.afterPropertiesSet(); } catch (final Exception e) { LOGGER.warn("Caught exception", e); return null; } return defaults.getObject(); } protected void setBondFunctionDefaults(final CurrencyInfo i, final BondFunctions.Defaults.CurrencyInfo defaults) { defaults.setRiskFreeCurveName(i.getCurveName("model/bond/riskFree")); defaults.setRiskFreeCurveCalculationConfig(i.getCurveConfiguration("model/bond/riskFree")); defaults.setCreditCurveName(i.getCurveName("model/bond/credit")); defaults.setCreditCurveCalculationConfig(i.getCurveConfiguration("model/bond/credit")); } protected void setBondFunctionDefaults(final BondFunctions.Defaults defaults) { defaults.setPerCurrencyInfo(getCurrencyInfo(new Function1<CurrencyInfo, BondFunctions.Defaults.CurrencyInfo>() { @Override public BondFunctions.Defaults.CurrencyInfo execute(final CurrencyInfo i) { final BondFunctions.Defaults.CurrencyInfo d = new BondFunctions.Defaults.CurrencyInfo(); setBondFunctionDefaults(i, d); return d; } })); } protected FunctionConfigurationSource bondFunctions() { final BondFunctions.Defaults defaults = new BondFunctions.Defaults(); setBondFunctionDefaults(defaults); return getRepository(defaults); } protected void 
setCDSFunctionDefaults(final CurrencyInfo i, final CreditFunctions.Defaults.CurrencyInfo defaults) { defaults.setCurveCalculationConfig(i.getCurveConfiguration("model/credit/yield")); defaults.setCurveName(i.getCurveName("model/credit/yield")); defaults.setCurveCalculationMethod(i.getCurveCalculationMethodName("model/credit/yield")); defaults.setCurveCalculationMethod(i.getCurveCalculationMethodName("model/credit/hazardrate")); } protected void setCDSFunctionDefaults(final CreditFunctions.Defaults defaults) { defaults.setPerCurrencyInfo(getCurrencyInfo(new Function1<CurrencyInfo, CreditFunctions.Defaults.CurrencyInfo>() { @Override public CreditFunctions.Defaults.CurrencyInfo execute(final CurrencyInfo i) { final CreditFunctions.Defaults.CurrencyInfo d = new CreditFunctions.Defaults.CurrencyInfo(); setCDSFunctionDefaults(i, d); return d; } })); } protected FunctionConfigurationSource cdsFunctions() { final CreditFunctions.Defaults defaults = new CreditFunctions.Defaults(); setCDSFunctionDefaults(defaults); return getRepository(defaults); } protected FunctionConfigurationSource deprecatedFunctions() { return null; } protected void setEquityOptionDefaults(final OptionFunctions.Defaults defaults) { } protected FunctionConfigurationSource equityOptionFunctions() { final OptionFunctions.Defaults defaults = new OptionFunctions.Defaults(); setEquityOptionDefaults(defaults); return getRepository(defaults); } protected void setExternalSensitivitesCalculators(final SensitivitiesFunctions.Calculators calculators) { } protected void setExternalSensitivitiesDefaults(final CurrencyInfo i, final SensitivitiesFunctions.Defaults.CurrencyInfo defaults) { defaults.setCurveConfiguration(i.getCurveConfiguration("model/sensitivities")); } protected void setExternalSensitivitiesDefaults(final SensitivitiesFunctions.Defaults defaults) { defaults.setPerCurrencyInfo(getCurrencyInfo(new Function1<CurrencyInfo, SensitivitiesFunctions.Defaults.CurrencyInfo>() { @Override public SensitivitiesFunctions.Defaults.CurrencyInfo execute(final CurrencyInfo i) { final SensitivitiesFunctions.Defaults.CurrencyInfo d = new SensitivitiesFunctions.Defaults.CurrencyInfo(); setExternalSensitivitiesDefaults(i, d); return d; } })); } protected FunctionConfigurationSource externalSensitivitiesFunctions() { final SensitivitiesFunctions.Calculators calculators = new SensitivitiesFunctions.Calculators(); setExternalSensitivitesCalculators(calculators); final SensitivitiesFunctions.Defaults defaults = new SensitivitiesFunctions.Defaults(); setExternalSensitivitiesDefaults(defaults); return CombiningFunctionConfigurationSource.of(getRepository(calculators), getRepository(defaults)); } protected void setForexDefaults(final CurrencyInfo i, final com.opengamma.financial.analytics.model.forex.defaultproperties.DefaultPropertiesFunctions.CurrencyInfo defaults) { defaults.setCurveConfiguration(i.getCurveConfiguration("model/forex")); defaults.setDiscountingCurve(i.getCurveName("model/forex/discounting")); } protected void setForexDefaults(final CurrencyPairInfo i, final com.opengamma.financial.analytics.model.forex.defaultproperties.DefaultPropertiesFunctions.CurrencyPairInfo defaults) { defaults.setSurfaceName(i.getSurfaceName("model/forex")); defaults.setForwardCurveName(i.getForwardCurveName("model/forex/forward")); } protected void setForexDefaults( final com.opengamma.financial.analytics.model.forex.defaultproperties.DefaultPropertiesFunctions defaults) { defaults.setPerCurrencyInfo( getCurrencyInfo( new Function1<CurrencyInfo, 
com.opengamma.financial.analytics.model.forex.defaultproperties.DefaultPropertiesFunctions.CurrencyInfo>() { @Override public com.opengamma.financial.analytics.model.forex.defaultproperties.DefaultPropertiesFunctions.CurrencyInfo execute( final CurrencyInfo i) { final com.opengamma.financial.analytics.model.forex.defaultproperties.DefaultPropertiesFunctions.CurrencyInfo d = new com.opengamma.financial.analytics.model.forex.defaultproperties.DefaultPropertiesFunctions.CurrencyInfo(); setForexDefaults(i, d); return d; } })); defaults.setPerCurrencyPairInfo(getCurrencyPairInfo( new Function1<CurrencyPairInfo, com.opengamma.financial.analytics.model.forex.defaultproperties.DefaultPropertiesFunctions.CurrencyPairInfo>() { @Override public com.opengamma.financial.analytics.model.forex.defaultproperties.DefaultPropertiesFunctions.CurrencyPairInfo execute( final CurrencyPairInfo i) { final com.opengamma.financial.analytics.model.forex.defaultproperties.DefaultPropertiesFunctions.CurrencyPairInfo d = new com.opengamma.financial.analytics.model.forex.defaultproperties.DefaultPropertiesFunctions.CurrencyPairInfo(); setForexDefaults(i, d); return d; } })); } protected FunctionConfigurationSource forexFunctions() { final com.opengamma.financial.analytics.model.forex.defaultproperties.DefaultPropertiesFunctions defaults = new com.opengamma.financial.analytics.model.forex.defaultproperties.DefaultPropertiesFunctions(); setForexDefaults(defaults); return getRepository(defaults); } protected void setForwardCurveDefaults(final CurrencyInfo i, final ForwardFunctions.Defaults.CurrencyInfo defaults) { defaults.setCurveConfiguration(i.getCurveConfiguration("model/curve/forward")); defaults.setDiscountingCurve(i.getCurveName("model/curve/forward/discounting")); defaults.setForwardCurve(i.getCurveName("model/curve/forward")); } protected void setForwardCurveDefaults(final CurrencyPairInfo i, final ForwardFunctions.Defaults.CurrencyPairInfo defaults) { defaults.setCurveName(i.getCurveName("model/curve/forward")); } /** * @param defaults * the defaults */ protected void setForwardCurveDefaults(final ForwardFunctions.Defaults defaults) { defaults.setPerCurrencyInfo(getCurrencyInfo(new Function1<CurrencyInfo, ForwardFunctions.Defaults.CurrencyInfo>() { @Override public ForwardFunctions.Defaults.CurrencyInfo execute(final CurrencyInfo i) { final ForwardFunctions.Defaults.CurrencyInfo d = new ForwardFunctions.Defaults.CurrencyInfo(); setForwardCurveDefaults(i, d); return d; } })); defaults.setPerCurrencyPairInfo(getCurrencyPairInfo(new Function1<CurrencyPairInfo, ForwardFunctions.Defaults.CurrencyPairInfo>() { @Override public ForwardFunctions.Defaults.CurrencyPairInfo execute(final CurrencyPairInfo i) { final ForwardFunctions.Defaults.CurrencyPairInfo d = new ForwardFunctions.Defaults.CurrencyPairInfo(); setForwardCurveDefaults(i, d); return d; } })); } /** * @return the function configuration */ protected FunctionConfigurationSource forwardCurveFunctions() { final ForwardFunctions.Defaults defaults = new ForwardFunctions.Defaults(); setForwardCurveDefaults(defaults); return getRepository(defaults); } protected void setFutureDefaults(final CurrencyInfo i, final FutureFunctions.Deprecated.CurrencyInfo defaults) { defaults.setCurveConfiguration(i.getCurveConfiguration("model/future")); } protected void setFutureDefaults(final FutureFunctions.Deprecated defaults) { defaults.setPerCurrencyInfo(getCurrencyInfo(new Function1<CurrencyInfo, FutureFunctions.Deprecated.CurrencyInfo>() { @Override public 
FutureFunctions.Deprecated.CurrencyInfo execute(final CurrencyInfo i) { final FutureFunctions.Deprecated.CurrencyInfo d = new FutureFunctions.Deprecated.CurrencyInfo(); setFutureDefaults(i, d); return d; } })); } protected void setFutureFunctionCalculators(final FutureFunctions.Calculators calculators) { calculators.setClosingPriceField(getMark2MarketField()); } protected FunctionConfigurationSource futureFunctions() { final FutureFunctions.Calculators calculators = new FutureFunctions.Calculators(); setFutureFunctionCalculators(calculators); final FutureFunctions.Deprecated defaults = new FutureFunctions.Deprecated(); setFutureDefaults(defaults); return CombiningFunctionConfigurationSource.of(getRepository(calculators), getRepository(defaults)); } protected void setFutureOptionDefaults(final CurrencyInfo i, final FutureOptionFunctions.Defaults.CurrencyInfo defaults) { defaults.setCurveName(i.getCurveName("model/futureoption")); defaults.setCurveCalculationConfig(i.getCurveConfiguration("model/futureoption")); defaults.setSurfaceName(i.getSurfaceName("model/futureoption")); defaults.setForwardCurveName(i.getForwardCurveName("model/futureoption")); String v = i.getForwardCurveCalculationMethod("model/futureoption"); if (v != null) { defaults.setForwardCurveCalculationMethodName(v); } v = i.getSurfaceCalculationMethod("model/futureoption"); if (v != null) { defaults.setSurfaceCalculationMethod(v); } } protected void setFutureOptionDefaults(final FutureOptionFunctions.Defaults defaults) { defaults.setPerCurrencyInfo(getCurrencyInfo(new Function1<CurrencyInfo, FutureOptionFunctions.Defaults.CurrencyInfo>() { @Override public FutureOptionFunctions.Defaults.CurrencyInfo execute(final CurrencyInfo i) { final FutureOptionFunctions.Defaults.CurrencyInfo d = new FutureOptionFunctions.Defaults.CurrencyInfo(); setFutureOptionDefaults(i, d); return d; } })); } protected FunctionConfigurationSource futureOptionFunctions() { final FutureOptionFunctions.Defaults defaults = new FutureOptionFunctions.Defaults(); setFutureOptionDefaults(defaults); return getRepository(defaults); } protected FunctionConfigurationSource curveFunctions() { final CurveFunctions.Defaults defaults = new CurveFunctions.Defaults(); setCurveDefaults(defaults); return getRepository(defaults); } protected void setCurveDefaults(final CurveFunctions.Defaults defaults) { defaults.setAbsoluteTolerance(_absoluteTolerance); defaults.setRelativeTolerance(_relativeTolerance); defaults.setMaximumIterations(_maxIterations); } protected void setMultiCurvePricingDefaults(final MultiCurvePricingFunctions.Defaults defaults) { defaults.setAbsoluteTolerance(_absoluteTolerance); defaults.setRelativeTolerance(_relativeTolerance); defaults.setMaximumIterations(_maxIterations); } protected void setIRFutureOptionDefaults(final CurrencyInfo i, final IRFutureOptionFunctions.Defaults.CurrencyInfo defaults) { defaults.setCurveConfiguration(i.getCurveConfiguration("model/irfutureoption")); defaults.setSurfaceName(i.getSurfaceName("model/irfutureoption")); defaults.setCurveName(i.getCurveName("model/irfutureoption")); } protected void setIRFutureOptionDefaults(final IRFutureOptionFunctions.Defaults defaults) { defaults.setPerCurrencyInfo(getCurrencyInfo(new Function1<CurrencyInfo, IRFutureOptionFunctions.Defaults.CurrencyInfo>() { @Override public IRFutureOptionFunctions.Defaults.CurrencyInfo execute(final CurrencyInfo i) { final IRFutureOptionFunctions.Defaults.CurrencyInfo d = new IRFutureOptionFunctions.Defaults.CurrencyInfo(); setIRFutureOptionDefaults(i, 
d); return d; } })); } protected FunctionConfigurationSource irFutureOptionFunctions() { final IRFutureOptionFunctions.Defaults defaults = new IRFutureOptionFunctions.Defaults(); setIRFutureOptionDefaults(defaults); return getRepository(defaults); } protected void setLocalVolatilityDefaults(final CurrencyInfo i, final com.opengamma.financial.analytics.model.volatility.local.defaultproperties.DefaultPropertiesFunctions.CurrencyInfo defaults) { defaults.setCurveConfiguration(i.getCurveConfiguration("model/volatility/local")); defaults.setDiscountingCurve(i.getCurveName("model/volatility/local/discounting")); } protected void setLocalVolatilityDefaults( final com.opengamma.financial.analytics.model.volatility.local.defaultproperties.DefaultPropertiesFunctions defaults) { defaults.setPerCurrencyInfo(getCurrencyInfo( new Function1<CurrencyInfo, com.opengamma.financial.analytics.model.volatility.local.defaultproperties.DefaultPropertiesFunctions.CurrencyInfo>() { @Override public com.opengamma.financial.analytics.model.volatility.local.defaultproperties.DefaultPropertiesFunctions.CurrencyInfo execute( final CurrencyInfo i) { final com.opengamma.financial.analytics.model.volatility.local.defaultproperties.DefaultPropertiesFunctions.CurrencyInfo d = new com.opengamma.financial.analytics.model.volatility.local.defaultproperties.DefaultPropertiesFunctions.CurrencyInfo(); setLocalVolatilityDefaults(i, d); return d; } })); } protected FunctionConfigurationSource localVolatilityFunctions() { final com.opengamma.financial.analytics.model.volatility.local.defaultproperties.DefaultPropertiesFunctions defaults = new com.opengamma.financial.analytics.model.volatility.local.defaultproperties.DefaultPropertiesFunctions(); setLocalVolatilityDefaults(defaults); return getRepository(defaults); } protected FunctionConfigurationSource multicurvePricingFunctions() { final MultiCurvePricingFunctions.Defaults defaults = new MultiCurvePricingFunctions.Defaults(); setMultiCurvePricingDefaults(defaults); return getRepository(defaults); } protected void setPNLFunctionCalculators(final PNLFunctions.Calculators calculators) { calculators.setCostOfCarryField(getCostOfCarryField()); calculators.setMark2MarketField(getMark2MarketField()); } protected void setPNLFunctionDefaults(final CurrencyInfo i, final PNLFunctions.Defaults.CurrencyInfo defaults) { defaults.setCurveConfiguration(i.getCurveConfiguration("model/pnl")); defaults.setDiscountingCurve(i.getCurveName("model/pnl/discounting")); } protected void setPNLFunctionDefaults(final CurrencyPairInfo i, final PNLFunctions.Defaults.CurrencyPairInfo defaults) { defaults.setSurfaceName(i.getSurfaceName("model/pnl")); } protected void setPNLFunctionDefaults(final PNLFunctions.Defaults defaults) { defaults.setPerCurrencyInfo(getCurrencyInfo(new Function1<CurrencyInfo, PNLFunctions.Defaults.CurrencyInfo>() { @Override public PNLFunctions.Defaults.CurrencyInfo execute(final CurrencyInfo i) { final PNLFunctions.Defaults.CurrencyInfo d = new PNLFunctions.Defaults.CurrencyInfo(); setPNLFunctionDefaults(i, d); return d; } })); defaults.setPerCurrencyPairInfo(getCurrencyPairInfo(new Function1<CurrencyPairInfo, PNLFunctions.Defaults.CurrencyPairInfo>() { @Override public PNLFunctions.Defaults.CurrencyPairInfo execute(final CurrencyPairInfo i) { final PNLFunctions.Defaults.CurrencyPairInfo d = new PNLFunctions.Defaults.CurrencyPairInfo(); setPNLFunctionDefaults(i, d); return d; } })); } protected FunctionConfigurationSource pnlFunctions() { final PNLFunctions.Calculators calculators = new 
PNLFunctions.Calculators(); setPNLFunctionCalculators(calculators); final PNLFunctions.Defaults defaults = new PNLFunctions.Defaults(); setPNLFunctionDefaults(defaults); return CombiningFunctionConfigurationSource.of(getRepository(calculators), getRepository(defaults)); } protected void setPortfolioTheoryCalculators(final PortfolioTheoryFunctions.Calculators calculators) { } protected void setPortfolioTheoryDefaults(final PortfolioTheoryFunctions.Defaults defaults) { } protected FunctionConfigurationSource portfolioTheoryFunctions() { final PortfolioTheoryFunctions.Calculators calculators = new PortfolioTheoryFunctions.Calculators(); setPortfolioTheoryCalculators(calculators); final PortfolioTheoryFunctions.Defaults defaults = new PortfolioTheoryFunctions.Defaults(); setPortfolioTheoryDefaults(defaults); return CombiningFunctionConfigurationSource.of(getRepository(calculators), getRepository(defaults)); } protected void setVaRDefaults(final VaRFunctions.Defaults defaults) { } protected FunctionConfigurationSource varFunctions() { final VaRFunctions.Defaults defaults = new VaRFunctions.Defaults(); setVaRDefaults(defaults); return getRepository(defaults); } protected void setVolatilitySurfaceDefaults( final com.opengamma.financial.analytics.model.volatility.surface.SurfaceFunctions.Defaults defaults) { } protected void setVolatilitySurfaceBlackDefaults(final CurrencyInfo i, final com.opengamma.financial.analytics.model.volatility.surface.black.defaultproperties.DefaultPropertiesFunctions.CurrencyInfo defaults) { defaults.setCurveName(i.getForwardCurveName("model/volatility/surface/black")); final String v = i.getForwardCurveCalculationMethod("model/volatility/surface/black"); if (v != null) { defaults.setCurveCalculationMethod(v); } defaults.setSurfaceName(i.getSurfaceName("model/volatility/surface/black")); } protected void setVolatilitySurfaceBlackDefaults(final CurrencyPairInfo i, final com.opengamma.financial.analytics.model.volatility.surface.black.defaultproperties.DefaultPropertiesFunctions.CurrencyPairInfo defaults) { defaults.setCurveName(i.getCurveName("model/volatility/surface/black")); final String v = i.getCurveCalculationMethod("model/volatility/surface/black"); if (v != null) { defaults.setCurveCalculationMethod(v); } defaults.setSurfaceName(i.getSurfaceName("model/volatility/surface/black")); } protected void setVolatilitySurfaceBlackDefaults( final com.opengamma.financial.analytics.model.volatility.surface.black.defaultproperties.DefaultPropertiesFunctions defaults) { defaults.setPerCurrencyInfo(getCurrencyInfo( new Function1<CurrencyInfo, com.opengamma.financial.analytics.model.volatility.surface.black.defaultproperties.DefaultPropertiesFunctions.CurrencyInfo>() { @Override public com.opengamma.financial.analytics.model.volatility.surface.black.defaultproperties.DefaultPropertiesFunctions.CurrencyInfo execute( final CurrencyInfo i) { final com.opengamma.financial.analytics.model.volatility.surface.black.defaultproperties.DefaultPropertiesFunctions.CurrencyInfo d = new com.opengamma.financial.analytics.model.volatility.surface.black.defaultproperties.DefaultPropertiesFunctions.CurrencyInfo(); setVolatilitySurfaceBlackDefaults(i, d); return d; } })); defaults.setPerCurrencyPairInfo(getCurrencyPairInfo( new Function1<CurrencyPairInfo, com.opengamma.financial.analytics.model.volatility.surface.black.defaultproperties.DefaultPropertiesFunctions.CurrencyPairInfo>() { @Override public 
com.opengamma.financial.analytics.model.volatility.surface.black.defaultproperties.DefaultPropertiesFunctions.CurrencyPairInfo execute( final CurrencyPairInfo i) { final com.opengamma.financial.analytics.model.volatility.surface.black.defaultproperties.DefaultPropertiesFunctions.CurrencyPairInfo d = new com.opengamma.financial.analytics.model.volatility.surface.black.defaultproperties.DefaultPropertiesFunctions.CurrencyPairInfo(); setVolatilitySurfaceBlackDefaults(i, d); return d; } })); } protected void setVolatilitySurfaceDefaults( final com.opengamma.financial.analytics.volatility.surface.SurfaceFunctions.Defaults defaults) { } protected FunctionConfigurationSource volatilitySurfaceFunctions() { final com.opengamma.financial.analytics.model.volatility.surface.SurfaceFunctions.Defaults d1 = new com.opengamma.financial.analytics.model.volatility.surface.SurfaceFunctions.Defaults(); setVolatilitySurfaceDefaults(d1); final com.opengamma.financial.analytics.model.volatility.surface.black.defaultproperties.DefaultPropertiesFunctions d2 = new com.opengamma.financial.analytics.model.volatility.surface.black.defaultproperties.DefaultPropertiesFunctions(); setVolatilitySurfaceBlackDefaults(d2); final com.opengamma.financial.analytics.volatility.surface.SurfaceFunctions.Defaults d3 = new com.opengamma.financial.analytics.volatility.surface.SurfaceFunctions.Defaults(); setVolatilitySurfaceDefaults(d3); return CombiningFunctionConfigurationSource.of(getRepository(d1), getRepository(d2), getRepository(d3)); } @Override protected FunctionConfigurationSource createObject() { return CombiningFunctionConfigurationSource.of(super.createObject(), bondFunctions(), cdsFunctions(), deprecatedFunctions(), equityOptionFunctions(), externalSensitivitiesFunctions(), forexFunctions(), forwardCurveFunctions(), futureFunctions(), futureOptionFunctions(), irFutureOptionFunctions(), localVolatilityFunctions(), pnlFunctions(), portfolioTheoryFunctions(), varFunctions(), volatilitySurfaceFunctions()); } }
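// Illustrative sketch (not part of the original sources; the subclass and base-class names are hypothetical):
// a site-specific configuration could narrow the default currency universe and tighten the curve root-finder
// settings purely through the hooks defined in the class above.
//
// public class ExampleSiteFunctionConfiguration extends StandardFunctionConfiguration { // base-class name assumed
//     @Override
//     protected void setDefaultCurrencyInfo() {
//         // Register only the currencies this deployment actually prices.
//         setCurrencyInfo("USD", usdCurrencyInfo());
//         setCurrencyInfo("EUR", eurCurrencyInfo());
//         setCurrencyInfo("GBP", gbpCurrencyInfo());
//     }
// }
//
// // The tolerance/iteration setters feed setCurveDefaults and setMultiCurvePricingDefaults; values below are illustrative only.
// // config.setAbsoluteTolerance(1e-9); config.setRelativeTolerance(1e-9); config.setMaximumIterations(1000);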
package com.github.shemhazai.mprw.domain; import com.github.shemhazai.mprw.repo.UserRepository; import com.github.shemhazai.mprw.utils.HashGenerator; import com.github.shemhazai.mprw.utils.UserValidator; public class UserUpdateRequest { private String loginEmail; private String loginPassword; private String firstName; private String lastName; private String email; private String password; private String phone; private Boolean phoneAlert; private Boolean mailAlert; public String getLoginEmail() { return loginEmail; } public String getLoginPassword() { return loginPassword; } public String getFirstName() { return firstName; } public String getLastName() { return lastName; } public String getEmail() { return email; } public String getPassword() { return password; } public String getPhone() { return phone; } public Boolean getPhoneAlert() { return phoneAlert; } public Boolean getMailAlert() { return mailAlert; } public void setLoginEmail(String loginEmail) { this.loginEmail = loginEmail; } public void setLoginPassword(String loginPassword) { this.loginPassword = loginPassword; } public void setFirstName(String firstName) { this.firstName = firstName; } public void setLastName(String lastName) { this.lastName = lastName; } public void setEmail(String email) { this.email = email; } public void setPassword(String password) { this.password = password; } public void setPhone(String phone) { this.phone = phone; } public void setPhoneAlert(Boolean phoneAlert) { this.phoneAlert = phoneAlert; } public void setMailAlert(Boolean mailAlert) { this.mailAlert = mailAlert; } public int countFieldsToUpdate() { int fieldsToUpdate = 0; if (email != null) fieldsToUpdate++; if (password != null) fieldsToUpdate++; if (firstName != null) fieldsToUpdate++; if (lastName != null) fieldsToUpdate++; if (phone != null) fieldsToUpdate++; if (phoneAlert != null) fieldsToUpdate++; if (mailAlert != null) fieldsToUpdate++; return fieldsToUpdate; } public int countValidatedFields() { UserValidator validator = new UserValidator(); int validatedFields = 0; if (email != null && validator.validateEmail(email)) validatedFields++; if (password != null && validator.validatePassword(password)) validatedFields++; if (firstName != null && validator.validateName(firstName)) validatedFields++; if (lastName != null && validator.validateName(lastName)) validatedFields++; if (phone != null && validator.validatePhone(phone)) validatedFields++; if (mailAlert != null) validatedFields++; if (phoneAlert != null) validatedFields++; return validatedFields; } public void updateUserFromRepository(UserRepository userRepository) { User user = userRepository.selectUserByEmail(loginEmail); if (email != null) userRepository.updateUserEmail(user.getId(), email); if (password != null) { HashGenerator hasher = new HashGenerator(); userRepository.updateUserPasswordHash(user.getId(), hasher.hash(password)); } if (firstName != null) userRepository.updateUserFirstName(user.getId(), firstName); if (lastName != null) userRepository.updateUserLastName(user.getId(), lastName); if (phone != null) userRepository.updateUserPhone(user.getId(), phone); if (phoneAlert != null) userRepository.updateUserPhoneAlert(user.getId(), phoneAlert); if (mailAlert != null) userRepository.updateUserMailAlert(user.getId(), mailAlert); } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + ((email == null) ? 0 : email.hashCode()); result = prime * result + ((firstName == null) ? 
0 : firstName.hashCode()); result = prime * result + ((lastName == null) ? 0 : lastName.hashCode()); result = prime * result + ((loginEmail == null) ? 0 : loginEmail.hashCode()); result = prime * result + ((loginPassword == null) ? 0 : loginPassword.hashCode()); result = prime * result + ((mailAlert == null) ? 0 : mailAlert.hashCode()); result = prime * result + ((password == null) ? 0 : password.hashCode()); result = prime * result + ((phone == null) ? 0 : phone.hashCode()); result = prime * result + ((phoneAlert == null) ? 0 : phoneAlert.hashCode()); return result; } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; UserUpdateRequest other = (UserUpdateRequest) obj; if (email == null) { if (other.email != null) return false; } else if (!email.equals(other.email)) return false; if (firstName == null) { if (other.firstName != null) return false; } else if (!firstName.equals(other.firstName)) return false; if (lastName == null) { if (other.lastName != null) return false; } else if (!lastName.equals(other.lastName)) return false; if (loginEmail == null) { if (other.loginEmail != null) return false; } else if (!loginEmail.equals(other.loginEmail)) return false; if (loginPassword == null) { if (other.loginPassword != null) return false; } else if (!loginPassword.equals(other.loginPassword)) return false; if (mailAlert == null) { if (other.mailAlert != null) return false; } else if (!mailAlert.equals(other.mailAlert)) return false; if (password == null) { if (other.password != null) return false; } else if (!password.equals(other.password)) return false; if (phone == null) { if (other.phone != null) return false; } else if (!phone.equals(other.phone)) return false; if (phoneAlert == null) { if (other.phoneAlert != null) return false; } else if (!phoneAlert.equals(other.phoneAlert)) return false; return true; } @Override public String toString() { return "UserUpdateRequest [loginEmail=" + loginEmail + ", loginPassword=" + loginPassword + ", firstName=" + firstName + ", lastName=" + lastName + ", email=" + email + ", password=" + password + ", phone=" + phone + ", phoneAlert=" + phoneAlert + ", mailAlert=" + mailAlert + "]"; } }
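// Illustrative usage sketch (hypothetical caller, not part of the original source): countFieldsToUpdate()
// and countValidatedFields() appear intended to be compared before applying an update, so a controller
// might only call updateUserFromRepository when every supplied field passes validation.
//
// UserUpdateRequest request = ...;                      // populated from the incoming request body
// int supplied = request.countFieldsToUpdate();
// if (supplied > 0 && supplied == request.countValidatedFields()) {
//     request.updateUserFromRepository(userRepository); // userRepository is a UserRepository instance
// } else {
//     // reject the request: at least one supplied field failed validation
// }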
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.jena.sparql.algebra.optimize; import java.util.* ; import org.apache.jena.JenaRuntime ; import org.apache.jena.atlas.lib.Pair ; import org.apache.jena.graph.Node ; import org.apache.jena.query.ARQ ; import org.apache.jena.rdf.model.impl.Util ; import org.apache.jena.sparql.algebra.Op ; import org.apache.jena.sparql.algebra.OpVars ; import org.apache.jena.sparql.algebra.TransformCopy ; import org.apache.jena.sparql.algebra.op.* ; import org.apache.jena.sparql.core.Substitute ; import org.apache.jena.sparql.core.Var ; import org.apache.jena.sparql.core.VarExprList ; import org.apache.jena.sparql.expr.* ; /** * A transform that aims to optimize queries where there is an equality * constraint on a variable to speed up evaluation e.g * * <pre> * SELECT * * WHERE * { * ?s ?p ?o . * FILTER(?s = &lt;http://subject&gt;) * } * </pre> * * Would transform to the following: * * <pre> * SELECT * * WHERE * { * &lt;http://subject&gt; ?p ?o . * BIND(&lt;http://subject&gt; AS ?s) * } * </pre> * * <h3>Applicability</h3> * <p> * This optimizer is conservative in that it only makes the optimization where * the equality constraint is against a non-literal as otherwise substituting * the value changes the query semantics because it switches from value equality * to the more restrictive term equality. The optimization is safe for * non-literals because for those value and term equality are equivalent (in * fact value equality is defined to be term equality). * </p> * <p> * There are also various nested algebra structures that can make the * optimization unsafe and so it does not take place if any of those situations * is detected. * </p> */ public class TransformFilterEquality extends TransformCopy { // The approach taken for { OPTIONAL{} OPTIONAL{} } is more general ... // and better? Still need to be careful of double-nested OPTIONALS as // intermediates of a different value can block overall results so // don't mask immediately. public TransformFilterEquality() { } @Override public Op transform(OpFilter opFilter, Op subOp) { Op op = apply(opFilter.getExprs(), subOp); if (op == null) return super.transform(opFilter, subOp); return op; } private static Op apply(ExprList exprs, Op subOp) { // ---- Find and extract any equality filters. 
Pair<List<Pair<Var, NodeValue>>, ExprList> p = preprocessFilterEquality(exprs); if (p == null || p.getLeft().size() == 0) return null; List<Pair<Var, NodeValue>> equalities = p.getLeft(); Collection<Var> varsMentioned = varsMentionedInEqualityFilters(equalities); ExprList remaining = p.getRight(); // If any of the conditions overlap the optimization is unsafe // (the query is also likely incorrect but that isn't our problem) // TODO There is actually a special case here, if the equality // constraints are conflicting then we can special case to table empty. // At the very least we should check for the case where an equality // condition is duplicated if (varsMentioned.size() < equalities.size()) return null; // ---- Check if the subOp is the right shape to transform. // Special case : deduce that the filter will always "eval unbound" // hence eliminate all rows. Return the empty table. if (testSpecialCaseUnused(subOp, equalities, remaining)) // JENA-1184 // If this is run after join-strategy, then scope is not a simple matter // of looking at the subOp. But running before join-strategy // causes other code to not optimize (presumably because it was developed // to run after join-strategy, probably by coincidence) // @Test TestTransformFilters.equality04 //return OpTable.empty(); // JENA-1184 workaround. Return unchanged. return null ; // Special case: the deep left op of an OpConditional/OpLeftJoin is the unit table. // This is the case of: // { OPTIONAL{P1} OPTIONAL{P2} ... FILTER(?x = :x) } if (testSpecialCase1(subOp, equalities, remaining)) { // Find backbone of ops List<Op> ops = extractOptionals(subOp); ops = processSpecialCase1(ops, equalities); // Put back together Op op = rebuild((Op2) subOp, ops); // Put all filters - either we optimized, or we left alone. // Either way, the complete set of filter expressions. op = OpFilter.filterBy(exprs, op); return op; } // ---- Transform Op op = subOp; if (!safeToTransform(varsMentioned, op)) return null; for (Pair<Var, NodeValue> equalityTest : equalities) op = processFilterWorker(op, equalityTest.getLeft(), equalityTest.getRight()); // ---- Place any filter expressions around the processed sub op. if (remaining.size() > 0) op = OpFilter.filterBy(remaining, op); return op; } // --- find and extract private static Pair<List<Pair<Var, NodeValue>>, ExprList> preprocessFilterEquality(ExprList exprs) { List<Pair<Var, NodeValue>> exprsFilterEquality = new ArrayList<>(); ExprList exprsOther = new ExprList(); for (Expr e : exprs.getList()) { Pair<Var, NodeValue> p = preprocess(e); if (p != null) exprsFilterEquality.add(p); else exprsOther.add(e); } if (exprsFilterEquality.size() == 0) return null; return Pair.create(exprsFilterEquality, exprsOther); } private static Pair<Var, NodeValue> preprocess(Expr e) { if (!(e instanceof E_Equals) && !(e instanceof E_SameTerm)) return null; ExprFunction2 eq = (ExprFunction2) e; Expr left = eq.getArg1(); Expr right = eq.getArg2(); Var var = null; NodeValue constant = null; if (left.isVariable() && right.isConstant()) { var = left.asVar(); constant = right.getConstant(); } else if (right.isVariable() && left.isConstant()) { var = right.asVar(); constant = left.getConstant(); } if (var == null || constant == null) return null; if ( constant.isIRI() || constant.isBlank() ) return Pair.create(var, constant); // Literals. Without knowing more, .equals is not the same as // SPARQL "=" (or .sameValueAs). // In RDF 1.1, it is true of xsd:strings. if (e instanceof E_SameTerm) { if ( ! 
JenaRuntime.isRDF11 ) { // Corner case: sameTerm is false for string/plain literal, // but true in the in-memory graph for graph matching. // All becomes a non-issue in RDF 1.1 if (!ARQ.isStrictMode() && constant.isString()) return null; } return Pair.create(var, constant); } // At this point, (e instanceof E_Equals) // 'constant' can be a folded expression - no node yet - so use asNode. Node n = constant.asNode() ; if ( JenaRuntime.isRDF11 ) { // RDF 1.1 : simple literals are xsd:strings. if ( Util.isSimpleString(n) ) return Pair.create(var, constant); } // Otherwise, lexical forms are not 1-1 with values so not safe. // e.g. +001 and 1 are both integer value but different terms. return null ; } private static Collection<Var> varsMentionedInEqualityFilters(List<Pair<Var, NodeValue>> equalities) { Set<Var> vars = new HashSet<>(); for (Pair<Var, NodeValue> p : equalities) vars.add(p.getLeft()); return vars; } private static boolean safeToTransform(Collection<Var> varsEquality, Op op) { // Structure as a visitor? if (op instanceof OpBGP || op instanceof OpQuadPattern) return true; if (op instanceof OpPath ) return true; if (op instanceof OpFilter) { OpFilter opf = (OpFilter) op; // Expressions are always safe transform by substitution. return safeToTransform(varsEquality, opf.getSubOp()); } // This will be applied also in sub-calls of the Transform but queries // are very rarely so deep that it matters. if (op instanceof OpSequence) { OpN opN = (OpN) op; for (Op subOp : opN.getElements()) { if (!safeToTransform(varsEquality, subOp)) return false; } return true; } if (op instanceof OpJoin || op instanceof OpUnion) { Op2 op2 = (Op2) op; return safeToTransform(varsEquality, op2.getLeft()) && safeToTransform(varsEquality, op2.getRight()); } // Not safe unless filter variables are mentioned on the LHS. if (op instanceof OpConditional || op instanceof OpLeftJoin) { Op2 opleftjoin = (Op2) op; if (!safeToTransform(varsEquality, opleftjoin.getLeft()) || !safeToTransform(varsEquality, opleftjoin.getRight())) return false; // Not only must the left and right be safe to transform, // but the equality variable must be known to be always set. // If the varsLeft are disjoint from assigned vars, // we may be able to push assign down right // (this generalises the unit table case specialcase1) // Needs more investigation. Op opLeft = opleftjoin.getLeft(); Set<Var> varsLeft = OpVars.visibleVars(opLeft); if (varsLeft.containsAll(varsEquality)) return true; return false; } if (op instanceof OpGraph) { OpGraph opg = (OpGraph) op; return safeToTransform(varsEquality, opg.getSubOp()); } // Subquery - assume scope rewriting has already been applied. if (op instanceof OpModifier) { // ORDER BY? OpModifier opMod = (OpModifier) op; if (opMod instanceof OpProject) { OpProject opProject = (OpProject) op; // Writing "SELECT ?var" for "?var" -> a value would need // AS-ification. for (Var v : opProject.getVars()) { if (varsEquality.contains(v)) return false; } } return safeToTransform(varsEquality, opMod.getSubOp()); } if (op instanceof OpGroup) { OpGroup opGroup = (OpGroup) op; VarExprList varExprList = opGroup.getGroupVars(); return safeToTransform(varsEquality, varExprList) && safeToTransform(varsEquality, opGroup.getSubOp()); } if (op instanceof OpTable) { OpTable opTable = (OpTable) op; if (opTable.isJoinIdentity()) return true; } // Op1 - OpGroup // Op1 - OpOrder // Op1 - OpAssign, OpExtend // Op1 - OpFilter - done. // Op1 - OpLabel - easy // Op1 - OpService - no. 
return false; } private static boolean safeToTransform(Collection<Var> varsEquality, VarExprList varsExprList) { // If the named variable is used, unsafe to rewrite. return Collections.disjoint(varsExprList.getVars(), varsEquality); } // -- A special case private static boolean testSpecialCaseUnused(Op op, List<Pair<Var, NodeValue>> equalities, ExprList remaining) { // If the op does not contain the var at all, for some equality // then the filter expression will be "eval unbound" i.e. false. // We can return empty table. Set<Var> patternVars = OpVars.visibleVars(op); for (Pair<Var, NodeValue> p : equalities) { if (!patternVars.contains(p.getLeft())) return true; } return false; } // If a sequence of OPTIONALS, and nothing prior to the first, we end up // with a unit table on the left side of a nest of LeftJoin/conditionals. private static boolean testSpecialCase1(Op op, List<Pair<Var, NodeValue>> equalities, ExprList remaining) { while (op instanceof OpConditional || op instanceof OpLeftJoin) { Op2 opleftjoin2 = (Op2) op; op = opleftjoin2.getLeft(); } return isUnitTable(op); } private static List<Op> extractOptionals(Op op) { List<Op> chain = new ArrayList<>(); while (op instanceof OpConditional || op instanceof OpLeftJoin) { Op2 opleftjoin2 = (Op2) op; chain.add(opleftjoin2.getRight()); op = opleftjoin2.getLeft(); } return chain; } private static List<Op> processSpecialCase1(List<Op> ops, List<Pair<Var, NodeValue>> equalities) { List<Op> ops2 = new ArrayList<>(); Collection<Var> vars = varsMentionedInEqualityFilters(equalities); for (Op op : ops) { Op op2 = op; if (safeToTransform(vars, op)) { for (Pair<Var, NodeValue> p : equalities) op2 = processFilterWorker(op, p.getLeft(), p.getRight()); } ops2.add(op2); } return ops2; } private static Op rebuild(Op2 subOp, List<Op> ops) { Op chain = OpTable.unit(); for (Op op : ops) { chain = subOp.copy(chain, op); } return chain; } private static boolean isUnitTable(Op op) { if (op instanceof OpTable) { if (((OpTable) op).isJoinIdentity()) return true; } return false; } // ---- Transformation private static Op processFilterWorker(Op op, Var var, NodeValue constant) { return subst(op, var, constant); } private static Op subst(Op subOp, Var var, NodeValue nv) { Op op = Substitute.substitute(subOp, var, nv.asNode()); return OpAssign.assign(op, var, nv); } // Helper for TransformFilterDisjunction. /** Apply the FilterEquality transform or return null if no change */ static Op processFilter(Expr e, Op subOp) { return apply(new ExprList(e), subOp); } }
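// Illustrative usage sketch (not part of the original source): TransformFilterEquality extends TransformCopy,
// so one way to apply it directly is through the algebra Transformer. The query text below is illustrative only.
//
// import org.apache.jena.query.QueryFactory;
// import org.apache.jena.sparql.algebra.Algebra;
// import org.apache.jena.sparql.algebra.Op;
// import org.apache.jena.sparql.algebra.Transformer;
//
// Op op = Algebra.compile(QueryFactory.create(
//         "SELECT * WHERE { ?s ?p ?o . FILTER(?s = <http://subject>) }"));
// Op optimized = Transformer.transform(new TransformFilterEquality(), op);
// // 'optimized' substitutes <http://subject> for ?s in the pattern and re-binds ?s via an assign node.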
package org.hisp.dhis.interpretation.impl; /* * Copyright (c) 2004-2017, University of Oslo * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * Neither the name of the HISP project nor the names of its contributors may * be used to endorse or promote products derived from this software without * specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ import org.hisp.dhis.chart.Chart; import org.hisp.dhis.common.CodeGenerator; import org.hisp.dhis.interpretation.Interpretation; import org.hisp.dhis.interpretation.InterpretationComment; import org.hisp.dhis.interpretation.InterpretationService; import org.hisp.dhis.interpretation.InterpretationStore; import org.hisp.dhis.mapping.Map; import org.hisp.dhis.period.PeriodService; import org.hisp.dhis.reporttable.ReportTable; import org.hisp.dhis.user.CurrentUserService; import org.hisp.dhis.user.User; import org.hisp.dhis.user.UserService; import org.springframework.transaction.annotation.Isolation; import org.springframework.transaction.annotation.Transactional; import java.util.Date; import java.util.List; /** * @author Lars Helge Overland */ @Transactional public class DefaultInterpretationService implements InterpretationService { // ------------------------------------------------------------------------- // Dependencies // ------------------------------------------------------------------------- private InterpretationStore interpretationStore; public void setInterpretationStore( InterpretationStore interpretationStore ) { this.interpretationStore = interpretationStore; } private CurrentUserService currentUserService; public void setCurrentUserService( CurrentUserService currentUserService ) { this.currentUserService = currentUserService; } private UserService userService; public void setUserService( UserService userService ) { this.userService = userService; } private PeriodService periodService; public void setPeriodService( PeriodService periodService ) { this.periodService = periodService; } // ------------------------------------------------------------------------- // InterpretationService implementation // ------------------------------------------------------------------------- @Override public int saveInterpretation( Interpretation interpretation ) { User user = currentUserService.getCurrentUser(); if ( interpretation != null ) { if ( 
user != null ) { interpretation.setUser( user ); } if ( interpretation.getPeriod() != null ) { interpretation.setPeriod( periodService.reloadPeriod( interpretation.getPeriod() ) ); } interpretation.updateSharing(); } return interpretationStore.save( interpretation ); } @Override public Interpretation getInterpretation( int id ) { return interpretationStore.get( id ); } @Override public Interpretation getInterpretation( String uid ) { return interpretationStore.getByUid( uid ); } @Override public void updateInterpretation( Interpretation interpretation ) { interpretationStore.update( interpretation ); } @Override public void deleteInterpretation( Interpretation interpretation ) { interpretationStore.delete( interpretation ); } @Override public List<Interpretation> getInterpretations() { return interpretationStore.getAll(); } @Override public List<Interpretation> getInterpretations( Date lastUpdated ) { return interpretationStore.getAllGeLastUpdated( lastUpdated ); } @Override public List<Interpretation> getInterpretations( int first, int max ) { return interpretationStore.getAllOrderedLastUpdated( first, max ); } @Override public InterpretationComment addInterpretationComment( String uid, String text ) { Interpretation interpretation = getInterpretation( uid ); User user = currentUserService.getCurrentUser(); InterpretationComment comment = new InterpretationComment( text ); comment.setLastUpdated( new Date() ); comment.setUid( CodeGenerator.generateCode() ); if ( user != null ) { comment.setUser( user ); } interpretation.addComment( comment ); interpretationStore.update( interpretation ); return comment; } @Override public void updateCurrentUserLastChecked() { User user = currentUserService.getCurrentUser(); user.setLastCheckedInterpretations( new Date() ); userService.updateUser( user ); } @Override public long getNewInterpretationCount() { User user = currentUserService.getCurrentUser(); long count = 0; if ( user != null && user.getLastCheckedInterpretations() != null ) { count = interpretationStore.getCountGeLastUpdated( user.getLastCheckedInterpretations() ); } else { count = interpretationStore.getCount(); } return count; } @Transactional( isolation = Isolation.REPEATABLE_READ ) public boolean likeInterpretation( int id ) { Interpretation interpretation = getInterpretation( id ); if ( interpretation == null ) { return false; } User user = currentUserService.getCurrentUser(); if ( user == null ) { return false; } return interpretation.like( user ); } @Transactional( isolation = Isolation.REPEATABLE_READ ) public boolean unlikeInterpretation( int id ) { Interpretation interpretation = getInterpretation( id ); if ( interpretation == null ) { return false; } User user = currentUserService.getCurrentUser(); if ( user == null ) { return false; } return interpretation.unlike( user ); } @Override public int countMapInterpretations( Map map ) { return interpretationStore.countMapInterpretations( map ); } @Override public int countChartInterpretations( Chart chart ) { return interpretationStore.countChartInterpretations( chart ); } @Override public int countReportTableInterpretations( ReportTable reportTable ) { return interpretationStore.countReportTableInterpretations( reportTable ); } @Override public Interpretation getInterpretationByChart( int id ) { return interpretationStore.getByChartId( id ); } }
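// Illustrative wiring sketch (hypothetical variable names, not part of the original source): the service has no
// constructor arguments, so its four collaborators must be supplied through the setters above before use,
// typically by the Spring container.
//
// DefaultInterpretationService service = new DefaultInterpretationService();
// service.setInterpretationStore(interpretationStore);
// service.setCurrentUserService(currentUserService);
// service.setUserService(userService);
// service.setPeriodService(periodService);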
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. The ASF licenses this file to You * under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. For additional information regarding * copyright in this work, please see the NOTICE file in the top level * directory of this distribution. */ package org.apache.abdera.protocol.server.impl; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.apache.abdera.protocol.Request; import org.apache.abdera.protocol.Resolver; import org.apache.abdera.protocol.server.RequestContext; import org.apache.abdera.protocol.server.Target; import org.apache.abdera.protocol.server.TargetType; /** * <p> * Provides a utility class helpful for determining which type of resource the client is requesting. Each resource type * (e.g. service doc, collection, entry, edit uri, media resource, etc) is assigned a regex pattern. Given the request * URI (path and querystring), this will determine which resource was selected and return an appropriate TargetMatcher. * TargetMatcher is essentially just a simplified version of the java.util.regex.Matcher that also specifies the * Resource Type. * </p> * * <pre> * RequestContext request = ... * RegexTargetResolver tr = new RegexTargetResolver(); * tr.setPattern("/atom",ResourceType.INTROSPECTION) * .setPattern("/atom/([^/#?]+)",ResourceType.COLLECTION) * .setPattern("/atom/([^/#?]+)/([^/#?]+)",ResourceType.ENTRY) * .setPattern("/atom/([^/#?]+)/([^/#?]+)\\?edit",ResourceType.ENTRY_EDIT) * .setPattern("/atom/([^/#?]+)/([^/#?]+)\\?media",ResourceType.MEDIA) * .setPattern("/atom/([^/#?]+)/([^/#?]+)\\?edit-media",ResourceType.MEDIA_EDIT); * * Target target = tr.resolve(request); * System.out.println(target.getType()); * System.out.println(target.getParameter("foo")); * </pre> */ public class RegexTargetResolver implements Resolver<Target> { protected final Map<Pattern, TargetType> patterns; protected final Map<Pattern, String[]> fields; public RegexTargetResolver() { this.patterns = new LinkedHashMap<Pattern, TargetType>(); this.fields = new LinkedHashMap<Pattern, String[]>(); } public RegexTargetResolver(Map<String, TargetType> patterns) { this.patterns = new HashMap<Pattern, TargetType>(); this.fields = new HashMap<Pattern, String[]>(); for (String p : patterns.keySet()) { TargetType type = patterns.get(p); setPattern(p, type); } } public RegexTargetResolver setPattern(String pattern, TargetType type) { return setPattern(pattern, type, new String[0]); } public RegexTargetResolver setPattern(String pattern, TargetType type, String... 
fields) { Pattern p = Pattern.compile(pattern); this.patterns.put(p, type); this.fields.put(p, fields); return this; } public Target resolve(Request request) { RequestContext context = (RequestContext)request; String uri = context.getTargetPath(); for (Pattern pattern : patterns.keySet()) { Matcher matcher = pattern.matcher(uri); if (matcher.matches()) { TargetType type = this.patterns.get(pattern); String[] fields = this.fields.get(pattern); return getTarget(type, context, matcher, fields); } } return null; } protected Target getTarget(TargetType type, RequestContext request, Matcher matcher, String[] fields) { return new RegexTarget(type, request, matcher, fields); } public String toString() { StringBuilder buf = new StringBuilder(); buf.append("Regex Target Resolver:\n"); for (Pattern pattern : patterns.keySet()) { TargetType type = this.patterns.get(pattern); String[] fields = this.fields.get(pattern); buf.append(pattern.toString() + ", Type: " + type + ", Fields: " + Arrays.toString(fields)); } return buf.toString(); } public int hashCode() { final int prime = 31; int result = 1; result = prime * result + ((fields == null) ? 0 : fields.hashCode()); result = prime * result + ((patterns == null) ? 0 : patterns.hashCode()); return result; } public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; final RegexTargetResolver other = (RegexTargetResolver)obj; if (fields == null) { if (other.fields != null) return false; } else if (!fields.equals(other.fields)) return false; if (patterns == null) { if (other.patterns != null) return false; } else if (!patterns.equals(other.patterns)) return false; return true; } public static class RegexTarget extends SimpleTarget implements Target { private static final long serialVersionUID = 165211244926064449L; protected Matcher matcher; protected String[] fields; public RegexTarget(TargetType type, RequestContext context, Matcher matcher, String[] fields) { super(type, context); this.matcher = matcher; this.fields = fields; } public String getParameter(String name) { if (fields == null) return super.getParameter(name); int idx = 0; for (int n = 0; n < fields.length; n++) if (fields[n].equalsIgnoreCase(name)) idx = n + 1; return idx > 0 && idx <= matcher.groupCount() ? matcher.group(idx) : super.getParameter(name); } public String[] getParameterNames() { String[] names = super.getParameterNames(); List<String> list = new ArrayList<String>(); if (names != null) list.addAll(Arrays.asList(names)); if (fields != null) list.addAll(Arrays.asList(fields)); return list.toArray(new String[list.size()]); } @Override public <T> T getMatcher() { return (T)matcher.pattern(); } @Override public int hashCode() { final int PRIME = 31; int result = 1; String m = matcher.group(0); String p = matcher.pattern().pattern(); result = PRIME * result + super.hashCode(); result = PRIME * result + ((m == null) ? 0 : m.hashCode()); result = PRIME * result + ((p == null) ? 0 : p.hashCode()); result = PRIME * result + ((type == null) ? 
0 : type.hashCode()); return result; } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; final RegexTarget other = (RegexTarget)obj; String m = matcher.group(0); String p = matcher.pattern().pattern(); String m2 = other.matcher.group(0); String p2 = other.matcher.pattern().pattern(); if (!super.equals(obj)) return false; if (m == null) { if (m2 != null) return false; } else if (!m.equals(m2)) return false; if (p == null) { if (p2 != null) return false; } else if (!p.equals(p2)) return false; if (type == null) { if (other.type != null) return false; } else if (!type.equals(other.type)) return false; return true; } public String toString() { String m = matcher.group(0); String p = matcher.pattern().pattern(); StringBuilder buf = new StringBuilder(); buf.append("RegexTarget[").append(p).append(" ==> ").append(m).append("] = ").append(type.toString()) .append("\n"); String[] params = getParameterNames(); for (String param : params) { buf.append(" ").append(param).append(" = ").append(getParameter(param)).append("\n"); } return buf.toString(); } public String getIdentity() { return context.getUri().toString(); } } }
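/*
 * A minimal usage sketch for the RegexTargetResolver above, assuming the standard Abdera
 * TargetType constants (TYPE_SERVICE, TYPE_COLLECTION, TYPE_ENTRY) and a RequestContext supplied
 * by the provider. The pattern strings and the "collection"/"entry" field names are illustrative
 * only and are not defined by the class itself.
 */
import org.apache.abdera.protocol.server.RequestContext;
import org.apache.abdera.protocol.server.Target;
import org.apache.abdera.protocol.server.TargetType;
import org.apache.abdera.protocol.server.impl.RegexTargetResolver;

public class RegexTargetResolverExample {
    // The RequestContext is normally handed to the resolver by the Abdera provider.
    public static Target resolve(RequestContext request) {
        RegexTargetResolver resolver = new RegexTargetResolver()
                .setPattern("/atom", TargetType.TYPE_SERVICE)
                // Field names map capture groups to parameters: getParameter("collection") -> group 1.
                .setPattern("/atom/([^/#?]+)", TargetType.TYPE_COLLECTION, "collection")
                .setPattern("/atom/([^/#?]+)/([^/#?]+)", TargetType.TYPE_ENTRY, "collection", "entry");
        return resolver.resolve(request);
    }
}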
/** * Copyright 2011-2021 Asakusa Framework Team. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.asakusafw.m3bp.mirror.jna; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.Consumer; import java.util.function.Function; import java.util.function.Supplier; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.asakusafw.dag.api.processor.ProcessorContext; import com.asakusafw.lang.utils.common.Arguments; import com.asakusafw.lang.utils.common.Invariants; import com.asakusafw.lang.utils.common.Lang; import com.asakusafw.lang.utils.common.Optionals; import com.asakusafw.lang.utils.common.RunnableWithException; import com.asakusafw.m3bp.mirror.ConfigurationMirror; import com.asakusafw.m3bp.mirror.FlowGraphMirror; import com.asakusafw.m3bp.mirror.Movement; import com.asakusafw.m3bp.mirror.PortMirror; import com.asakusafw.m3bp.mirror.VertexMirror; import com.asakusafw.utils.graph.Graph; import com.asakusafw.utils.graph.Graphs; /** * Executes {@link FlowGraphMirror}. */ public class FlowGraphExecutor implements RunnableWithException<Exception> { static final Logger LOG = LoggerFactory.getLogger(FlowGraphExecutor.class); private final ProcessorContext context; private final FlowGraphMirror graph; final ConfigurationMirror configuration; final Function<String, ? extends BufferComparator> comparators; /** * Creates a new instance. * @param context the root context * @param graph the target flow graph * @param configuration the current configuration * @param comparators the comparators */ public FlowGraphExecutor( ProcessorContext context, FlowGraphMirror graph, ConfigurationMirror configuration, Function<String, ? 
extends BufferComparator> comparators) { Arguments.requireNonNull(context); Arguments.requireNonNull(graph); Arguments.requireNonNull(configuration); this.context = context; this.graph = graph; this.configuration = configuration; this.comparators = comparators; } @Override public void run() throws IOException, InterruptedException { Graph<VertexMirror> dependencies = Lang.let(Graphs.newInstance(), g -> graph.getVertices().stream() .peek(g::addNode) .forEach(v -> v.getInputs().stream() .forEach(p -> p.getOpposites().stream() .map(o -> o.getOwner()) .forEach(o -> g.addEdge(v, o))))); try (ThreadPool threads = new ThreadPool(); IO io = new IO()) { for (VertexMirror vertex : Graphs.sortPostOrder(dependencies)) { VertexExecutor executor = new VertexExecutor( context, vertex, io, threads.executor, configuration.getMaxConcurrency()); executor.run(); io.resolve(vertex); } } } private class ThreadPool implements AutoCloseable { final ExecutorService executor; ThreadPool() { AtomicInteger counter = new AtomicInteger(); this.executor = Executors.newFixedThreadPool( configuration.getMaxConcurrency(), r -> Lang.let(new Thread(r), t -> { t.setName(String.format("work-%d", counter.incrementAndGet())); //$NON-NLS-1$ t.setDaemon(true); })); } @Override public void close() { executor.shutdownNow(); } } private class IO implements IoMap, AutoCloseable { final Map<PortMirror, EdgeProcessor> inputs = new HashMap<>(); final Map<PortMirror, List<OutputBufferFragment>> outputs = new HashMap<>(); private final Set<PortMirror> finishedInputs = new HashSet<>(); private final Set<PortMirror> finishedOutputs = new HashSet<>(); IO() { return; } @Override public Supplier<OutputBufferFragment> getOutputSource(PortMirror port) { return () -> new OutputBufferFragment( configuration.getOutputBufferSize(), configuration.getOutputRecordsPerBuffer(), port.hasKey()); } @Override public Consumer<OutputBufferFragment> getOutputSink(PortMirror port) { List<OutputBufferFragment> buffers; synchronized (outputs) { Invariants.require(finishedOutputs.contains(port) == false); buffers = outputs.computeIfAbsent(port, p -> Collections.synchronizedList(new ArrayList<>())); } return buffers::add; } @Override public List<InputBufferCursor> getInputSource(PortMirror port) { EdgeProcessor edge; synchronized (inputs) { Invariants.require(finishedInputs.contains(port) == false); Invariants.require(inputs.containsKey(port)); edge = inputs.get(port); } return edge.process(); } void resolve(VertexMirror vertex) { synchronized (inputs) { vertex.getInputs().stream() .filter(p -> p.getMovement() != Movement.NOTHING) .forEach(p -> { Invariants.require(finishedInputs.contains(p) == false); finishedInputs.add(p); inputs.remove(p); }); } Map<PortMirror, List<OutputBufferFragment>> scoped = Lang.let(new HashMap<>(), map -> { synchronized (outputs) { vertex.getOutputs().stream() .filter(p -> p.getMovement() != Movement.NOTHING) .forEach(p -> { Invariants.require(finishedOutputs.contains(p) == false); finishedOutputs.add(p); map.put(p, Optionals.remove(outputs, p).orElse(Collections.emptyList())); }); } }); scoped.forEach(this::resolve); } private void resolve(PortMirror output, List<OutputBufferFragment> results) { synchronized (inputs) { if (LOG.isDebugEnabled()) { LOG.debug("resolving edge: {}.{} ({})", //$NON-NLS-1$ output.getOwner().getName(), output.getName(), output.getMovement()); } for (PortMirror input : output.getOpposites()) { if (LOG.isDebugEnabled()) { LOG.debug(" -> {}.{}", //$NON-NLS-1$ input.getOwner().getName(), input.getName()); } 
Invariants.require(finishedInputs.contains(input) == false); EdgeProcessor processor = inputs.computeIfAbsent(input, p -> { switch (output.getMovement()) { case ONE_TO_ONE: return new MoveEdgeProcessor(); case BROADCAST: return new BroadcastEdgeProcessor(); case SCATTER_GATHER: return new ScatterGatherProcessor( configuration.getPartitionCount(), resolveComparator(output.getValueComparatorName())); default: throw new AssertionError(output); } }); processor.add(results); } } } private BufferComparator resolveComparator(String functionName) { if (functionName == null) { return null; } Invariants.requireNonNull(comparators); return comparators.apply(functionName); } @Override public void close() { return; } } }
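/*
 * A hedged sketch of driving the FlowGraphExecutor above. The ProcessorContext, FlowGraphMirror
 * and ConfigurationMirror are assumed to be provided by the surrounding M3BP engine, and
 * BufferComparator is assumed to live in the same com.asakusafw.m3bp.mirror.jna package; a
 * comparator function returning null means "no value comparator" for the resolved edges.
 */
import java.util.function.Function;

import com.asakusafw.dag.api.processor.ProcessorContext;
import com.asakusafw.m3bp.mirror.ConfigurationMirror;
import com.asakusafw.m3bp.mirror.FlowGraphMirror;
import com.asakusafw.m3bp.mirror.jna.BufferComparator;
import com.asakusafw.m3bp.mirror.jna.FlowGraphExecutor;

public class FlowGraphExecutorExample {
    public static void execute(
            ProcessorContext context,
            FlowGraphMirror graph,
            ConfigurationMirror configuration) throws Exception {
        // No named comparators are registered in this sketch.
        Function<String, BufferComparator> comparators = name -> null;
        new FlowGraphExecutor(context, graph, configuration, comparators).run();
    }
}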
package org.stagemonitor.core; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.LinkedList; import java.util.List; import java.util.ServiceLoader; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; import com.codahale.metrics.MetricRegistry; import com.codahale.metrics.SharedMetricRegistries; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.stagemonitor.core.configuration.Configuration; import org.stagemonitor.core.configuration.source.ConfigurationSource; import org.stagemonitor.core.instrument.MainStagemonitorClassFileTransformer; public final class Stagemonitor { public static final String STAGEMONITOR_PASSWORD = "stagemonitor.password"; private static Logger logger = LoggerFactory.getLogger(Stagemonitor.class); private static Configuration configuration; private static volatile boolean started; private static volatile boolean disabled; private static volatile MeasurementSession measurementSession; private static List<String> pathsOfWidgetMetricTabPlugins = Collections.emptyList(); private static List<String> pathsOfWidgetTabPlugins = Collections.emptyList(); private static Iterable<StagemonitorPlugin> plugins; private static List<Runnable> onShutdownActions = new LinkedList<Runnable>(); static { try { reset(); } catch (Throwable e) { logger.error(e.getMessage(), e); throw new RuntimeException(e); } } private Stagemonitor() { } /** * Just makes sure the static initializer is executed */ public static void init() { // intentionally left blank } public synchronized static void setMeasurementSession(MeasurementSession measurementSession) { if (!getConfiguration(CorePlugin.class).isStagemonitorActive()) { logger.info("stagemonitor is deactivated"); disabled = true; } if (started || disabled) { return; } Stagemonitor.measurementSession = measurementSession; } public static Future<?> startMonitoring() { ExecutorService startupThread = Executors.newSingleThreadExecutor(); try { return startupThread.submit(new Runnable() { @Override public void run() { doStartMonitoring(); } }); } finally { startupThread.shutdown(); } } public synchronized static Future<?> startMonitoring(MeasurementSession measurementSession) { setMeasurementSession(measurementSession); return startMonitoring(); } private synchronized static void doStartMonitoring() { if (measurementSession.isInitialized() && !started) { logger.info("Measurement Session is initialized: " + measurementSession); try { start(); } catch (RuntimeException e) { logger.warn("Error while trying to start monitoring. 
(this exception is ignored)", e); } } else { logger.warn("Measurement Session is not initialized: {}", measurementSession); logger.warn("make sure the properties 'stagemonitor.instanceName' and 'stagemonitor.applicationName' " + "are set and stagemonitor.properties is available in the classpath"); } } private static void start() { initializePlugins(); started = true; // in case the application does not directly call shutDown Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() { public void run() { shutDown(); } })); } private static void initializePlugins() { final CorePlugin corePlugin = getConfiguration(CorePlugin.class); final Collection<String> disabledPlugins = corePlugin.getDisabledPlugins(); pathsOfWidgetMetricTabPlugins = new LinkedList<String>(); pathsOfWidgetTabPlugins = new LinkedList<String>(); for (StagemonitorPlugin stagemonitorPlugin : plugins) { final String pluginName = stagemonitorPlugin.getClass().getSimpleName(); if (disabledPlugins.contains(pluginName)) { logger.info("Not initializing disabled plugin {}", pluginName); } else { initializePlugin(stagemonitorPlugin, pluginName); } } } private static void initializePlugin(final StagemonitorPlugin stagemonitorPlugin, String pluginName) { logger.info("Initializing plugin {}", pluginName); try { stagemonitorPlugin.initializePlugin(getMetricRegistry(), getConfiguration()); pathsOfWidgetMetricTabPlugins.addAll(stagemonitorPlugin.getPathsOfWidgetMetricTabPlugins()); pathsOfWidgetTabPlugins.addAll(stagemonitorPlugin.getPathsOfWidgetTabPlugins()); onShutdownActions.add(new Runnable() { public void run() { stagemonitorPlugin.onShutDown(); } }); } catch (Exception e) { logger.warn("Error while initializing plugin " + pluginName + " (this exception is ignored)", e); } } /** * Should be called when the server is shutting down. 
* Calls the {@link StagemonitorPlugin#onShutDown()} method of all plugins */ public static synchronized void shutDown() { if (measurementSession.getEndTimestamp() != null) { // shutDown has already been called return; } measurementSession.setEndTimestamp(System.currentTimeMillis()); for (Runnable onShutdownAction : onShutdownActions) { try { onShutdownAction.run(); } catch (RuntimeException e) { logger.warn(e.getMessage(), e); } } configuration.close(); } public static MetricRegistry getMetricRegistry() { return SharedMetricRegistries.getOrCreate("stagemonitor"); } public static Configuration getConfiguration() { return configuration; } public static <T extends StagemonitorPlugin> T getConfiguration(Class<T> plugin) { return configuration.getConfig(plugin); } static void setConfiguration(Configuration configuration) { Stagemonitor.configuration = configuration; } public static MeasurementSession getMeasurementSession() { return measurementSession; } public static boolean isStarted() { return started; } static boolean isDisabled() { return disabled; } static void setLogger(Logger logger) { Stagemonitor.logger = logger; } /** * @see StagemonitorPlugin#getPathsOfWidgetTabPlugins() */ public static List<String> getPathsOfWidgetTabPlugins() { return Collections.unmodifiableList(pathsOfWidgetTabPlugins); } /** * @see org.stagemonitor.core.StagemonitorPlugin#getPathsOfWidgetMetricTabPlugins() */ public static List<String> getPathsOfWidgetMetricTabPlugins() { return Collections.unmodifiableList(pathsOfWidgetMetricTabPlugins); } /** * Should only be used by the internal unit tests */ public static void reset() { started = false; disabled = false; measurementSession = new MeasurementSession(null, null, null); SharedMetricRegistries.clear(); reloadConfiguration(); tryStartMonitoring(); onShutdownActions.add(MainStagemonitorClassFileTransformer.performRuntimeAttachment()); } private static void tryStartMonitoring() { CorePlugin corePlugin = getConfiguration(CorePlugin.class); MeasurementSession session = new MeasurementSession(corePlugin.getApplicationName(), MeasurementSession.getNameOfLocalHost(), corePlugin.getInstanceName()); startMonitoring(session); } private static void reloadConfiguration() { if (configuration != null) { configuration.close(); } List<ConfigurationSource> configurationSources = new ArrayList<ConfigurationSource>(); for (StagemonitorConfigurationSourceInitializer initializer : ServiceLoader.load(StagemonitorConfigurationSourceInitializer.class, Stagemonitor.class.getClassLoader())) { initializer.modifyConfigurationSources(configurationSources); } configurationSources.remove(null); plugins = ServiceLoader.load(StagemonitorPlugin.class, Stagemonitor.class.getClassLoader()); configuration = new Configuration(plugins, configurationSources, STAGEMONITOR_PASSWORD); try { for (StagemonitorConfigurationSourceInitializer initializer : ServiceLoader.load(StagemonitorConfigurationSourceInitializer.class, Stagemonitor.class.getClassLoader())) { initializer.onConfigurationInitialized(configuration); } } catch (Exception e) { logger.error(e.getMessage(), e); logger.error("Stagemonitor will be deactivated!"); disabled = true; } } }
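/*
 * A minimal lifecycle sketch for the Stagemonitor facade above: create a MeasurementSession,
 * start monitoring asynchronously, and shut down when the application stops. The application,
 * host and instance names are placeholders; real deployments usually configure them via
 * stagemonitor.properties.
 */
import java.util.concurrent.Future;

import org.stagemonitor.core.MeasurementSession;
import org.stagemonitor.core.Stagemonitor;

public class StagemonitorLifecycleExample {
    public static void main(String[] args) throws Exception {
        MeasurementSession session = new MeasurementSession(
                "my-app", MeasurementSession.getNameOfLocalHost(), "instance-1");
        Future<?> startup = Stagemonitor.startMonitoring(session);
        startup.get(); // wait until the plugins have been initialized
        // ... application work ...
        Stagemonitor.shutDown(); // also registered as a JVM shutdown hook by start()
    }
}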
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/privacy/dlp/v2beta1/dlp.proto package com.google.privacy.dlp.v2beta1; /** * <pre> * Cloud repository for storing output. * </pre> * * Protobuf type {@code google.privacy.dlp.v2beta1.OutputStorageConfig} */ public final class OutputStorageConfig extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.privacy.dlp.v2beta1.OutputStorageConfig) OutputStorageConfigOrBuilder { // Use OutputStorageConfig.newBuilder() to construct. private OutputStorageConfig(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private OutputStorageConfig() { } @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private OutputStorageConfig( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!input.skipField(tag)) { done = true; } break; } case 10: { com.google.privacy.dlp.v2beta1.BigQueryTable.Builder subBuilder = null; if (typeCase_ == 1) { subBuilder = ((com.google.privacy.dlp.v2beta1.BigQueryTable) type_).toBuilder(); } type_ = input.readMessage(com.google.privacy.dlp.v2beta1.BigQueryTable.parser(), extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom((com.google.privacy.dlp.v2beta1.BigQueryTable) type_); type_ = subBuilder.buildPartial(); } typeCase_ = 1; break; } case 18: { com.google.privacy.dlp.v2beta1.CloudStoragePath.Builder subBuilder = null; if (typeCase_ == 2) { subBuilder = ((com.google.privacy.dlp.v2beta1.CloudStoragePath) type_).toBuilder(); } type_ = input.readMessage(com.google.privacy.dlp.v2beta1.CloudStoragePath.parser(), extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom((com.google.privacy.dlp.v2beta1.CloudStoragePath) type_); type_ = subBuilder.buildPartial(); } typeCase_ = 2; break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.privacy.dlp.v2beta1.DlpProto.internal_static_google_privacy_dlp_v2beta1_OutputStorageConfig_descriptor; } protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.privacy.dlp.v2beta1.DlpProto.internal_static_google_privacy_dlp_v2beta1_OutputStorageConfig_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.privacy.dlp.v2beta1.OutputStorageConfig.class, com.google.privacy.dlp.v2beta1.OutputStorageConfig.Builder.class); } private int typeCase_ = 0; private java.lang.Object type_; public enum TypeCase implements com.google.protobuf.Internal.EnumLite { TABLE(1), STORAGE_PATH(2), TYPE_NOT_SET(0); private final int value; private TypeCase(int value) { this.value = value; } /** * @deprecated Use {@link #forNumber(int)} instead. 
*/ @java.lang.Deprecated public static TypeCase valueOf(int value) { return forNumber(value); } public static TypeCase forNumber(int value) { switch (value) { case 1: return TABLE; case 2: return STORAGE_PATH; case 0: return TYPE_NOT_SET; default: return null; } } public int getNumber() { return this.value; } }; public TypeCase getTypeCase() { return TypeCase.forNumber( typeCase_); } public static final int TABLE_FIELD_NUMBER = 1; /** * <pre> * Store findings in a new table in the dataset. * </pre> * * <code>.google.privacy.dlp.v2beta1.BigQueryTable table = 1;</code> */ public com.google.privacy.dlp.v2beta1.BigQueryTable getTable() { if (typeCase_ == 1) { return (com.google.privacy.dlp.v2beta1.BigQueryTable) type_; } return com.google.privacy.dlp.v2beta1.BigQueryTable.getDefaultInstance(); } /** * <pre> * Store findings in a new table in the dataset. * </pre> * * <code>.google.privacy.dlp.v2beta1.BigQueryTable table = 1;</code> */ public com.google.privacy.dlp.v2beta1.BigQueryTableOrBuilder getTableOrBuilder() { if (typeCase_ == 1) { return (com.google.privacy.dlp.v2beta1.BigQueryTable) type_; } return com.google.privacy.dlp.v2beta1.BigQueryTable.getDefaultInstance(); } public static final int STORAGE_PATH_FIELD_NUMBER = 2; /** * <pre> * The path to a Google Cloud Storage location to store output. * </pre> * * <code>.google.privacy.dlp.v2beta1.CloudStoragePath storage_path = 2;</code> */ public com.google.privacy.dlp.v2beta1.CloudStoragePath getStoragePath() { if (typeCase_ == 2) { return (com.google.privacy.dlp.v2beta1.CloudStoragePath) type_; } return com.google.privacy.dlp.v2beta1.CloudStoragePath.getDefaultInstance(); } /** * <pre> * The path to a Google Cloud Storage location to store output. * </pre> * * <code>.google.privacy.dlp.v2beta1.CloudStoragePath storage_path = 2;</code> */ public com.google.privacy.dlp.v2beta1.CloudStoragePathOrBuilder getStoragePathOrBuilder() { if (typeCase_ == 2) { return (com.google.privacy.dlp.v2beta1.CloudStoragePath) type_; } return com.google.privacy.dlp.v2beta1.CloudStoragePath.getDefaultInstance(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (typeCase_ == 1) { output.writeMessage(1, (com.google.privacy.dlp.v2beta1.BigQueryTable) type_); } if (typeCase_ == 2) { output.writeMessage(2, (com.google.privacy.dlp.v2beta1.CloudStoragePath) type_); } } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (typeCase_ == 1) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, (com.google.privacy.dlp.v2beta1.BigQueryTable) type_); } if (typeCase_ == 2) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(2, (com.google.privacy.dlp.v2beta1.CloudStoragePath) type_); } memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.privacy.dlp.v2beta1.OutputStorageConfig)) { return super.equals(obj); } com.google.privacy.dlp.v2beta1.OutputStorageConfig other = (com.google.privacy.dlp.v2beta1.OutputStorageConfig) obj; boolean result = true; result = result && getTypeCase().equals( other.getTypeCase()); if (!result) return false; switch 
(typeCase_) { case 1: result = result && getTable() .equals(other.getTable()); break; case 2: result = result && getStoragePath() .equals(other.getStoragePath()); break; case 0: default: } return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); switch (typeCase_) { case 1: hash = (37 * hash) + TABLE_FIELD_NUMBER; hash = (53 * hash) + getTable().hashCode(); break; case 2: hash = (37 * hash) + STORAGE_PATH_FIELD_NUMBER; hash = (53 * hash) + getStoragePath().hashCode(); break; case 0: default: } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static com.google.privacy.dlp.v2beta1.OutputStorageConfig parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.privacy.dlp.v2beta1.OutputStorageConfig parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.privacy.dlp.v2beta1.OutputStorageConfig parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.privacy.dlp.v2beta1.OutputStorageConfig parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.privacy.dlp.v2beta1.OutputStorageConfig parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.privacy.dlp.v2beta1.OutputStorageConfig parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.privacy.dlp.v2beta1.OutputStorageConfig parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.privacy.dlp.v2beta1.OutputStorageConfig parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.google.privacy.dlp.v2beta1.OutputStorageConfig parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.google.privacy.dlp.v2beta1.OutputStorageConfig parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.google.privacy.dlp.v2beta1.OutputStorageConfig parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.privacy.dlp.v2beta1.OutputStorageConfig parseFrom( com.google.protobuf.CodedInputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.privacy.dlp.v2beta1.OutputStorageConfig prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * Cloud repository for storing output. * </pre> * * Protobuf type {@code google.privacy.dlp.v2beta1.OutputStorageConfig} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.privacy.dlp.v2beta1.OutputStorageConfig) com.google.privacy.dlp.v2beta1.OutputStorageConfigOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.privacy.dlp.v2beta1.DlpProto.internal_static_google_privacy_dlp_v2beta1_OutputStorageConfig_descriptor; } protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.privacy.dlp.v2beta1.DlpProto.internal_static_google_privacy_dlp_v2beta1_OutputStorageConfig_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.privacy.dlp.v2beta1.OutputStorageConfig.class, com.google.privacy.dlp.v2beta1.OutputStorageConfig.Builder.class); } // Construct using com.google.privacy.dlp.v2beta1.OutputStorageConfig.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } public Builder clear() { super.clear(); typeCase_ = 0; type_ = null; return this; } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.privacy.dlp.v2beta1.DlpProto.internal_static_google_privacy_dlp_v2beta1_OutputStorageConfig_descriptor; } public com.google.privacy.dlp.v2beta1.OutputStorageConfig getDefaultInstanceForType() { return com.google.privacy.dlp.v2beta1.OutputStorageConfig.getDefaultInstance(); } public com.google.privacy.dlp.v2beta1.OutputStorageConfig build() { com.google.privacy.dlp.v2beta1.OutputStorageConfig result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public com.google.privacy.dlp.v2beta1.OutputStorageConfig buildPartial() { com.google.privacy.dlp.v2beta1.OutputStorageConfig result = new com.google.privacy.dlp.v2beta1.OutputStorageConfig(this); if (typeCase_ == 1) { if (tableBuilder_ == null) { result.type_ = type_; } else { result.type_ = tableBuilder_.build(); } } if (typeCase_ == 2) { if (storagePathBuilder_ == null) { result.type_ = type_; } else { result.type_ = storagePathBuilder_.build(); } } result.typeCase_ = typeCase_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) 
super.setField(field, value); } public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.privacy.dlp.v2beta1.OutputStorageConfig) { return mergeFrom((com.google.privacy.dlp.v2beta1.OutputStorageConfig)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.privacy.dlp.v2beta1.OutputStorageConfig other) { if (other == com.google.privacy.dlp.v2beta1.OutputStorageConfig.getDefaultInstance()) return this; switch (other.getTypeCase()) { case TABLE: { mergeTable(other.getTable()); break; } case STORAGE_PATH: { mergeStoragePath(other.getStoragePath()); break; } case TYPE_NOT_SET: { break; } } onChanged(); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.privacy.dlp.v2beta1.OutputStorageConfig parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (com.google.privacy.dlp.v2beta1.OutputStorageConfig) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int typeCase_ = 0; private java.lang.Object type_; public TypeCase getTypeCase() { return TypeCase.forNumber( typeCase_); } public Builder clearType() { typeCase_ = 0; type_ = null; onChanged(); return this; } private com.google.protobuf.SingleFieldBuilderV3< com.google.privacy.dlp.v2beta1.BigQueryTable, com.google.privacy.dlp.v2beta1.BigQueryTable.Builder, com.google.privacy.dlp.v2beta1.BigQueryTableOrBuilder> tableBuilder_; /** * <pre> * Store findings in a new table in the dataset. * </pre> * * <code>.google.privacy.dlp.v2beta1.BigQueryTable table = 1;</code> */ public com.google.privacy.dlp.v2beta1.BigQueryTable getTable() { if (tableBuilder_ == null) { if (typeCase_ == 1) { return (com.google.privacy.dlp.v2beta1.BigQueryTable) type_; } return com.google.privacy.dlp.v2beta1.BigQueryTable.getDefaultInstance(); } else { if (typeCase_ == 1) { return tableBuilder_.getMessage(); } return com.google.privacy.dlp.v2beta1.BigQueryTable.getDefaultInstance(); } } /** * <pre> * Store findings in a new table in the dataset. * </pre> * * <code>.google.privacy.dlp.v2beta1.BigQueryTable table = 1;</code> */ public Builder setTable(com.google.privacy.dlp.v2beta1.BigQueryTable value) { if (tableBuilder_ == null) { if (value == null) { throw new NullPointerException(); } type_ = value; onChanged(); } else { tableBuilder_.setMessage(value); } typeCase_ = 1; return this; } /** * <pre> * Store findings in a new table in the dataset. 
* </pre> * * <code>.google.privacy.dlp.v2beta1.BigQueryTable table = 1;</code> */ public Builder setTable( com.google.privacy.dlp.v2beta1.BigQueryTable.Builder builderForValue) { if (tableBuilder_ == null) { type_ = builderForValue.build(); onChanged(); } else { tableBuilder_.setMessage(builderForValue.build()); } typeCase_ = 1; return this; } /** * <pre> * Store findings in a new table in the dataset. * </pre> * * <code>.google.privacy.dlp.v2beta1.BigQueryTable table = 1;</code> */ public Builder mergeTable(com.google.privacy.dlp.v2beta1.BigQueryTable value) { if (tableBuilder_ == null) { if (typeCase_ == 1 && type_ != com.google.privacy.dlp.v2beta1.BigQueryTable.getDefaultInstance()) { type_ = com.google.privacy.dlp.v2beta1.BigQueryTable.newBuilder((com.google.privacy.dlp.v2beta1.BigQueryTable) type_) .mergeFrom(value).buildPartial(); } else { type_ = value; } onChanged(); } else { if (typeCase_ == 1) { tableBuilder_.mergeFrom(value); } tableBuilder_.setMessage(value); } typeCase_ = 1; return this; } /** * <pre> * Store findings in a new table in the dataset. * </pre> * * <code>.google.privacy.dlp.v2beta1.BigQueryTable table = 1;</code> */ public Builder clearTable() { if (tableBuilder_ == null) { if (typeCase_ == 1) { typeCase_ = 0; type_ = null; onChanged(); } } else { if (typeCase_ == 1) { typeCase_ = 0; type_ = null; } tableBuilder_.clear(); } return this; } /** * <pre> * Store findings in a new table in the dataset. * </pre> * * <code>.google.privacy.dlp.v2beta1.BigQueryTable table = 1;</code> */ public com.google.privacy.dlp.v2beta1.BigQueryTable.Builder getTableBuilder() { return getTableFieldBuilder().getBuilder(); } /** * <pre> * Store findings in a new table in the dataset. * </pre> * * <code>.google.privacy.dlp.v2beta1.BigQueryTable table = 1;</code> */ public com.google.privacy.dlp.v2beta1.BigQueryTableOrBuilder getTableOrBuilder() { if ((typeCase_ == 1) && (tableBuilder_ != null)) { return tableBuilder_.getMessageOrBuilder(); } else { if (typeCase_ == 1) { return (com.google.privacy.dlp.v2beta1.BigQueryTable) type_; } return com.google.privacy.dlp.v2beta1.BigQueryTable.getDefaultInstance(); } } /** * <pre> * Store findings in a new table in the dataset. * </pre> * * <code>.google.privacy.dlp.v2beta1.BigQueryTable table = 1;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.privacy.dlp.v2beta1.BigQueryTable, com.google.privacy.dlp.v2beta1.BigQueryTable.Builder, com.google.privacy.dlp.v2beta1.BigQueryTableOrBuilder> getTableFieldBuilder() { if (tableBuilder_ == null) { if (!(typeCase_ == 1)) { type_ = com.google.privacy.dlp.v2beta1.BigQueryTable.getDefaultInstance(); } tableBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.privacy.dlp.v2beta1.BigQueryTable, com.google.privacy.dlp.v2beta1.BigQueryTable.Builder, com.google.privacy.dlp.v2beta1.BigQueryTableOrBuilder>( (com.google.privacy.dlp.v2beta1.BigQueryTable) type_, getParentForChildren(), isClean()); type_ = null; } typeCase_ = 1; onChanged();; return tableBuilder_; } private com.google.protobuf.SingleFieldBuilderV3< com.google.privacy.dlp.v2beta1.CloudStoragePath, com.google.privacy.dlp.v2beta1.CloudStoragePath.Builder, com.google.privacy.dlp.v2beta1.CloudStoragePathOrBuilder> storagePathBuilder_; /** * <pre> * The path to a Google Cloud Storage location to store output. 
* </pre> * * <code>.google.privacy.dlp.v2beta1.CloudStoragePath storage_path = 2;</code> */ public com.google.privacy.dlp.v2beta1.CloudStoragePath getStoragePath() { if (storagePathBuilder_ == null) { if (typeCase_ == 2) { return (com.google.privacy.dlp.v2beta1.CloudStoragePath) type_; } return com.google.privacy.dlp.v2beta1.CloudStoragePath.getDefaultInstance(); } else { if (typeCase_ == 2) { return storagePathBuilder_.getMessage(); } return com.google.privacy.dlp.v2beta1.CloudStoragePath.getDefaultInstance(); } } /** * <pre> * The path to a Google Cloud Storage location to store output. * </pre> * * <code>.google.privacy.dlp.v2beta1.CloudStoragePath storage_path = 2;</code> */ public Builder setStoragePath(com.google.privacy.dlp.v2beta1.CloudStoragePath value) { if (storagePathBuilder_ == null) { if (value == null) { throw new NullPointerException(); } type_ = value; onChanged(); } else { storagePathBuilder_.setMessage(value); } typeCase_ = 2; return this; } /** * <pre> * The path to a Google Cloud Storage location to store output. * </pre> * * <code>.google.privacy.dlp.v2beta1.CloudStoragePath storage_path = 2;</code> */ public Builder setStoragePath( com.google.privacy.dlp.v2beta1.CloudStoragePath.Builder builderForValue) { if (storagePathBuilder_ == null) { type_ = builderForValue.build(); onChanged(); } else { storagePathBuilder_.setMessage(builderForValue.build()); } typeCase_ = 2; return this; } /** * <pre> * The path to a Google Cloud Storage location to store output. * </pre> * * <code>.google.privacy.dlp.v2beta1.CloudStoragePath storage_path = 2;</code> */ public Builder mergeStoragePath(com.google.privacy.dlp.v2beta1.CloudStoragePath value) { if (storagePathBuilder_ == null) { if (typeCase_ == 2 && type_ != com.google.privacy.dlp.v2beta1.CloudStoragePath.getDefaultInstance()) { type_ = com.google.privacy.dlp.v2beta1.CloudStoragePath.newBuilder((com.google.privacy.dlp.v2beta1.CloudStoragePath) type_) .mergeFrom(value).buildPartial(); } else { type_ = value; } onChanged(); } else { if (typeCase_ == 2) { storagePathBuilder_.mergeFrom(value); } storagePathBuilder_.setMessage(value); } typeCase_ = 2; return this; } /** * <pre> * The path to a Google Cloud Storage location to store output. * </pre> * * <code>.google.privacy.dlp.v2beta1.CloudStoragePath storage_path = 2;</code> */ public Builder clearStoragePath() { if (storagePathBuilder_ == null) { if (typeCase_ == 2) { typeCase_ = 0; type_ = null; onChanged(); } } else { if (typeCase_ == 2) { typeCase_ = 0; type_ = null; } storagePathBuilder_.clear(); } return this; } /** * <pre> * The path to a Google Cloud Storage location to store output. * </pre> * * <code>.google.privacy.dlp.v2beta1.CloudStoragePath storage_path = 2;</code> */ public com.google.privacy.dlp.v2beta1.CloudStoragePath.Builder getStoragePathBuilder() { return getStoragePathFieldBuilder().getBuilder(); } /** * <pre> * The path to a Google Cloud Storage location to store output. * </pre> * * <code>.google.privacy.dlp.v2beta1.CloudStoragePath storage_path = 2;</code> */ public com.google.privacy.dlp.v2beta1.CloudStoragePathOrBuilder getStoragePathOrBuilder() { if ((typeCase_ == 2) && (storagePathBuilder_ != null)) { return storagePathBuilder_.getMessageOrBuilder(); } else { if (typeCase_ == 2) { return (com.google.privacy.dlp.v2beta1.CloudStoragePath) type_; } return com.google.privacy.dlp.v2beta1.CloudStoragePath.getDefaultInstance(); } } /** * <pre> * The path to a Google Cloud Storage location to store output. 
* </pre> * * <code>.google.privacy.dlp.v2beta1.CloudStoragePath storage_path = 2;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.privacy.dlp.v2beta1.CloudStoragePath, com.google.privacy.dlp.v2beta1.CloudStoragePath.Builder, com.google.privacy.dlp.v2beta1.CloudStoragePathOrBuilder> getStoragePathFieldBuilder() { if (storagePathBuilder_ == null) { if (!(typeCase_ == 2)) { type_ = com.google.privacy.dlp.v2beta1.CloudStoragePath.getDefaultInstance(); } storagePathBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.privacy.dlp.v2beta1.CloudStoragePath, com.google.privacy.dlp.v2beta1.CloudStoragePath.Builder, com.google.privacy.dlp.v2beta1.CloudStoragePathOrBuilder>( (com.google.privacy.dlp.v2beta1.CloudStoragePath) type_, getParentForChildren(), isClean()); type_ = null; } typeCase_ = 2; onChanged();; return storagePathBuilder_; } public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return this; } public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return this; } // @@protoc_insertion_point(builder_scope:google.privacy.dlp.v2beta1.OutputStorageConfig) } // @@protoc_insertion_point(class_scope:google.privacy.dlp.v2beta1.OutputStorageConfig) private static final com.google.privacy.dlp.v2beta1.OutputStorageConfig DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.privacy.dlp.v2beta1.OutputStorageConfig(); } public static com.google.privacy.dlp.v2beta1.OutputStorageConfig getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<OutputStorageConfig> PARSER = new com.google.protobuf.AbstractParser<OutputStorageConfig>() { public OutputStorageConfig parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new OutputStorageConfig(input, extensionRegistry); } }; public static com.google.protobuf.Parser<OutputStorageConfig> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<OutputStorageConfig> getParserForType() { return PARSER; } public com.google.privacy.dlp.v2beta1.OutputStorageConfig getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
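/*
 * A short sketch of populating the generated OutputStorageConfig above. Only the default
 * BigQueryTable instance is used here because BigQueryTable's own setters are not part of the
 * code shown; setting the table populates the "type" oneof, replacing any storage_path value.
 */
import com.google.privacy.dlp.v2beta1.BigQueryTable;
import com.google.privacy.dlp.v2beta1.OutputStorageConfig;

public class OutputStorageConfigExample {
    public static void main(String[] args) {
        OutputStorageConfig config = OutputStorageConfig.newBuilder()
                .setTable(BigQueryTable.getDefaultInstance())
                .build();
        // The oneof case reflects which alternative was set.
        System.out.println(config.getTypeCase()); // TABLE
    }
}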
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package org.apache.ivy.osgi.updatesite.xml; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.net.URI; import java.net.URISyntaxException; import java.text.ParseException; import javax.xml.parsers.ParserConfigurationException; import org.apache.ivy.osgi.util.DelegetingHandler; import org.apache.ivy.osgi.util.Version; import org.apache.ivy.util.XMLHelper; import org.xml.sax.Attributes; import org.xml.sax.SAXException; public class EclipseUpdateSiteParser { public static UpdateSite parse(InputStream in) throws ParseException, IOException, SAXException { SiteHandler handler = new SiteHandler(); try { XMLHelper.parse(in, null, handler, null); } catch (ParserConfigurationException e) { throw new SAXException(e); } return handler.updatesite; } static class SiteHandler extends DelegetingHandler { private static final String SITE = "site"; private static final String URL = "url"; private static final String PACK200 = "pack200"; private static final String MIRRORS_URL = "mirrorsURL"; private static final String ASSOCIATE_SITES_URL = "associateSitesURL"; private static final String DIGEST_URL = "digestURL"; UpdateSite updatesite; public SiteHandler() { super(SITE); // addChild(new DescriptionHandler(), new ChildElementHandler() { // public void childHanlded(DelegetingHandler child) { // updateSite.setDescription(child.getBufferedChars().trim()); // } // }); addChild(new FeatureHandler(), new ChildElementHandler() { public void childHanlded(DelegetingHandler child) { updatesite.addFeature(((FeatureHandler) child).feature); } }); // addChild(new ArchiveHandler(), new ChildElementHandler() { // public void childHanlded(DelegetingHandler child) { // updateSite.addArchive(((ArchiveHandler) child).archive); // } // }); // addChild(new CategoryDefHandler(), new ChildElementHandler() { // public void childHanlded(DelegetingHandler child) { // updateSite.addCategoryDef(((CategoryDefHandler) child).categoryDef); // } // }); } protected void handleAttributes(Attributes atts) { updatesite = new UpdateSite(); String url = atts.getValue(URL); if (url != null && !("".equals(url.trim()))) { if (!url.endsWith("/") && !url.endsWith(File.separator)) { url += "/"; } try { updatesite.setUri(new URI(url)); } catch (URISyntaxException e) { throw new RuntimeException("illegal url", e); } } String mirrorsURL = atts.getValue(MIRRORS_URL); if (mirrorsURL != null && mirrorsURL.trim().length() > 0) { updatesite.setMirrorsURL(mirrorsURL); } String pack200 = atts.getValue(PACK200); if (pack200 != null && new Boolean(pack200).booleanValue()) { updatesite.setPack200(true); } String digestURL = atts.getValue(DIGEST_URL); if (digestURL != null) { try { updatesite.setDigestUri(new URI(digestURL)); } catch (URISyntaxException e) 
{ throw new RuntimeException("illegal url", e); } } String associateSitesURL = atts.getValue(ASSOCIATE_SITES_URL); if (associateSitesURL != null) { updatesite.setAssociateSitesURL(associateSitesURL); } } } static class DescriptionHandler extends DelegetingHandler { private static final String DESCRIPTION = "description"; private static final String URL = "url"; public DescriptionHandler() { super(DESCRIPTION); setBufferingChar(true); } protected void handleAttributes(Attributes atts) { String url = atts.getValue(URL); } } static class FeatureHandler extends DelegetingHandler { private static final String FEATURE = "feature"; private static final String VERSION = "version"; private static final String ID = "id"; private static final String URL = "url"; private static final String PATCH = "patch"; private static final String ARCH = "arch"; private static final String NL = "nl"; private static final String WS = "ws"; private static final String OS = "os"; private static final String LABEL = "label"; private static final String TYPE = "type"; private EclipseFeature feature; public FeatureHandler() { super(FEATURE); addChild(new CategoryHandler(), new ChildElementHandler() { public void childHanlded(DelegetingHandler child) { feature.addCategory(((CategoryHandler) child).name); } }); } protected void handleAttributes(Attributes atts) throws SAXException { String id = atts.getValue(ID); String version = atts.getValue(VERSION); try { feature = new EclipseFeature(id, new Version(version)); } catch (ParseException e) { throw new SAXException("Incorrect version on the feature '" + id + "': " + version + " (" + e.getMessage() + ")"); } String url = atts.getValue(URL); if (url != null) { feature.setURL(url); } feature.setType(atts.getValue(TYPE)); feature.setLabel(atts.getValue(LABEL)); feature.setOS(atts.getValue(OS)); feature.setWS(atts.getValue(WS)); feature.setNL(atts.getValue(NL)); feature.setArch(atts.getValue(ARCH)); feature.setPatch(atts.getValue(PATCH)); } } static class CategoryHandler extends DelegetingHandler { private static final String CATEGORY = "category"; private static final String NAME = "name"; String name; public CategoryHandler() { super(CATEGORY); } protected void handleAttributes(Attributes atts) throws SAXException { name = atts.getValue(NAME); } } static class ArchiveHandler extends DelegetingHandler { private static final String ARCHIVE = "archive"; private static final String URL = "url"; private static final String PATH = "path"; private Archive archive; public ArchiveHandler() { super(ARCHIVE); } protected void handleAttributes(Attributes atts) throws SAXException { archive = new Archive(); String path = atts.getValue(PATH); archive.setPath(path); String url = atts.getValue(URL); archive.setURL(url); } } static class CategoryDefHandler extends DelegetingHandler { private static final String CATEGORY_DEF = "category-def"; private static final String NAME = "name"; private static final String LABEL = "label"; private CategoryDef categoryDef; public CategoryDefHandler() { super(CATEGORY_DEF); addChild(new DescriptionHandler(), new ChildElementHandler() { public void childHanlded(DelegetingHandler child) { categoryDef.setDescription(child.getBufferedChars().trim()); } }); } protected void handleAttributes(Attributes atts) throws SAXException { categoryDef = new CategoryDef(); String name = atts.getValue(NAME); categoryDef.setName(name); String label = atts.getValue(LABEL); categoryDef.setLabel(label); } } }
// Copyright 2014 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. package org.chromium.chrome.browser.appmenu; import android.animation.Animator; import android.animation.AnimatorListenerAdapter; import android.animation.AnimatorSet; import android.animation.ObjectAnimator; import android.graphics.drawable.Drawable; import android.view.LayoutInflater; import android.view.MenuItem; import android.view.View; import android.view.View.OnClickListener; import android.view.ViewGroup; import android.widget.BaseAdapter; import android.widget.ImageView; import android.widget.ListView; import android.widget.TextView; import org.chromium.base.ApiCompatibilityUtils; import org.chromium.chrome.R; import org.chromium.chrome.browser.widget.TintedImageButton; import org.chromium.ui.base.LocalizationUtils; import org.chromium.ui.interpolators.BakedBezierInterpolator; import java.util.List; /** * ListAdapter to customize the view of items in the list. */ class AppMenuAdapter extends BaseAdapter { /** * Regular Android menu item that contains a title and an icon if icon is specified. */ private static final int STANDARD_MENU_ITEM = 0; /** * Menu item that has two buttons, the first one is a title and the second one is an icon. * It is different from the regular menu item because it contains two separate buttons. */ private static final int TITLE_BUTTON_MENU_ITEM = 1; /** * Menu item that has four buttons. Every one of these buttons is displayed as an icon. */ private static final int THREE_BUTTON_MENU_ITEM = 2; /** * Menu item that has four buttons. Every one of these buttons is displayed as an icon. */ private static final int FOUR_BUTTON_MENU_ITEM = 3; /** * The number of view types specified above. If you add a view type you MUST increment this. */ private static final int VIEW_TYPE_COUNT = 4; /** MenuItem Animation Constants */ private static final int ENTER_ITEM_DURATION_MS = 350; private static final int ENTER_ITEM_BASE_DELAY_MS = 80; private static final int ENTER_ITEM_ADDL_DELAY_MS = 30; private static final float ENTER_STANDARD_ITEM_OFFSET_Y_DP = -10.f; private static final float ENTER_STANDARD_ITEM_OFFSET_X_DP = 10.f; private final AppMenu mAppMenu; private final LayoutInflater mInflater; private final List<MenuItem> mMenuItems; private final int mNumMenuItems; private final float mDpToPx; public AppMenuAdapter(AppMenu appMenu, List<MenuItem> menuItems, LayoutInflater inflater) { mAppMenu = appMenu; mMenuItems = menuItems; mInflater = inflater; mNumMenuItems = menuItems.size(); mDpToPx = inflater.getContext().getResources().getDisplayMetrics().density; } @Override public int getCount() { return mNumMenuItems; } @Override public int getViewTypeCount() { return VIEW_TYPE_COUNT; } @Override public int getItemViewType(int position) { MenuItem item = getItem(position); int viewCount = item.hasSubMenu() ? 
item.getSubMenu().size() : 1; if (viewCount == 4) { return FOUR_BUTTON_MENU_ITEM; } else if (viewCount == 3) { return THREE_BUTTON_MENU_ITEM; } else if (viewCount == 2) { return TITLE_BUTTON_MENU_ITEM; } return STANDARD_MENU_ITEM; } @Override public long getItemId(int position) { return getItem(position).getItemId(); } @Override public MenuItem getItem(int position) { if (position == ListView.INVALID_POSITION) return null; assert position >= 0; assert position < mMenuItems.size(); return mMenuItems.get(position); } @Override public View getView(int position, View convertView, ViewGroup parent) { final MenuItem item = getItem(position); switch (getItemViewType(position)) { case STANDARD_MENU_ITEM: { StandardMenuItemViewHolder holder = null; if (convertView == null) { holder = new StandardMenuItemViewHolder(); convertView = mInflater.inflate(R.layout.menu_item, parent, false); holder.text = (TextView) convertView.findViewById(R.id.menu_item_text); holder.image = (AppMenuItemIcon) convertView.findViewById(R.id.menu_item_icon); convertView.setTag(holder); convertView.setTag(R.id.menu_item_enter_anim_id, buildStandardItemEnterAnimator(convertView, position)); } else { holder = (StandardMenuItemViewHolder) convertView.getTag(); } convertView.setOnClickListener(new OnClickListener() { @Override public void onClick(View v) { mAppMenu.onItemClick(item); } }); // Set up the icon. Drawable icon = item.getIcon(); holder.image.setImageDrawable(icon); holder.image.setVisibility(icon == null ? View.GONE : View.VISIBLE); holder.image.setChecked(item.isChecked()); holder.text.setText(item.getTitle()); holder.text.setContentDescription(item.getTitleCondensed()); boolean isEnabled = item.isEnabled(); // Set the text color (using a color state list). holder.text.setEnabled(isEnabled); // This will ensure that the item is not highlighted when selected. 
convertView.setEnabled(isEnabled); break; } case THREE_BUTTON_MENU_ITEM: { ThreeButtonMenuItemViewHolder holder = null; if (convertView == null) { holder = new ThreeButtonMenuItemViewHolder(); convertView = mInflater.inflate(R.layout.three_button_menu_item, parent, false); holder.buttons[0] = (TintedImageButton) convertView.findViewById(R.id.button_one); holder.buttons[1] = (TintedImageButton) convertView.findViewById(R.id.button_two); holder.buttons[2] = (TintedImageButton) convertView.findViewById(R.id.button_three); convertView.setTag(holder); convertView.setTag(R.id.menu_item_enter_anim_id, buildIconItemEnterAnimator(holder.buttons)); } else { holder = (ThreeButtonMenuItemViewHolder) convertView.getTag(); } for (int i = 0; i < 3; i++) { setupImageButton(holder.buttons[i], item.getSubMenu().getItem(i)); } convertView.setFocusable(false); convertView.setEnabled(false); break; } case FOUR_BUTTON_MENU_ITEM: { FourButtonMenuItemViewHolder holder = null; if (convertView == null) { holder = new FourButtonMenuItemViewHolder(); convertView = mInflater.inflate(R.layout.four_button_menu_item, parent, false); holder.buttons[0] = (TintedImageButton) convertView.findViewById(R.id.button_one); holder.buttons[1] = (TintedImageButton) convertView.findViewById(R.id.button_two); holder.buttons[2] = (TintedImageButton) convertView.findViewById(R.id.button_three); holder.buttons[3] = (TintedImageButton) convertView.findViewById(R.id.button_four); convertView.setTag(holder); convertView.setTag(R.id.menu_item_enter_anim_id, buildIconItemEnterAnimator(holder.buttons)); } else { holder = (FourButtonMenuItemViewHolder) convertView.getTag(); } for (int i = 0; i < 4; i++) { setupImageButton(holder.buttons[i], item.getSubMenu().getItem(i)); } convertView.setFocusable(false); convertView.setEnabled(false); break; } case TITLE_BUTTON_MENU_ITEM: { TitleButtonMenuItemViewHolder holder = null; if (convertView == null) { holder = new TitleButtonMenuItemViewHolder(); convertView = mInflater.inflate(R.layout.title_button_menu_item, parent, false); holder.title = (TextView) convertView.findViewById(R.id.title); holder.button = (TintedImageButton) convertView.findViewById(R.id.button); View animatedView = convertView; convertView.setTag(holder); convertView.setTag(R.id.menu_item_enter_anim_id, buildStandardItemEnterAnimator(animatedView, position)); } else { holder = (TitleButtonMenuItemViewHolder) convertView.getTag(); } final MenuItem titleItem = item.hasSubMenu() ? item.getSubMenu().getItem(0) : item; holder.title.setText(titleItem.getTitle()); holder.title.setEnabled(titleItem.isEnabled()); holder.title.setFocusable(titleItem.isEnabled()); holder.title.setOnClickListener(new OnClickListener() { @Override public void onClick(View v) { mAppMenu.onItemClick(titleItem); } }); if (item.getSubMenu().getItem(1).getIcon() != null) { holder.button.setVisibility(View.VISIBLE); setupImageButton(holder.button, item.getSubMenu().getItem(1)); } else { holder.button.setVisibility(View.GONE); } convertView.setFocusable(false); convertView.setEnabled(false); break; } default: assert false : "Unexpected MenuItem type"; } return convertView; } private void setupImageButton(TintedImageButton button, final MenuItem item) { // Store and recover the level of image as button.setimageDrawable // resets drawable to default level. 
int currentLevel = item.getIcon().getLevel(); button.setImageDrawable(item.getIcon()); item.getIcon().setLevel(currentLevel); if (item.isChecked()) { button.setTint(ApiCompatibilityUtils.getColorStateList( button.getResources(), R.color.blue_mode_tint)); } button.setEnabled(item.isEnabled()); button.setFocusable(item.isEnabled()); button.setContentDescription(item.getTitleCondensed()); button.setOnClickListener(new OnClickListener() { @Override public void onClick(View v) { mAppMenu.onItemClick(item); } }); } /** * This builds an {@link Animator} for the enter animation of a standard menu item. This means * it will animate the alpha from 0 to 1 and translate the view from -10dp to 0dp on the y axis. * * @param view The menu item {@link View} to be animated. * @param position The position in the menu. This impacts the start delay of the animation. * @return The {@link Animator}. */ private Animator buildStandardItemEnterAnimator(final View view, int position) { final float offsetYPx = ENTER_STANDARD_ITEM_OFFSET_Y_DP * mDpToPx; final int startDelay = ENTER_ITEM_BASE_DELAY_MS + ENTER_ITEM_ADDL_DELAY_MS * position; AnimatorSet animation = new AnimatorSet(); animation.playTogether( ObjectAnimator.ofFloat(view, View.ALPHA, 0.f, 1.f), ObjectAnimator.ofFloat(view, View.TRANSLATION_Y, offsetYPx, 0.f)); animation.setDuration(ENTER_ITEM_DURATION_MS); animation.setStartDelay(startDelay); animation.setInterpolator(BakedBezierInterpolator.FADE_IN_CURVE); animation.addListener(new AnimatorListenerAdapter() { @Override public void onAnimationStart(Animator animation) { view.setAlpha(0.f); } }); return animation; } /** * This builds an {@link Animator} for the enter animation of icon row menu items. This means * it will animate the alpha from 0 to 1 and translate the views from 10dp to 0dp on the x axis. * * @param views The list if icons in the menu item that should be animated. * @return The {@link Animator}. */ private Animator buildIconItemEnterAnimator(final ImageView[] views) { final boolean rtl = LocalizationUtils.isLayoutRtl(); final float offsetXPx = ENTER_STANDARD_ITEM_OFFSET_X_DP * mDpToPx * (rtl ? -1.f : 1.f); final int maxViewsToAnimate = views.length; AnimatorSet animation = new AnimatorSet(); AnimatorSet.Builder builder = null; for (int i = 0; i < maxViewsToAnimate; i++) { final int startDelay = ENTER_ITEM_ADDL_DELAY_MS * i; Animator alpha = ObjectAnimator.ofFloat(views[i], View.ALPHA, 0.f, 1.f); Animator translate = ObjectAnimator.ofFloat(views[i], View.TRANSLATION_X, offsetXPx, 0); alpha.setStartDelay(startDelay); translate.setStartDelay(startDelay); alpha.setDuration(ENTER_ITEM_DURATION_MS); translate.setDuration(ENTER_ITEM_DURATION_MS); if (builder == null) { builder = animation.play(alpha); } else { builder.with(alpha); } builder.with(translate); } animation.setStartDelay(ENTER_ITEM_BASE_DELAY_MS); animation.setInterpolator(BakedBezierInterpolator.FADE_IN_CURVE); animation.addListener(new AnimatorListenerAdapter() { @Override public void onAnimationStart(Animator animation) { for (int i = 0; i < maxViewsToAnimate; i++) { views[i].setAlpha(0.f); } } }); return animation; } static class StandardMenuItemViewHolder { public TextView text; public AppMenuItemIcon image; } static class ThreeButtonMenuItemViewHolder { public TintedImageButton[] buttons = new TintedImageButton[3]; } static class FourButtonMenuItemViewHolder { public TintedImageButton[] buttons = new TintedImageButton[4]; } static class TitleButtonMenuItemViewHolder { public TextView title; public TintedImageButton button; } }
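The adapter above never starts the enter animations itself: it only parks an Animator on each row view under the R.id.menu_item_enter_anim_id tag, and the code that shows the menu is expected to read that tag back and start it once the rows are attached. A minimal sketch of that consumer side follows; the class and method names are hypothetical, and the tag key is passed in as a parameter rather than imported so the snippet stays self-contained.

import android.animation.Animator;
import android.view.View;
import android.view.ViewGroup;

final class MenuEnterAnimationRunner {

    private MenuEnterAnimationRunner() {}

    /**
     * Starts the enter Animator that the adapter stored on each visible row.
     *
     * @param listContainer   the ViewGroup holding the inflated menu rows
     * @param enterAnimTagKey the tag key the adapter used (R.id.menu_item_enter_anim_id)
     */
    static void runEnterAnimations(ViewGroup listContainer, int enterAnimTagKey) {
        for (int i = 0; i < listContainer.getChildCount(); i++) {
            View row = listContainer.getChildAt(i);
            Object tag = row.getTag(enterAnimTagKey);
            if (tag instanceof Animator) {
                ((Animator) tag).start();
            }
        }
    }
}

Because buildStandardItemEnterAnimator and buildIconItemEnterAnimator already stagger their start delays by position, starting all of the stored animators together like this produces the cascading fade-in.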
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated from EagleFilter.g4 by ANTLR 4.5 package org.apache.eagle.query.antlr.generated; import org.antlr.v4.runtime.Lexer; import org.antlr.v4.runtime.CharStream; import org.antlr.v4.runtime.Token; import org.antlr.v4.runtime.TokenStream; import org.antlr.v4.runtime.*; import org.antlr.v4.runtime.atn.*; import org.antlr.v4.runtime.dfa.DFA; import org.antlr.v4.runtime.misc.*; @SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast"}) public class EagleFilterLexer extends Lexer { static { RuntimeMetaData.checkVersion("4.5", RuntimeMetaData.VERSION); } protected static final DFA[] _decisionToDFA; protected static final PredictionContextCache _sharedContextCache = new PredictionContextCache(); public static final int WHITESPACE=1, OP=2, AND=3, OR=4, ID=5, VALUE=6, SINGLE_VALUE=7, EXPR=8, NUMBER=9, NULL=10, SET=11, DOUBLEQUOTED_STRING=12, LPAREN=13, RPAREN=14, LBRACE=15, RBRACE=16; public static String[] modeNames = { "DEFAULT_MODE" }; public static final String[] ruleNames = { "WHITESPACE", "OP", "AND", "OR", "ID", "VALUE", "SINGLE_VALUE", "EXPR", "NUMBER", "NULL", "SET", "DOUBLEQUOTED_STRING", "UNSIGN_INT", "STRING", "LPAREN", "RPAREN", "LBRACE", "RBRACE" }; private static final String[] _LITERAL_NAMES = { null, null, null, null, null, null, null, null, null, null, null, null, null, "'('", "')'", "'{'", "'}'" }; private static final String[] _SYMBOLIC_NAMES = { null, "WHITESPACE", "OP", "AND", "OR", "ID", "VALUE", "SINGLE_VALUE", "EXPR", "NUMBER", "NULL", "SET", "DOUBLEQUOTED_STRING", "LPAREN", "RPAREN", "LBRACE", "RBRACE" }; public static final Vocabulary VOCABULARY = new VocabularyImpl(_LITERAL_NAMES, _SYMBOLIC_NAMES); /** * @deprecated Use {@link #VOCABULARY} instead. 
*/ @Deprecated public static final String[] tokenNames; static { tokenNames = new String[_SYMBOLIC_NAMES.length]; for (int i = 0; i < tokenNames.length; i++) { tokenNames[i] = VOCABULARY.getLiteralName(i); if (tokenNames[i] == null) { tokenNames[i] = VOCABULARY.getSymbolicName(i); } if (tokenNames[i] == null) { tokenNames[i] = "<INVALID>"; } } } @Override @Deprecated public String[] getTokenNames() { return tokenNames; } @Override public Vocabulary getVocabulary() { return VOCABULARY; } public EagleFilterLexer(CharStream input) { super(input); _interp = new LexerATNSimulator(this,_ATN,_decisionToDFA,_sharedContextCache); } @Override public String getGrammarFileName() { return "EagleFilter.g4"; } @Override public String[] getRuleNames() { return ruleNames; } @Override public String getSerializedATN() { return _serializedATN; } @Override public String[] getModeNames() { return modeNames; } @Override public ATN getATN() { return _ATN; } public static final String _serializedATN = "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\2\22\u010c\b\1\4\2"+ "\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4"+ "\13\t\13\4\f\t\f\4\r\t\r\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22"+ "\t\22\4\23\t\23\3\2\6\2)\n\2\r\2\16\2*\3\2\3\2\3\3\3\3\3\3\3\3\3\3\3\3"+ "\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\6\3E"+ "\n\3\r\3\16\3F\3\3\3\3\3\3\3\3\3\3\3\3\3\3\6\3P\n\3\r\3\16\3Q\3\3\3\3"+ "\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3"+ "\3\3\3\3\3\3\3\6\3k\n\3\r\3\16\3l\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3"+ "\3\3\3\3\3\3\3\3\6\3|\n\3\r\3\16\3}\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3"+ "\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\6\3\u0090\n\3\r\3\16\3\u0091\3\3\3\3\3"+ "\3\3\3\3\3\3\3\3\3\6\3\u009b\n\3\r\3\16\3\u009c\3\3\3\3\3\3\5\3\u00a2"+ "\n\3\3\4\3\4\3\4\3\4\3\4\3\4\5\4\u00aa\n\4\3\5\3\5\3\5\3\5\5\5\u00b0\n"+ "\5\3\6\3\6\6\6\u00b4\n\6\r\6\16\6\u00b5\3\7\3\7\3\7\5\7\u00bb\n\7\3\b"+ "\3\b\3\b\5\b\u00c0\n\b\3\t\3\t\3\t\3\t\3\t\3\t\5\t\u00c8\n\t\3\t\3\t\6"+ "\t\u00cc\n\t\r\t\16\t\u00cd\3\t\3\t\3\n\5\n\u00d3\n\n\3\n\3\n\3\n\5\n"+ "\u00d8\n\n\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\5\13\u00e2\n\13\3\f"+ "\3\f\5\f\u00e6\n\f\3\f\3\f\7\f\u00ea\n\f\f\f\16\f\u00ed\13\f\3\f\3\f\3"+ "\r\3\r\3\r\3\r\3\r\3\r\5\r\u00f7\n\r\3\16\6\16\u00fa\n\16\r\16\16\16\u00fb"+ "\3\17\3\17\3\17\6\17\u0101\n\17\r\17\16\17\u0102\3\20\3\20\3\21\3\21\3"+ "\22\3\22\3\23\3\23\2\2\24\3\3\5\4\7\5\t\6\13\7\r\b\17\t\21\n\23\13\25"+ "\f\27\r\31\16\33\2\35\2\37\17!\20#\21%\22\3\2\7\5\2\13\f\16\17\"\"\4\2"+ ">>@@\6\2\"\"$$*+>@\3\2\177\177\3\2$$\u0134\2\3\3\2\2\2\2\5\3\2\2\2\2\7"+ "\3\2\2\2\2\t\3\2\2\2\2\13\3\2\2\2\2\r\3\2\2\2\2\17\3\2\2\2\2\21\3\2\2"+ "\2\2\23\3\2\2\2\2\25\3\2\2\2\2\27\3\2\2\2\2\31\3\2\2\2\2\37\3\2\2\2\2"+ "!\3\2\2\2\2#\3\2\2\2\2%\3\2\2\2\3(\3\2\2\2\5\u00a1\3\2\2\2\7\u00a9\3\2"+ "\2\2\t\u00af\3\2\2\2\13\u00b1\3\2\2\2\r\u00ba\3\2\2\2\17\u00bf\3\2\2\2"+ "\21\u00c7\3\2\2\2\23\u00d2\3\2\2\2\25\u00e1\3\2\2\2\27\u00e3\3\2\2\2\31"+ "\u00f6\3\2\2\2\33\u00f9\3\2\2\2\35\u0100\3\2\2\2\37\u0104\3\2\2\2!\u0106"+ "\3\2\2\2#\u0108\3\2\2\2%\u010a\3\2\2\2\')\t\2\2\2(\'\3\2\2\2)*\3\2\2\2"+ "*(\3\2\2\2*+\3\2\2\2+,\3\2\2\2,-\b\2\2\2-\4\3\2\2\2.\u00a2\7?\2\2/\60"+ "\7#\2\2\60\u00a2\7?\2\2\61\u00a2\t\3\2\2\62\63\7@\2\2\63\u00a2\7?\2\2"+ "\64\65\7>\2\2\65\u00a2\7?\2\2\66\67\7?\2\2\67\u00a2\7\u0080\2\289\7#\2"+ "\29:\7?\2\2:\u00a2\7\u0080\2\2;<\7k\2\2<\u00a2\7p\2\2=>\7K\2\2>\u00a2"+ "\7P\2\2?@\7p\2\2@A\7q\2\2AB\7v\2\2BD\3\2\2\2CE\7\"\2\2DC\3\2\2\2EF\3\2"+ 
"\2\2FD\3\2\2\2FG\3\2\2\2GH\3\2\2\2HI\7k\2\2I\u00a2\7p\2\2JK\7P\2\2KL\7"+ "Q\2\2LM\7V\2\2MO\3\2\2\2NP\7\"\2\2ON\3\2\2\2PQ\3\2\2\2QO\3\2\2\2QR\3\2"+ "\2\2RS\3\2\2\2ST\7K\2\2T\u00a2\7P\2\2UV\7e\2\2VW\7q\2\2WX\7p\2\2XY\7v"+ "\2\2YZ\7c\2\2Z[\7k\2\2[\\\7p\2\2\\\u00a2\7u\2\2]^\7E\2\2^_\7Q\2\2_`\7"+ "P\2\2`a\7V\2\2ab\7C\2\2bc\7K\2\2cd\7P\2\2d\u00a2\7U\2\2ef\7p\2\2fg\7q"+ "\2\2gh\7v\2\2hj\3\2\2\2ik\7\"\2\2ji\3\2\2\2kl\3\2\2\2lj\3\2\2\2lm\3\2"+ "\2\2mn\3\2\2\2no\7e\2\2op\7q\2\2pq\7p\2\2qr\7v\2\2rs\7c\2\2st\7k\2\2t"+ "u\7p\2\2u\u00a2\7u\2\2vw\7P\2\2wx\7Q\2\2xy\7V\2\2y{\3\2\2\2z|\7\"\2\2"+ "{z\3\2\2\2|}\3\2\2\2}{\3\2\2\2}~\3\2\2\2~\177\3\2\2\2\177\u0080\7E\2\2"+ "\u0080\u0081\7Q\2\2\u0081\u0082\7P\2\2\u0082\u0083\7V\2\2\u0083\u0084"+ "\7C\2\2\u0084\u0085\7K\2\2\u0085\u0086\7P\2\2\u0086\u00a2\7U\2\2\u0087"+ "\u0088\7k\2\2\u0088\u00a2\7u\2\2\u0089\u008a\7K\2\2\u008a\u00a2\7U\2\2"+ "\u008b\u008c\7k\2\2\u008c\u008d\7u\2\2\u008d\u008f\3\2\2\2\u008e\u0090"+ "\7\"\2\2\u008f\u008e\3\2\2\2\u0090\u0091\3\2\2\2\u0091\u008f\3\2\2\2\u0091"+ "\u0092\3\2\2\2\u0092\u0093\3\2\2\2\u0093\u0094\7p\2\2\u0094\u0095\7q\2"+ "\2\u0095\u00a2\7v\2\2\u0096\u0097\7K\2\2\u0097\u0098\7U\2\2\u0098\u009a"+ "\3\2\2\2\u0099\u009b\7\"\2\2\u009a\u0099\3\2\2\2\u009b\u009c\3\2\2\2\u009c"+ "\u009a\3\2\2\2\u009c\u009d\3\2\2\2\u009d\u009e\3\2\2\2\u009e\u009f\7P"+ "\2\2\u009f\u00a0\7Q\2\2\u00a0\u00a2\7V\2\2\u00a1.\3\2\2\2\u00a1/\3\2\2"+ "\2\u00a1\61\3\2\2\2\u00a1\62\3\2\2\2\u00a1\64\3\2\2\2\u00a1\66\3\2\2\2"+ "\u00a18\3\2\2\2\u00a1;\3\2\2\2\u00a1=\3\2\2\2\u00a1?\3\2\2\2\u00a1J\3"+ "\2\2\2\u00a1U\3\2\2\2\u00a1]\3\2\2\2\u00a1e\3\2\2\2\u00a1v\3\2\2\2\u00a1"+ "\u0087\3\2\2\2\u00a1\u0089\3\2\2\2\u00a1\u008b\3\2\2\2\u00a1\u0096\3\2"+ "\2\2\u00a2\6\3\2\2\2\u00a3\u00a4\7C\2\2\u00a4\u00a5\7P\2\2\u00a5\u00aa"+ "\7F\2\2\u00a6\u00a7\7c\2\2\u00a7\u00a8\7p\2\2\u00a8\u00aa\7f\2\2\u00a9"+ "\u00a3\3\2\2\2\u00a9\u00a6\3\2\2\2\u00aa\b\3\2\2\2\u00ab\u00ac\7Q\2\2"+ "\u00ac\u00b0\7T\2\2\u00ad\u00ae\7q\2\2\u00ae\u00b0\7t\2\2\u00af\u00ab"+ "\3\2\2\2\u00af\u00ad\3\2\2\2\u00b0\n\3\2\2\2\u00b1\u00b3\7B\2\2\u00b2"+ "\u00b4\n\4\2\2\u00b3\u00b2\3\2\2\2\u00b4\u00b5\3\2\2\2\u00b5\u00b3\3\2"+ "\2\2\u00b5\u00b6\3\2\2\2\u00b6\f\3\2\2\2\u00b7\u00bb\5\21\t\2\u00b8\u00bb"+ "\5\17\b\2\u00b9\u00bb\5\27\f\2\u00ba\u00b7\3\2\2\2\u00ba\u00b8\3\2\2\2"+ "\u00ba\u00b9\3\2\2\2\u00bb\16\3\2\2\2\u00bc\u00c0\5\31\r\2\u00bd\u00c0"+ "\5\23\n\2\u00be\u00c0\5\25\13\2\u00bf\u00bc\3\2\2\2\u00bf\u00bd\3\2\2"+ "\2\u00bf\u00be\3\2\2\2\u00c0\20\3\2\2\2\u00c1\u00c2\7G\2\2\u00c2\u00c3"+ "\7Z\2\2\u00c3\u00c8\7R\2\2\u00c4\u00c5\7g\2\2\u00c5\u00c6\7z\2\2\u00c6"+ "\u00c8\7r\2\2\u00c7\u00c1\3\2\2\2\u00c7\u00c4\3\2\2\2\u00c8\u00c9\3\2"+ "\2\2\u00c9\u00cb\5#\22\2\u00ca\u00cc\n\5\2\2\u00cb\u00ca\3\2\2\2\u00cc"+ "\u00cd\3\2\2\2\u00cd\u00cb\3\2\2\2\u00cd\u00ce\3\2\2\2\u00ce\u00cf\3\2"+ "\2\2\u00cf\u00d0\5%\23\2\u00d0\22\3\2\2\2\u00d1\u00d3\7/\2\2\u00d2\u00d1"+ "\3\2\2\2\u00d2\u00d3\3\2\2\2\u00d3\u00d4\3\2\2\2\u00d4\u00d7\5\33\16\2"+ "\u00d5\u00d6\7\60\2\2\u00d6\u00d8\5\33\16\2\u00d7\u00d5\3\2\2\2\u00d7"+ "\u00d8\3\2\2\2\u00d8\24\3\2\2\2\u00d9\u00da\7P\2\2\u00da\u00db\7W\2\2"+ "\u00db\u00dc\7N\2\2\u00dc\u00e2\7N\2\2\u00dd\u00de\7p\2\2\u00de\u00df"+ "\7w\2\2\u00df\u00e0\7n\2\2\u00e0\u00e2\7n\2\2\u00e1\u00d9\3\2\2\2\u00e1"+ "\u00dd\3\2\2\2\u00e2\26\3\2\2\2\u00e3\u00e5\5\37\20\2\u00e4\u00e6\5\17"+ "\b\2\u00e5\u00e4\3\2\2\2\u00e5\u00e6\3\2\2\2\u00e6\u00eb\3\2\2\2\u00e7"+ "\u00e8\7.\2\2\u00e8\u00ea\5\17\b\2\u00e9\u00e7\3\2\2\2\u00ea\u00ed\3\2"+ "\2\2\u00eb\u00e9\3\2\2\2\u00eb\u00ec\3\2\2\2\u00ec\u00ee\3\2\2\2\u00ed"+ 
"\u00eb\3\2\2\2\u00ee\u00ef\5!\21\2\u00ef\30\3\2\2\2\u00f0\u00f1\7$\2\2"+ "\u00f1\u00f2\5\35\17\2\u00f2\u00f3\7$\2\2\u00f3\u00f7\3\2\2\2\u00f4\u00f5"+ "\7$\2\2\u00f5\u00f7\7$\2\2\u00f6\u00f0\3\2\2\2\u00f6\u00f4\3\2\2\2\u00f7"+ "\32\3\2\2\2\u00f8\u00fa\4\62;\2\u00f9\u00f8\3\2\2\2\u00fa\u00fb\3\2\2"+ "\2\u00fb\u00f9\3\2\2\2\u00fb\u00fc\3\2\2\2\u00fc\34\3\2\2\2\u00fd\u0101"+ "\n\6\2\2\u00fe\u00ff\7^\2\2\u00ff\u0101\7$\2\2\u0100\u00fd\3\2\2\2\u0100"+ "\u00fe\3\2\2\2\u0101\u0102\3\2\2\2\u0102\u0100\3\2\2\2\u0102\u0103\3\2"+ "\2\2\u0103\36\3\2\2\2\u0104\u0105\7*\2\2\u0105 \3\2\2\2\u0106\u0107\7"+ "+\2\2\u0107\"\3\2\2\2\u0108\u0109\7}\2\2\u0109$\3\2\2\2\u010a\u010b\7"+ "\177\2\2\u010b&\3\2\2\2\33\2*FQl}\u0091\u009c\u00a1\u00a9\u00af\u00b5"+ "\u00ba\u00bf\u00c7\u00cd\u00d2\u00d7\u00e1\u00e5\u00eb\u00f6\u00fb\u0100"+ "\u0102\3\b\2\2"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { _decisionToDFA = new DFA[_ATN.getNumberOfDecisions()]; for (int i = 0; i < _ATN.getNumberOfDecisions(); i++) { _decisionToDFA[i] = new DFA(_ATN.getDecisionState(i), i); } } }
package com.mercadopago.android.px.utils; /* * Copyright 2013 Piotr Adamus * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import android.annotation.SuppressLint; import android.content.Context; import android.graphics.Bitmap; import android.graphics.Bitmap.Config; import android.graphics.Canvas; import android.graphics.Color; import android.graphics.ComposeShader; import android.graphics.Matrix; import android.graphics.Paint; import android.graphics.Paint.Join; import android.graphics.Paint.Style; import android.graphics.Path; import android.graphics.PorterDuff; import android.graphics.RadialGradient; import android.graphics.RectF; import android.graphics.Shader.TileMode; import android.graphics.SweepGradient; import android.os.Bundle; import android.os.Parcelable; import android.util.AttributeSet; import android.view.MotionEvent; import android.view.View; public class ColorPicker extends View { /** * Customizable display parameters (in percents) */ private final int paramOuterPadding = 2; // outer padding of the whole color picker view private final int paramInnerPadding = 5; // distance between value slider wheel and inner color wheel private final int paramValueSliderWidth = 10; // width of the value slider private final int paramArrowPointerSize = 4; // size of the arrow pointer; set to 0 to hide the pointer private Paint colorWheelPaint; private Paint valueSliderPaint; private Paint colorViewPaint; private Paint colorPointerPaint; private RectF colorPointerCoords; private Paint valuePointerPaint; private Paint valuePointerArrowPaint; private RectF outerWheelRect; private RectF innerWheelRect; private Path colorViewPath; private Path valueSliderPath; private Path arrowPointerPath; private Bitmap colorWheelBitmap; private int valueSliderWidth; private int innerPadding; private int outerPadding; private int arrowPointerSize; private int outerWheelRadius; private int innerWheelRadius; private int colorWheelRadius; private Matrix gradientRotationMatrix; /** * Currently selected color */ private float[] colorHSV = { 0f, 0f, 1f }; public ColorPicker(Context context, AttributeSet attrs, int defStyle) { super(context, attrs, defStyle); init(); } public ColorPicker(Context context, AttributeSet attrs) { super(context, attrs); init(); } public ColorPicker(Context context) { super(context); init(); } private void init() { colorPointerPaint = new Paint(); colorPointerPaint.setStyle(Style.STROKE); colorPointerPaint.setStrokeWidth(2f); colorPointerPaint.setARGB(128, 0, 0, 0); valuePointerPaint = new Paint(); valuePointerPaint.setStyle(Style.STROKE); valuePointerPaint.setStrokeWidth(2f); valuePointerArrowPaint = new Paint(); colorWheelPaint = new Paint(); colorWheelPaint.setAntiAlias(true); colorWheelPaint.setDither(true); valueSliderPaint = new Paint(); valueSliderPaint.setAntiAlias(true); valueSliderPaint.setDither(true); colorViewPaint = new Paint(); colorViewPaint.setAntiAlias(true); colorViewPath = new Path(); valueSliderPath = new Path(); arrowPointerPath = new Path(); outerWheelRect = new RectF(); 
innerWheelRect = new RectF(); colorPointerCoords = new RectF(); } @Override protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { int widthSize = MeasureSpec.getSize(widthMeasureSpec); int heightSize = MeasureSpec.getSize(heightMeasureSpec); int size = Math.min(widthSize, heightSize); setMeasuredDimension(size, size); } @SuppressLint("DrawAllocation") @Override protected void onDraw(Canvas canvas) { int centerX = getWidth() / 2; int centerY = getHeight() / 2; // drawing color wheel canvas.drawBitmap(colorWheelBitmap, centerX - colorWheelRadius, centerY - colorWheelRadius, null); // drawing color view colorViewPaint.setColor(Color.HSVToColor(colorHSV)); canvas.drawPath(colorViewPath, colorViewPaint); // drawing value slider float[] hsv = { colorHSV[0], colorHSV[1], 1f }; SweepGradient sweepGradient = new SweepGradient(centerX, centerY, new int[] { Color.BLACK, Color.HSVToColor(hsv), Color.WHITE }, null); sweepGradient.setLocalMatrix(gradientRotationMatrix); valueSliderPaint.setShader(sweepGradient); canvas.drawPath(valueSliderPath, valueSliderPaint); // drawing color wheel pointer float hueAngle = (float) Math.toRadians(colorHSV[0]); int colorPointX = (int) (-Math.cos(hueAngle) * colorHSV[1] * colorWheelRadius) + centerX; int colorPointY = (int) (-Math.sin(hueAngle) * colorHSV[1] * colorWheelRadius) + centerY; float pointerRadius = 0.075f * colorWheelRadius; int pointerX = (int) (colorPointX - pointerRadius / 2); int pointerY = (int) (colorPointY - pointerRadius / 2); colorPointerCoords.set(pointerX, pointerY, pointerX + pointerRadius, pointerY + pointerRadius); canvas.drawOval(colorPointerCoords, colorPointerPaint); // drawing value pointer valuePointerPaint.setColor(Color.HSVToColor(new float[] { 0f, 0f, 1f - colorHSV[2] })); double valueAngle = (colorHSV[2] - 0.5f) * Math.PI; float valueAngleX = (float) Math.cos(valueAngle); float valueAngleY = (float) Math.sin(valueAngle); canvas.drawLine(valueAngleX * innerWheelRadius + centerX, valueAngleY * innerWheelRadius + centerY, valueAngleX * outerWheelRadius + centerX, valueAngleY * outerWheelRadius + centerY, valuePointerPaint); // drawing pointer arrow if (arrowPointerSize > 0) { drawPointerArrow(canvas); } } private void drawPointerArrow(Canvas canvas) { int centerX = getWidth() / 2; int centerY = getHeight() / 2; double tipAngle = (colorHSV[2] - 0.5f) * Math.PI; double leftAngle = tipAngle + Math.PI / 96; double rightAngle = tipAngle - Math.PI / 96; double tipAngleX = Math.cos(tipAngle) * outerWheelRadius; double tipAngleY = Math.sin(tipAngle) * outerWheelRadius; double leftAngleX = Math.cos(leftAngle) * (outerWheelRadius + arrowPointerSize); double leftAngleY = Math.sin(leftAngle) * (outerWheelRadius + arrowPointerSize); double rightAngleX = Math.cos(rightAngle) * (outerWheelRadius + arrowPointerSize); double rightAngleY = Math.sin(rightAngle) * (outerWheelRadius + arrowPointerSize); arrowPointerPath.reset(); arrowPointerPath.moveTo((float) tipAngleX + centerX, (float) tipAngleY + centerY); arrowPointerPath.lineTo((float) leftAngleX + centerX, (float) leftAngleY + centerY); arrowPointerPath.lineTo((float) rightAngleX + centerX, (float) rightAngleY + centerY); arrowPointerPath.lineTo((float) tipAngleX + centerX, (float) tipAngleY + centerY); valuePointerArrowPaint.setColor(Color.HSVToColor(colorHSV)); valuePointerArrowPaint.setStyle(Style.FILL); canvas.drawPath(arrowPointerPath, valuePointerArrowPaint); valuePointerArrowPaint.setStyle(Style.STROKE); valuePointerArrowPaint.setStrokeJoin(Join.ROUND); 
valuePointerArrowPaint.setColor(Color.BLACK); canvas.drawPath(arrowPointerPath, valuePointerArrowPaint); } @Override protected void onSizeChanged(int width, int height, int oldw, int oldh) { int centerX = width / 2; int centerY = height / 2; innerPadding = paramInnerPadding * width / 100; outerPadding = paramOuterPadding * width / 100; arrowPointerSize = paramArrowPointerSize * width / 100; valueSliderWidth = paramValueSliderWidth * width / 100; outerWheelRadius = width / 2 - outerPadding - arrowPointerSize; innerWheelRadius = outerWheelRadius - valueSliderWidth; colorWheelRadius = innerWheelRadius - innerPadding; outerWheelRect.set(centerX - outerWheelRadius, centerY - outerWheelRadius, centerX + outerWheelRadius, centerY + outerWheelRadius); innerWheelRect.set(centerX - innerWheelRadius, centerY - innerWheelRadius, centerX + innerWheelRadius, centerY + innerWheelRadius); colorWheelBitmap = createColorWheelBitmap(colorWheelRadius * 2, colorWheelRadius * 2); gradientRotationMatrix = new Matrix(); gradientRotationMatrix.preRotate(270, width / 2, height / 2); colorViewPath.arcTo(outerWheelRect, 270, -180); colorViewPath.arcTo(innerWheelRect, 90, 180); valueSliderPath.arcTo(outerWheelRect, 270, 180); valueSliderPath.arcTo(innerWheelRect, 90, -180); } private Bitmap createColorWheelBitmap(int width, int height) { Bitmap bitmap = Bitmap.createBitmap(width, height, Config.ARGB_8888); int colorCount = 12; int colorAngleStep = 360 / 12; int colors[] = new int[colorCount + 1]; float hsv[] = { 0f, 1f, 1f }; for (int i = 0; i < colors.length; i++) { hsv[0] = (i * colorAngleStep + 180) % 360; colors[i] = Color.HSVToColor(hsv); } colors[colorCount] = colors[0]; SweepGradient sweepGradient = new SweepGradient(width / 2, height / 2, colors, null); RadialGradient radialGradient = new RadialGradient(width / 2, height / 2, colorWheelRadius, 0xFFFFFFFF, 0x00FFFFFF, TileMode.CLAMP); ComposeShader composeShader = new ComposeShader(sweepGradient, radialGradient, PorterDuff.Mode.SRC_OVER); colorWheelPaint.setShader(composeShader); Canvas canvas = new Canvas(bitmap); canvas.drawCircle(width / 2, height / 2, colorWheelRadius, colorWheelPaint); return bitmap; } @Override public boolean onTouchEvent(MotionEvent event) { int action = event.getAction(); switch (action) { case MotionEvent.ACTION_DOWN: case MotionEvent.ACTION_MOVE: int x = (int) event.getX(); int y = (int) event.getY(); int cx = x - getWidth() / 2; int cy = y - getHeight() / 2; double d = Math.sqrt(cx * cx + cy * cy); if (d <= colorWheelRadius) { colorHSV[0] = (float) (Math.toDegrees(Math.atan2(cy, cx)) + 180f); colorHSV[1] = Math.max(0f, Math.min(1f, (float) (d / colorWheelRadius))); invalidate(); } else if (x >= getWidth() / 2 && d >= innerWheelRadius) { colorHSV[2] = (float) Math.max(0, Math.min(1, Math.atan2(cy, cx) / Math.PI + 0.5f)); invalidate(); } return true; } return super.onTouchEvent(event); } public int getColor() { return Color.HSVToColor(colorHSV); } public void setColor(int color) { Color.colorToHSV(color, colorHSV); } @Override protected Parcelable onSaveInstanceState() { Bundle state = new Bundle(); state.putFloatArray("color", colorHSV); state.putParcelable("super", super.onSaveInstanceState()); return state; } @Override protected void onRestoreInstanceState(Parcelable state) { if (state instanceof Bundle) { Bundle bundle = (Bundle) state; colorHSV = bundle.getFloatArray("color"); super.onRestoreInstanceState(bundle.getParcelable("super")); } else { super.onRestoreInstanceState(state); } } }
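ColorPicker measures itself into a square and exposes the selection through getColor()/setColor(int), so it can be used directly as a content view. A minimal sketch of a host Activity follows; the Activity name and the initial HSV value are hypothetical.

import android.app.Activity;
import android.graphics.Color;
import android.os.Bundle;

import com.mercadopago.android.px.utils.ColorPicker;

public class ColorPickerDemoActivity extends Activity {

    private ColorPicker colorPicker;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        // The view sizes itself to min(width, height), so it can be the content view as-is.
        colorPicker = new ColorPicker(this);
        colorPicker.setColor(Color.HSVToColor(new float[] {200f, 0.6f, 0.9f}));
        setContentView(colorPicker);
    }

    @Override
    protected void onPause() {
        super.onPause();
        // Read the current selection back as an ARGB color int.
        int selectedColor = colorPicker.getColor();
        // ... persist or apply selectedColor as needed.
    }
}

The view's own onSaveInstanceState/onRestoreInstanceState keep the HSV selection across configuration changes, provided the view has an id when it is placed in a layout.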
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.nifi.web.api; import com.wordnik.swagger.annotations.Api; import com.wordnik.swagger.annotations.ApiOperation; import com.wordnik.swagger.annotations.ApiParam; import com.wordnik.swagger.annotations.ApiResponse; import com.wordnik.swagger.annotations.ApiResponses; import com.wordnik.swagger.annotations.Authorization; import org.apache.commons.lang3.StringUtils; import org.apache.nifi.authorization.Authorizer; import org.apache.nifi.authorization.RequestAction; import org.apache.nifi.authorization.resource.Authorizable; import org.apache.nifi.authorization.user.NiFiUserUtils; import org.apache.nifi.web.NiFiServiceFacade; import org.apache.nifi.web.Revision; import org.apache.nifi.web.api.dto.LabelDTO; import org.apache.nifi.web.api.dto.PositionDTO; import org.apache.nifi.web.api.entity.LabelEntity; import org.apache.nifi.web.api.request.ClientIdParameter; import org.apache.nifi.web.api.request.LongParameter; import javax.servlet.http.HttpServletRequest; import javax.ws.rs.Consumes; import javax.ws.rs.DELETE; import javax.ws.rs.DefaultValue; import javax.ws.rs.GET; import javax.ws.rs.HttpMethod; import javax.ws.rs.PUT; import javax.ws.rs.Path; import javax.ws.rs.PathParam; import javax.ws.rs.Produces; import javax.ws.rs.QueryParam; import javax.ws.rs.core.Context; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import java.util.Set; /** * RESTful endpoint for managing a Label. */ @Path("/labels") @Api( value = "/labels", description = "Endpoint for managing a Label." ) public class LabelResource extends ApplicationResource { private NiFiServiceFacade serviceFacade; private Authorizer authorizer; /** * Populates the uri for the specified labels. * * @param labelEntities labels * @return entites */ public Set<LabelEntity> populateRemainingLabelEntitiesContent(Set<LabelEntity> labelEntities) { for (LabelEntity labelEntity : labelEntities) { populateRemainingLabelEntityContent(labelEntity); } return labelEntities; } /** * Populates the uri for the specified labels. * * @param labelEntity label * @return entities */ public LabelEntity populateRemainingLabelEntityContent(LabelEntity labelEntity) { labelEntity.setUri(generateResourceUri("labels", labelEntity.getId())); return labelEntity; } /** * Retrieves the specified label. * * @param id The id of the label to retrieve * @return A labelEntity. */ @GET @Consumes(MediaType.WILDCARD) @Produces(MediaType.APPLICATION_JSON) @Path("{id}") @ApiOperation( value = "Gets a label", response = LabelEntity.class, authorizations = { @Authorization(value = "Read - /labels/{uuid}", type = "") } ) @ApiResponses( value = { @ApiResponse(code = 400, message = "NiFi was unable to complete the request because it was invalid. 
The request should not be retried without modification."), @ApiResponse(code = 401, message = "Client could not be authenticated."), @ApiResponse(code = 403, message = "Client is not authorized to make this request."), @ApiResponse(code = 404, message = "The specified resource could not be found."), @ApiResponse(code = 409, message = "The request was valid but NiFi was not in the appropriate state to process it. Retrying the same request later may be successful.") } ) public Response getLabel( @ApiParam( value = "The label id.", required = true ) @PathParam("id") final String id) { if (isReplicateRequest()) { return replicate(HttpMethod.GET); } // authorize access serviceFacade.authorizeAccess(lookup -> { final Authorizable label = lookup.getLabel(id); label.authorize(authorizer, RequestAction.READ, NiFiUserUtils.getNiFiUser()); }); // get the label final LabelEntity entity = serviceFacade.getLabel(id); populateRemainingLabelEntityContent(entity); return clusterContext(generateOkResponse(entity)).build(); } /** * Updates the specified label. * * @param httpServletRequest request * @param id The id of the label to update. * @param requestLabelEntity A labelEntity. * @return A labelEntity. */ @PUT @Consumes(MediaType.APPLICATION_JSON) @Produces(MediaType.APPLICATION_JSON) @Path("{id}") @ApiOperation( value = "Updates a label", response = LabelEntity.class, authorizations = { @Authorization(value = "Write - /labels/{uuid}", type = "") } ) @ApiResponses( value = { @ApiResponse(code = 400, message = "NiFi was unable to complete the request because it was invalid. The request should not be retried without modification."), @ApiResponse(code = 401, message = "Client could not be authenticated."), @ApiResponse(code = 403, message = "Client is not authorized to make this request."), @ApiResponse(code = 404, message = "The specified resource could not be found."), @ApiResponse(code = 409, message = "The request was valid but NiFi was not in the appropriate state to process it. 
Retrying the same request later may be successful.") } ) public Response updateLabel( @Context final HttpServletRequest httpServletRequest, @ApiParam( value = "The label id.", required = true ) @PathParam("id") final String id, @ApiParam( value = "The label configuration details.", required = true ) final LabelEntity requestLabelEntity) { if (requestLabelEntity == null || requestLabelEntity.getComponent() == null) { throw new IllegalArgumentException("Label details must be specified."); } if (requestLabelEntity.getRevision() == null) { throw new IllegalArgumentException("Revision must be specified."); } // ensure the ids are the same final LabelDTO requestLabelDTO = requestLabelEntity.getComponent(); if (!id.equals(requestLabelDTO.getId())) { throw new IllegalArgumentException(String.format("The label id (%s) in the request body does not equal the " + "label id of the requested resource (%s).", requestLabelDTO.getId(), id)); } final PositionDTO proposedPosition = requestLabelDTO.getPosition(); if (proposedPosition != null) { if (proposedPosition.getX() == null || proposedPosition.getY() == null) { throw new IllegalArgumentException("The x and y coordinate of the proposed position must be specified."); } } if (isReplicateRequest()) { return replicate(HttpMethod.PUT, requestLabelEntity); } // handle expects request (usually from the cluster manager) final Revision requestRevision = getRevision(requestLabelEntity, id); return withWriteLock( serviceFacade, requestLabelEntity, requestRevision, lookup -> { Authorizable authorizable = lookup.getLabel(id); authorizable.authorize(authorizer, RequestAction.WRITE, NiFiUserUtils.getNiFiUser()); }, null, (revision, labelEntity) -> { final LabelDTO labelDTO = labelEntity.getComponent(); // update the label final LabelEntity entity = serviceFacade.updateLabel(revision, labelDTO); populateRemainingLabelEntityContent(entity); return clusterContext(generateOkResponse(entity)).build(); } ); } /** * Removes the specified label. * * @param httpServletRequest request * @param version The revision is used to verify the client is working with the latest version of the flow. * @param clientId Optional client id. If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response. * @param id The id of the label to remove. * @return A entity containing the client id and an updated revision. */ @DELETE @Consumes(MediaType.WILDCARD) @Produces(MediaType.APPLICATION_JSON) @Path("{id}") @ApiOperation( value = "Deletes a label", response = LabelEntity.class, authorizations = { @Authorization(value = "Write - /labels/{uuid}", type = ""), @Authorization(value = "Write - Parent Process Group - /process-groups/{uuid}", type = "") } ) @ApiResponses( value = { @ApiResponse(code = 400, message = "NiFi was unable to complete the request because it was invalid. The request should not be retried without modification."), @ApiResponse(code = 401, message = "Client could not be authenticated."), @ApiResponse(code = 403, message = "Client is not authorized to make this request."), @ApiResponse(code = 404, message = "The specified resource could not be found."), @ApiResponse(code = 409, message = "The request was valid but NiFi was not in the appropriate state to process it. 
Retrying the same request later may be successful.") } ) public Response removeLabel( @Context final HttpServletRequest httpServletRequest, @ApiParam( value = "The revision is used to verify the client is working with the latest version of the flow.", required = false ) @QueryParam(VERSION) final LongParameter version, @ApiParam( value = "If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response.", required = false ) @QueryParam(CLIENT_ID) @DefaultValue(StringUtils.EMPTY) final ClientIdParameter clientId, @ApiParam( value = "The label id.", required = true ) @PathParam("id") final String id) { if (isReplicateRequest()) { return replicate(HttpMethod.DELETE); } final LabelEntity requestLabelEntity = new LabelEntity(); requestLabelEntity.setId(id); // handle expects request (usually from the cluster manager) final Revision requestRevision = new Revision(version == null ? null : version.getLong(), clientId.getClientId(), id); return withWriteLock( serviceFacade, requestLabelEntity, requestRevision, lookup -> { final Authorizable label = lookup.getLabel(id); // ensure write permission to the label label.authorize(authorizer, RequestAction.WRITE, NiFiUserUtils.getNiFiUser()); // ensure write permission to the parent process group label.getParentAuthorizable().authorize(authorizer, RequestAction.WRITE, NiFiUserUtils.getNiFiUser()); }, null, (revision, labelEntity) -> { // delete the specified label final LabelEntity entity = serviceFacade.deleteLabel(revision, labelEntity.getId()); return clusterContext(generateOkResponse(entity)).build(); } ); } // setters public void setServiceFacade(NiFiServiceFacade serviceFacade) { this.serviceFacade = serviceFacade; } public void setAuthorizer(Authorizer authorizer) { this.authorizer = authorizer; } }
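Per the JAX-RS annotations above, the resource is reached at /labels/{id} with GET, PUT and DELETE. A minimal standalone sketch of the read path using the JAX-RS 2.0 client API follows; the base URI and label id are placeholders, TLS and authentication setup are omitted, and the body is read back as a raw JSON string rather than bound to LabelEntity.

import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;

public class LabelReadSketch {
    public static void main(String[] args) {
        String baseUri = "https://nifi.example.com:8443/nifi-api";   // placeholder
        String labelId = "00000000-0000-0000-0000-000000000000";     // placeholder

        Client client = ClientBuilder.newClient();
        try {
            // GET /labels/{id}; requires Read - /labels/{uuid} on the NiFi side.
            Response response = client.target(baseUri)
                    .path("labels").path(labelId)
                    .request(MediaType.APPLICATION_JSON)
                    .get();

            System.out.println("HTTP " + response.getStatus());
            System.out.println(response.readEntity(String.class));
        } finally {
            client.close();
        }
    }
}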
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/bigtable/admin/table/v1/bigtable_table_service_messages.proto package com.google.bigtable.admin.table.v1; /** * Protobuf type {@code google.bigtable.admin.table.v1.CreateColumnFamilyRequest} */ public final class CreateColumnFamilyRequest extends com.google.protobuf.GeneratedMessage implements // @@protoc_insertion_point(message_implements:google.bigtable.admin.table.v1.CreateColumnFamilyRequest) CreateColumnFamilyRequestOrBuilder { // Use CreateColumnFamilyRequest.newBuilder() to construct. private CreateColumnFamilyRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); } private CreateColumnFamilyRequest() { name_ = ""; columnFamilyId_ = ""; } @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private CreateColumnFamilyRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) { this(); int mutable_bitField0_ = 0; try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!input.skipField(tag)) { done = true; } break; } case 10: { String s = input.readStringRequireUtf8(); name_ = s; break; } case 18: { String s = input.readStringRequireUtf8(); columnFamilyId_ = s; break; } case 26: { com.google.bigtable.admin.table.v1.ColumnFamily.Builder subBuilder = null; if (columnFamily_ != null) { subBuilder = columnFamily_.toBuilder(); } columnFamily_ = input.readMessage(com.google.bigtable.admin.table.v1.ColumnFamily.parser(), extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(columnFamily_); columnFamily_ = subBuilder.buildPartial(); } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw new RuntimeException(e.setUnfinishedMessage(this)); } catch (java.io.IOException e) { throw new RuntimeException( new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this)); } finally { makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.bigtable.admin.table.v1.BigtableTableServiceMessagesProto.internal_static_google_bigtable_admin_table_v1_CreateColumnFamilyRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.bigtable.admin.table.v1.BigtableTableServiceMessagesProto.internal_static_google_bigtable_admin_table_v1_CreateColumnFamilyRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.bigtable.admin.table.v1.CreateColumnFamilyRequest.class, com.google.bigtable.admin.table.v1.CreateColumnFamilyRequest.Builder.class); } public static final int NAME_FIELD_NUMBER = 1; private volatile java.lang.Object name_; /** * <code>optional string name = 1;</code> * * <pre> * The unique name of the table in which to create the new column family. * </pre> */ public java.lang.String getName() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); name_ = s; return s; } } /** * <code>optional string name = 1;</code> * * <pre> * The unique name of the table in which to create the new column family. 
* </pre> */ public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int COLUMN_FAMILY_ID_FIELD_NUMBER = 2; private volatile java.lang.Object columnFamilyId_; /** * <code>optional string column_family_id = 2;</code> * * <pre> * The name by which the new column family should be referred to within the * table, e.g. "foobar" rather than "&lt;table_name&gt;/columnFamilies/foobar". * </pre> */ public java.lang.String getColumnFamilyId() { java.lang.Object ref = columnFamilyId_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); columnFamilyId_ = s; return s; } } /** * <code>optional string column_family_id = 2;</code> * * <pre> * The name by which the new column family should be referred to within the * table, e.g. "foobar" rather than "&lt;table_name&gt;/columnFamilies/foobar". * </pre> */ public com.google.protobuf.ByteString getColumnFamilyIdBytes() { java.lang.Object ref = columnFamilyId_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); columnFamilyId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int COLUMN_FAMILY_FIELD_NUMBER = 3; private com.google.bigtable.admin.table.v1.ColumnFamily columnFamily_; /** * <code>optional .google.bigtable.admin.table.v1.ColumnFamily column_family = 3;</code> * * <pre> * The column family to create. The `name` field must be left blank. * </pre> */ public boolean hasColumnFamily() { return columnFamily_ != null; } /** * <code>optional .google.bigtable.admin.table.v1.ColumnFamily column_family = 3;</code> * * <pre> * The column family to create. The `name` field must be left blank. * </pre> */ public com.google.bigtable.admin.table.v1.ColumnFamily getColumnFamily() { return columnFamily_ == null ? com.google.bigtable.admin.table.v1.ColumnFamily.getDefaultInstance() : columnFamily_; } /** * <code>optional .google.bigtable.admin.table.v1.ColumnFamily column_family = 3;</code> * * <pre> * The column family to create. The `name` field must be left blank. 
* </pre> */ public com.google.bigtable.admin.table.v1.ColumnFamilyOrBuilder getColumnFamilyOrBuilder() { return getColumnFamily(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!getNameBytes().isEmpty()) { com.google.protobuf.GeneratedMessage.writeString(output, 1, name_); } if (!getColumnFamilyIdBytes().isEmpty()) { com.google.protobuf.GeneratedMessage.writeString(output, 2, columnFamilyId_); } if (columnFamily_ != null) { output.writeMessage(3, getColumnFamily()); } } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!getNameBytes().isEmpty()) { size += com.google.protobuf.GeneratedMessage.computeStringSize(1, name_); } if (!getColumnFamilyIdBytes().isEmpty()) { size += com.google.protobuf.GeneratedMessage.computeStringSize(2, columnFamilyId_); } if (columnFamily_ != null) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(3, getColumnFamily()); } memoizedSize = size; return size; } private static final long serialVersionUID = 0L; public static com.google.bigtable.admin.table.v1.CreateColumnFamilyRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.bigtable.admin.table.v1.CreateColumnFamilyRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.bigtable.admin.table.v1.CreateColumnFamilyRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.bigtable.admin.table.v1.CreateColumnFamilyRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.bigtable.admin.table.v1.CreateColumnFamilyRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static com.google.bigtable.admin.table.v1.CreateColumnFamilyRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static com.google.bigtable.admin.table.v1.CreateColumnFamilyRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static com.google.bigtable.admin.table.v1.CreateColumnFamilyRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static com.google.bigtable.admin.table.v1.CreateColumnFamilyRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static com.google.bigtable.admin.table.v1.CreateColumnFamilyRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.bigtable.admin.table.v1.CreateColumnFamilyRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code google.bigtable.admin.table.v1.CreateColumnFamilyRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.bigtable.admin.table.v1.CreateColumnFamilyRequest) com.google.bigtable.admin.table.v1.CreateColumnFamilyRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.bigtable.admin.table.v1.BigtableTableServiceMessagesProto.internal_static_google_bigtable_admin_table_v1_CreateColumnFamilyRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.bigtable.admin.table.v1.BigtableTableServiceMessagesProto.internal_static_google_bigtable_admin_table_v1_CreateColumnFamilyRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.bigtable.admin.table.v1.CreateColumnFamilyRequest.class, com.google.bigtable.admin.table.v1.CreateColumnFamilyRequest.Builder.class); } // Construct using com.google.bigtable.admin.table.v1.CreateColumnFamilyRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } public Builder clear() { super.clear(); name_ = ""; columnFamilyId_ = ""; if (columnFamilyBuilder_ == null) { columnFamily_ = null; } else { columnFamily_ = null; columnFamilyBuilder_ = null; } return this; } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.bigtable.admin.table.v1.BigtableTableServiceMessagesProto.internal_static_google_bigtable_admin_table_v1_CreateColumnFamilyRequest_descriptor; } public com.google.bigtable.admin.table.v1.CreateColumnFamilyRequest getDefaultInstanceForType() { return com.google.bigtable.admin.table.v1.CreateColumnFamilyRequest.getDefaultInstance(); } public com.google.bigtable.admin.table.v1.CreateColumnFamilyRequest build() { com.google.bigtable.admin.table.v1.CreateColumnFamilyRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public com.google.bigtable.admin.table.v1.CreateColumnFamilyRequest buildPartial() { com.google.bigtable.admin.table.v1.CreateColumnFamilyRequest result = new com.google.bigtable.admin.table.v1.CreateColumnFamilyRequest(this); result.name_ = name_; result.columnFamilyId_ = columnFamilyId_; if (columnFamilyBuilder_ == null) { result.columnFamily_ = columnFamily_; } else { result.columnFamily_ = columnFamilyBuilder_.build(); } onBuilt(); return result; } public Builder 
mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.bigtable.admin.table.v1.CreateColumnFamilyRequest) { return mergeFrom((com.google.bigtable.admin.table.v1.CreateColumnFamilyRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.bigtable.admin.table.v1.CreateColumnFamilyRequest other) { if (other == com.google.bigtable.admin.table.v1.CreateColumnFamilyRequest.getDefaultInstance()) return this; if (!other.getName().isEmpty()) { name_ = other.name_; onChanged(); } if (!other.getColumnFamilyId().isEmpty()) { columnFamilyId_ = other.columnFamilyId_; onChanged(); } if (other.hasColumnFamily()) { mergeColumnFamily(other.getColumnFamily()); } onChanged(); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.bigtable.admin.table.v1.CreateColumnFamilyRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (com.google.bigtable.admin.table.v1.CreateColumnFamilyRequest) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private java.lang.Object name_ = ""; /** * <code>optional string name = 1;</code> * * <pre> * The unique name of the table in which to create the new column family. * </pre> */ public java.lang.String getName() { java.lang.Object ref = name_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); name_ = s; return s; } else { return (java.lang.String) ref; } } /** * <code>optional string name = 1;</code> * * <pre> * The unique name of the table in which to create the new column family. * </pre> */ public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <code>optional string name = 1;</code> * * <pre> * The unique name of the table in which to create the new column family. * </pre> */ public Builder setName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } name_ = value; onChanged(); return this; } /** * <code>optional string name = 1;</code> * * <pre> * The unique name of the table in which to create the new column family. * </pre> */ public Builder clearName() { name_ = getDefaultInstance().getName(); onChanged(); return this; } /** * <code>optional string name = 1;</code> * * <pre> * The unique name of the table in which to create the new column family. * </pre> */ public Builder setNameBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); name_ = value; onChanged(); return this; } private java.lang.Object columnFamilyId_ = ""; /** * <code>optional string column_family_id = 2;</code> * * <pre> * The name by which the new column family should be referred to within the * table, e.g. "foobar" rather than "&lt;table_name&gt;/columnFamilies/foobar". 
* </pre> */ public java.lang.String getColumnFamilyId() { java.lang.Object ref = columnFamilyId_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); columnFamilyId_ = s; return s; } else { return (java.lang.String) ref; } } /** * <code>optional string column_family_id = 2;</code> * * <pre> * The name by which the new column family should be referred to within the * table, e.g. "foobar" rather than "&lt;table_name&gt;/columnFamilies/foobar". * </pre> */ public com.google.protobuf.ByteString getColumnFamilyIdBytes() { java.lang.Object ref = columnFamilyId_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); columnFamilyId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <code>optional string column_family_id = 2;</code> * * <pre> * The name by which the new column family should be referred to within the * table, e.g. "foobar" rather than "&lt;table_name&gt;/columnFamilies/foobar". * </pre> */ public Builder setColumnFamilyId( java.lang.String value) { if (value == null) { throw new NullPointerException(); } columnFamilyId_ = value; onChanged(); return this; } /** * <code>optional string column_family_id = 2;</code> * * <pre> * The name by which the new column family should be referred to within the * table, e.g. "foobar" rather than "&lt;table_name&gt;/columnFamilies/foobar". * </pre> */ public Builder clearColumnFamilyId() { columnFamilyId_ = getDefaultInstance().getColumnFamilyId(); onChanged(); return this; } /** * <code>optional string column_family_id = 2;</code> * * <pre> * The name by which the new column family should be referred to within the * table, e.g. "foobar" rather than "&lt;table_name&gt;/columnFamilies/foobar". * </pre> */ public Builder setColumnFamilyIdBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); columnFamilyId_ = value; onChanged(); return this; } private com.google.bigtable.admin.table.v1.ColumnFamily columnFamily_ = null; private com.google.protobuf.SingleFieldBuilder< com.google.bigtable.admin.table.v1.ColumnFamily, com.google.bigtable.admin.table.v1.ColumnFamily.Builder, com.google.bigtable.admin.table.v1.ColumnFamilyOrBuilder> columnFamilyBuilder_; /** * <code>optional .google.bigtable.admin.table.v1.ColumnFamily column_family = 3;</code> * * <pre> * The column family to create. The `name` field must be left blank. * </pre> */ public boolean hasColumnFamily() { return columnFamilyBuilder_ != null || columnFamily_ != null; } /** * <code>optional .google.bigtable.admin.table.v1.ColumnFamily column_family = 3;</code> * * <pre> * The column family to create. The `name` field must be left blank. * </pre> */ public com.google.bigtable.admin.table.v1.ColumnFamily getColumnFamily() { if (columnFamilyBuilder_ == null) { return columnFamily_ == null ? com.google.bigtable.admin.table.v1.ColumnFamily.getDefaultInstance() : columnFamily_; } else { return columnFamilyBuilder_.getMessage(); } } /** * <code>optional .google.bigtable.admin.table.v1.ColumnFamily column_family = 3;</code> * * <pre> * The column family to create. The `name` field must be left blank. 
* </pre> */ public Builder setColumnFamily(com.google.bigtable.admin.table.v1.ColumnFamily value) { if (columnFamilyBuilder_ == null) { if (value == null) { throw new NullPointerException(); } columnFamily_ = value; onChanged(); } else { columnFamilyBuilder_.setMessage(value); } return this; } /** * <code>optional .google.bigtable.admin.table.v1.ColumnFamily column_family = 3;</code> * * <pre> * The column family to create. The `name` field must be left blank. * </pre> */ public Builder setColumnFamily( com.google.bigtable.admin.table.v1.ColumnFamily.Builder builderForValue) { if (columnFamilyBuilder_ == null) { columnFamily_ = builderForValue.build(); onChanged(); } else { columnFamilyBuilder_.setMessage(builderForValue.build()); } return this; } /** * <code>optional .google.bigtable.admin.table.v1.ColumnFamily column_family = 3;</code> * * <pre> * The column family to create. The `name` field must be left blank. * </pre> */ public Builder mergeColumnFamily(com.google.bigtable.admin.table.v1.ColumnFamily value) { if (columnFamilyBuilder_ == null) { if (columnFamily_ != null) { columnFamily_ = com.google.bigtable.admin.table.v1.ColumnFamily.newBuilder(columnFamily_).mergeFrom(value).buildPartial(); } else { columnFamily_ = value; } onChanged(); } else { columnFamilyBuilder_.mergeFrom(value); } return this; } /** * <code>optional .google.bigtable.admin.table.v1.ColumnFamily column_family = 3;</code> * * <pre> * The column family to create. The `name` field must be left blank. * </pre> */ public Builder clearColumnFamily() { if (columnFamilyBuilder_ == null) { columnFamily_ = null; onChanged(); } else { columnFamily_ = null; columnFamilyBuilder_ = null; } return this; } /** * <code>optional .google.bigtable.admin.table.v1.ColumnFamily column_family = 3;</code> * * <pre> * The column family to create. The `name` field must be left blank. * </pre> */ public com.google.bigtable.admin.table.v1.ColumnFamily.Builder getColumnFamilyBuilder() { onChanged(); return getColumnFamilyFieldBuilder().getBuilder(); } /** * <code>optional .google.bigtable.admin.table.v1.ColumnFamily column_family = 3;</code> * * <pre> * The column family to create. The `name` field must be left blank. * </pre> */ public com.google.bigtable.admin.table.v1.ColumnFamilyOrBuilder getColumnFamilyOrBuilder() { if (columnFamilyBuilder_ != null) { return columnFamilyBuilder_.getMessageOrBuilder(); } else { return columnFamily_ == null ? com.google.bigtable.admin.table.v1.ColumnFamily.getDefaultInstance() : columnFamily_; } } /** * <code>optional .google.bigtable.admin.table.v1.ColumnFamily column_family = 3;</code> * * <pre> * The column family to create. The `name` field must be left blank. 
* </pre> */ private com.google.protobuf.SingleFieldBuilder< com.google.bigtable.admin.table.v1.ColumnFamily, com.google.bigtable.admin.table.v1.ColumnFamily.Builder, com.google.bigtable.admin.table.v1.ColumnFamilyOrBuilder> getColumnFamilyFieldBuilder() { if (columnFamilyBuilder_ == null) { columnFamilyBuilder_ = new com.google.protobuf.SingleFieldBuilder< com.google.bigtable.admin.table.v1.ColumnFamily, com.google.bigtable.admin.table.v1.ColumnFamily.Builder, com.google.bigtable.admin.table.v1.ColumnFamilyOrBuilder>( getColumnFamily(), getParentForChildren(), isClean()); columnFamily_ = null; } return columnFamilyBuilder_; } public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return this; } public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return this; } // @@protoc_insertion_point(builder_scope:google.bigtable.admin.table.v1.CreateColumnFamilyRequest) } // @@protoc_insertion_point(class_scope:google.bigtable.admin.table.v1.CreateColumnFamilyRequest) private static final com.google.bigtable.admin.table.v1.CreateColumnFamilyRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.bigtable.admin.table.v1.CreateColumnFamilyRequest(); } public static com.google.bigtable.admin.table.v1.CreateColumnFamilyRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<CreateColumnFamilyRequest> PARSER = new com.google.protobuf.AbstractParser<CreateColumnFamilyRequest>() { public CreateColumnFamilyRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { try { return new CreateColumnFamilyRequest(input, extensionRegistry); } catch (RuntimeException e) { if (e.getCause() instanceof com.google.protobuf.InvalidProtocolBufferException) { throw (com.google.protobuf.InvalidProtocolBufferException) e.getCause(); } throw e; } } }; public static com.google.protobuf.Parser<CreateColumnFamilyRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<CreateColumnFamilyRequest> getParserForType() { return PARSER; } public com.google.bigtable.admin.table.v1.CreateColumnFamilyRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
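/*
 * Illustrative sketch (not part of the generated file above): building a CreateColumnFamilyRequest
 * with the Builder shown above. It assumes the standard protobuf-generated newBuilder()/build()
 * entry points; the column family id reuses the example value from the field comments. In real use
 * the request's table-name field (not shown in the excerpt above) would be populated as well.
 */
package com.example.bigtable.sketch;

import com.google.bigtable.admin.table.v1.ColumnFamily;
import com.google.bigtable.admin.table.v1.CreateColumnFamilyRequest;

public class CreateColumnFamilyRequestSketch {

    public static CreateColumnFamilyRequest newRequest() {
        // column_family_id (field 2) carries the short name, e.g. "foobar" rather than
        // "<table_name>/columnFamilies/foobar"; column_family (field 3) must leave `name` blank.
        return CreateColumnFamilyRequest.newBuilder()
                .setColumnFamilyId("foobar")
                .setColumnFamily(ColumnFamily.getDefaultInstance())
                .build();
    }
}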
/* * Copyright (C) 2008 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.miz.functions; import android.annotation.TargetApi; import android.os.Handler; import android.os.Message; import android.os.Process; import java.util.ArrayDeque; import java.util.concurrent.BlockingQueue; import java.util.concurrent.Callable; import java.util.concurrent.CancellationException; import java.util.concurrent.ExecutionException; import java.util.concurrent.Executor; import java.util.concurrent.Executors; import java.util.concurrent.FutureTask; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.ThreadFactory; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; /** * ************************************* * Copied from JB release framework: * https://android.googlesource.com/platform/frameworks/base/+/jb-release/core/java/android/os/AsyncTask.java * * so that threading behavior on all OS versions is the same and we can tweak behavior by using * executeOnExecutor() if needed. * * There are 3 changes in this copy of AsyncTask: * -pre-HC a single thread executor is used for serial operation * (Executors.newSingleThreadExecutor) and is the default * -the default THREAD_POOL_EXECUTOR was changed to use DiscardOldestPolicy * -a new fixed thread pool called DUAL_THREAD_EXECUTOR was added * ************************************* * * <p>AsyncTask enables proper and easy use of the UI thread. This class allows to * perform background operations and publish results on the UI thread without * having to manipulate threads and/or handlers.</p> * * <p>AsyncTask is designed to be a helper class around {@link Thread} and {@link Handler} * and does not constitute a generic threading framework. AsyncTasks should ideally be * used for short operations (a few seconds at the most.) If you need to keep threads * running for long periods of time, it is highly recommended you use the various APIs * provided by the <code>java.util.concurrent</code> pacakge such as {@link Executor}, * {@link ThreadPoolExecutor} and {@link FutureTask}.</p> * * <p>An asynchronous task is defined by a computation that runs on a background thread and * whose result is published on the UI thread. An asynchronous task is defined by 3 generic * types, called <code>Params</code>, <code>Progress</code> and <code>Result</code>, * and 4 steps, called <code>onPreExecute</code>, <code>doInBackground</code>, * <code>onProgressUpdate</code> and <code>onPostExecute</code>.</p> * * <div class="special reference"> * <h3>Developer Guides</h3> * <p>For more information about using tasks and threads, read the * <a href="{@docRoot}guide/topics/fundamentals/processes-and-threads.html">Processes and * Threads</a> developer guide.</p> * </div> * * <h2>Usage</h2> * <p>AsyncTask must be subclassed to be used. 
The subclass will override at least * one method ({@link #doInBackground}), and most often will override a * second one ({@link #onPostExecute}.)</p> * * <p>Here is an example of subclassing:</p> * <pre class="prettyprint"> * private class DownloadFilesTask extends AsyncTask&lt;URL, Integer, Long&gt; { * protected Long doInBackground(URL... urls) { * int count = urls.length; * long totalSize = 0; * for (int i = 0; i < count; i++) { * totalSize += Downloader.downloadFile(urls[i]); * publishProgress((int) ((i / (float) count) * 100)); * // Escape early if cancel() is called * if (isCancelled()) break; * } * return totalSize; * } * * protected void onProgressUpdate(Integer... progress) { * setProgressPercent(progress[0]); * } * * protected void onPostExecute(Long result) { * showDialog("Downloaded " + result + " bytes"); * } * } * </pre> * * <p>Once created, a task is executed very simply:</p> * <pre class="prettyprint"> * new DownloadFilesTask().execute(url1, url2, url3); * </pre> * * <h2>AsyncTask's generic types</h2> * <p>The three types used by an asynchronous task are the following:</p> * <ol> * <li><code>Params</code>, the type of the parameters sent to the task upon * execution.</li> * <li><code>Progress</code>, the type of the progress units published during * the background computation.</li> * <li><code>Result</code>, the type of the result of the background * computation.</li> * </ol> * <p>Not all types are always used by an asynchronous task. To mark a type as unused, * simply use the type {@link Void}:</p> * <pre> * private class MyTask extends AsyncTask&lt;Void, Void, Void&gt; { ... } * </pre> * * <h2>The 4 steps</h2> * <p>When an asynchronous task is executed, the task goes through 4 steps:</p> * <ol> * <li>{@link #onPreExecute()}, invoked on the UI thread immediately after the task * is executed. This step is normally used to setup the task, for instance by * showing a progress bar in the user interface.</li> * <li>{@link #doInBackground}, invoked on the background thread * immediately after {@link #onPreExecute()} finishes executing. This step is used * to perform background computation that can take a long time. The parameters * of the asynchronous task are passed to this step. The result of the computation must * be returned by this step and will be passed back to the last step. This step * can also use {@link #publishProgress} to publish one or more units * of progress. These values are published on the UI thread, in the * {@link #onProgressUpdate} step.</li> * <li>{@link #onProgressUpdate}, invoked on the UI thread after a * call to {@link #publishProgress}. The timing of the execution is * undefined. This method is used to display any form of progress in the user * interface while the background computation is still executing. For instance, * it can be used to animate a progress bar or show logs in a text field.</li> * <li>{@link #onPostExecute}, invoked on the UI thread after the background * computation finishes. The result of the background computation is passed to * this step as a parameter.</li> * </ol> * * <h2>Cancelling a task</h2> * <p>A task can be cancelled at any time by invoking {@link #cancel(boolean)}. Invoking * this method will cause subsequent calls to {@link #isCancelled()} to return true. * After invoking this method, {@link #onCancelled(Object)}, instead of * {@link #onPostExecute(Object)} will be invoked after {@link #doInBackground(Object[])} * returns. 
To ensure that a task is cancelled as quickly as possible, you should always * check the return value of {@link #isCancelled()} periodically from * {@link #doInBackground(Object[])}, if possible (inside a loop for instance.)</p> * * <h2>Threading rules</h2> * <p>There are a few threading rules that must be followed for this class to * work properly:</p> * <ul> * <li>The AsyncTask class must be loaded on the UI thread. This is done * automatically as of {@link android.os.Build.VERSION_CODES#JELLY_BEAN}.</li> * <li>The task instance must be created on the UI thread.</li> * <li>{@link #execute} must be invoked on the UI thread.</li> * <li>Do not call {@link #onPreExecute()}, {@link #onPostExecute}, * {@link #doInBackground}, {@link #onProgressUpdate} manually.</li> * <li>The task can be executed only once (an exception will be thrown if * a second execution is attempted.)</li> * </ul> * * <h2>Memory observability</h2> * <p>AsyncTask guarantees that all callback calls are synchronized in such a way that the following * operations are safe without explicit synchronizations.</p> * <ul> * <li>Set member fields in the constructor or {@link #onPreExecute}, and refer to them * in {@link #doInBackground}. * <li>Set member fields in {@link #doInBackground}, and refer to them in * {@link #onProgressUpdate} and {@link #onPostExecute}. * </ul> * * <h2>Order of execution</h2> * <p>When first introduced, AsyncTasks were executed serially on a single background * thread. Starting with {@link android.os.Build.VERSION_CODES#DONUT}, this was changed * to a pool of threads allowing multiple tasks to operate in parallel. Starting with * {@link android.os.Build.VERSION_CODES#HONEYCOMB}, tasks are executed on a single * thread to avoid common application errors caused by parallel execution.</p> * <p>If you truly want parallel execution, you can invoke * {@link #executeOnExecutor(java.util.concurrent.Executor, Object[])} with * {@link #THREAD_POOL_EXECUTOR}.</p> */ public abstract class LibrarySectionAsyncTask<Params, Progress, Result> { private static final String LOG_TAG = "AsyncTask"; private static final int CORE_POOL_SIZE = 2; private static final int MAXIMUM_POOL_SIZE = 2; private static final int KEEP_ALIVE = 1; private static final ThreadFactory sThreadFactory = new ThreadFactory() { private final AtomicInteger mCount = new AtomicInteger(1); public Thread newThread(Runnable r) { return new Thread(r, "AsyncTask #" + mCount.getAndIncrement()); } }; private static final BlockingQueue<Runnable> sPoolWorkQueue = new LinkedBlockingQueue<Runnable>(10); /** * An {@link Executor} that can be used to execute tasks in parallel. */ public static final Executor THREAD_POOL_EXECUTOR = new ThreadPoolExecutor(CORE_POOL_SIZE, MAXIMUM_POOL_SIZE, KEEP_ALIVE, TimeUnit.SECONDS, sPoolWorkQueue, sThreadFactory, new ThreadPoolExecutor.DiscardOldestPolicy()); /** * An {@link Executor} that executes tasks one at a time in serial * order. This serialization is global to a particular process. 
*/ public static final Executor SERIAL_EXECUTOR = new SerialExecutor(); public static final Executor DUAL_THREAD_EXECUTOR = Executors.newFixedThreadPool(2, sThreadFactory); private static final int MESSAGE_POST_RESULT = 0x1; private static final int MESSAGE_POST_PROGRESS = 0x2; private static final InternalHandler sHandler = new InternalHandler(); private static volatile Executor sDefaultExecutor = SERIAL_EXECUTOR; private final WorkerRunnable<Params, Result> mWorker; private final FutureTask<Result> mFuture; private volatile Status mStatus = Status.PENDING; private final AtomicBoolean mCancelled = new AtomicBoolean(); private final AtomicBoolean mTaskInvoked = new AtomicBoolean(); @TargetApi(11) private static class SerialExecutor implements Executor { final ArrayDeque<Runnable> mTasks = new ArrayDeque<Runnable>(); Runnable mActive; public synchronized void execute(final Runnable r) { mTasks.offer(new Runnable() { public void run() { try { r.run(); } finally { scheduleNext(); } } }); if (mActive == null) { scheduleNext(); } } protected synchronized void scheduleNext() { if ((mActive = mTasks.poll()) != null) { THREAD_POOL_EXECUTOR.execute(mActive); } } } /** * Indicates the current status of the task. Each status will be set only once * during the lifetime of a task. */ public enum Status { /** * Indicates that the task has not been executed yet. */ PENDING, /** * Indicates that the task is running. */ RUNNING, /** * Indicates that {@link AsyncTask#onPostExecute} has finished. */ FINISHED, } /** @hide Used to force static handler to be created. */ public static void init() { sHandler.getLooper(); } /** @hide */ public static void setDefaultExecutor(Executor exec) { sDefaultExecutor = exec; } /** * Creates a new asynchronous task. This constructor must be invoked on the UI thread. */ public LibrarySectionAsyncTask() { mWorker = new WorkerRunnable<Params, Result>() { public Result call() throws Exception { mTaskInvoked.set(true); Process.setThreadPriority(Process.THREAD_PRIORITY_BACKGROUND); //noinspection unchecked return postResult(doInBackground(mParams)); } }; mFuture = new FutureTask<Result>(mWorker) { @Override protected void done() { try { postResultIfNotInvoked(get()); } catch (InterruptedException e) { android.util.Log.w(LOG_TAG, e); } catch (ExecutionException e) { throw new RuntimeException("An error occured while executing doInBackground()", e.getCause()); } catch (CancellationException e) { postResultIfNotInvoked(null); } } }; } private void postResultIfNotInvoked(Result result) { final boolean wasTaskInvoked = mTaskInvoked.get(); if (!wasTaskInvoked) { postResult(result); } } private Result postResult(Result result) { @SuppressWarnings("unchecked") Message message = sHandler.obtainMessage(MESSAGE_POST_RESULT, new AsyncTaskResult<Result>(this, result)); message.sendToTarget(); return result; } /** * Returns the current status of this task. * * @return The current status. */ public final Status getStatus() { return mStatus; } /** * Override this method to perform a computation on a background thread. The * specified parameters are the parameters passed to {@link #execute} * by the caller of this task. * * This method can call {@link #publishProgress} to publish updates * on the UI thread. * * @param params The parameters of the task. * * @return A result, defined by the subclass of this task. * * @see #onPreExecute() * @see #onPostExecute * @see #publishProgress */ protected abstract Result doInBackground(Params... 
params); /** * Runs on the UI thread before {@link #doInBackground}. * * @see #onPostExecute * @see #doInBackground */ protected void onPreExecute() { } /** * <p>Runs on the UI thread after {@link #doInBackground}. The * specified result is the value returned by {@link #doInBackground}.</p> * * <p>This method won't be invoked if the task was cancelled.</p> * * @param result The result of the operation computed by {@link #doInBackground}. * * @see #onPreExecute * @see #doInBackground * @see #onCancelled(Object) */ protected void onPostExecute(Result result) { } /** * Runs on the UI thread after {@link #publishProgress} is invoked. * The specified values are the values passed to {@link #publishProgress}. * * @param values The values indicating progress. * * @see #publishProgress * @see #doInBackground */ protected void onProgressUpdate(Progress... values) { } /** * <p>Runs on the UI thread after {@link #cancel(boolean)} is invoked and * {@link #doInBackground(Object[])} has finished.</p> * * <p>The default implementation simply invokes {@link #onCancelled()} and * ignores the result. If you write your own implementation, do not call * <code>super.onCancelled(result)</code>.</p> * * @param result The result, if any, computed in * {@link #doInBackground(Object[])}, can be null * * @see #cancel(boolean) * @see #isCancelled() */ protected void onCancelled(Result result) { onCancelled(); } /** * <p>Applications should preferably override {@link #onCancelled(Object)}. * This method is invoked by the default implementation of * {@link #onCancelled(Object)}.</p> * * <p>Runs on the UI thread after {@link #cancel(boolean)} is invoked and * {@link #doInBackground(Object[])} has finished.</p> * * @see #onCancelled(Object) * @see #cancel(boolean) * @see #isCancelled() */ protected void onCancelled() { } /** * Returns <tt>true</tt> if this task was cancelled before it completed * normally. If you are calling {@link #cancel(boolean)} on the task, * the value returned by this method should be checked periodically from * {@link #doInBackground(Object[])} to end the task as soon as possible. * * @return <tt>true</tt> if task was cancelled before it completed * * @see #cancel(boolean) */ public final boolean isCancelled() { return mCancelled.get(); } /** * <p>Attempts to cancel execution of this task. This attempt will * fail if the task has already completed, already been cancelled, * or could not be cancelled for some other reason. If successful, * and this task has not started when <tt>cancel</tt> is called, * this task should never run. If the task has already started, * then the <tt>mayInterruptIfRunning</tt> parameter determines * whether the thread executing this task should be interrupted in * an attempt to stop the task.</p> * * <p>Calling this method will result in {@link #onCancelled(Object)} being * invoked on the UI thread after {@link #doInBackground(Object[])} * returns. Calling this method guarantees that {@link #onPostExecute(Object)} * is never invoked. After invoking this method, you should check the * value returned by {@link #isCancelled()} periodically from * {@link #doInBackground(Object[])} to finish the task as early as * possible.</p> * * @param mayInterruptIfRunning <tt>true</tt> if the thread executing this * task should be interrupted; otherwise, in-progress tasks are allowed * to complete. 
* * @return <tt>false</tt> if the task could not be cancelled, * typically because it has already completed normally; * <tt>true</tt> otherwise * * @see #isCancelled() * @see #onCancelled(Object) */ public final boolean cancel(boolean mayInterruptIfRunning) { mCancelled.set(true); return mFuture.cancel(mayInterruptIfRunning); } /** * Waits if necessary for the computation to complete, and then * retrieves its result. * * @return The computed result. * * @throws CancellationException If the computation was cancelled. * @throws ExecutionException If the computation threw an exception. * @throws InterruptedException If the current thread was interrupted * while waiting. */ public final Result get() throws InterruptedException, ExecutionException { return mFuture.get(); } /** * Waits if necessary for at most the given time for the computation * to complete, and then retrieves its result. * * @param timeout Time to wait before cancelling the operation. * @param unit The time unit for the timeout. * * @return The computed result. * * @throws CancellationException If the computation was cancelled. * @throws ExecutionException If the computation threw an exception. * @throws InterruptedException If the current thread was interrupted * while waiting. * @throws TimeoutException If the wait timed out. */ public final Result get(long timeout, TimeUnit unit) throws InterruptedException, ExecutionException, TimeoutException { return mFuture.get(timeout, unit); } /** * Executes the task with the specified parameters. The task returns * itself (this) so that the caller can keep a reference to it. * * <p>Note: this function schedules the task on a queue for a single background * thread or pool of threads depending on the platform version. When first * introduced, AsyncTasks were executed serially on a single background thread. * Starting with {@link android.os.Build.VERSION_CODES#DONUT}, this was changed * to a pool of threads allowing multiple tasks to operate in parallel. Starting * {@link android.os.Build.VERSION_CODES#HONEYCOMB}, tasks are back to being * executed on a single thread to avoid common application errors caused * by parallel execution. If you truly want parallel execution, you can use * the {@link #executeOnExecutor} version of this method * with {@link #THREAD_POOL_EXECUTOR}; however, see commentary there for warnings * on its use. * * <p>This method must be invoked on the UI thread. * * @param params The parameters of the task. * * @return This instance of AsyncTask. * * @throws IllegalStateException If {@link #getStatus()} returns either * {@link LibrarySectionAsyncTask.Status#RUNNING} or {@link LibrarySectionAsyncTask.Status#FINISHED}. * * @see #executeOnExecutor(java.util.concurrent.Executor, Object[]) * @see #execute(Runnable) */ public final LibrarySectionAsyncTask<Params, Progress, Result> execute(Params... params) { return executeOnExecutor(sDefaultExecutor, params); } /** * Executes the task with the specified parameters. The task returns * itself (this) so that the caller can keep a reference to it. * * <p>This method is typically used with {@link #THREAD_POOL_EXECUTOR} to * allow multiple tasks to run in parallel on a pool of threads managed by * AsyncTask, however you can also use your own {@link Executor} for custom * behavior. * * <p><em>Warning:</em> Allowing multiple tasks to run in parallel from * a thread pool is generally <em>not</em> what one wants, because the order * of their operation is not defined. 
For example, if these tasks are used * to modify any state in common (such as writing a file due to a button click), * there are no guarantees on the order of the modifications. * Without careful work it is possible in rare cases for the newer version * of the data to be over-written by an older one, leading to obscure data * loss and stability issues. Such changes are best * executed in serial; to guarantee such work is serialized regardless of * platform version you can use this function with {@link #SERIAL_EXECUTOR}. * * <p>This method must be invoked on the UI thread. * * @param exec The executor to use. {@link #THREAD_POOL_EXECUTOR} is available as a * convenient process-wide thread pool for tasks that are loosely coupled. * @param params The parameters of the task. * * @return This instance of AsyncTask. * * @throws IllegalStateException If {@link #getStatus()} returns either * {@link LibrarySectionAsyncTask.Status#RUNNING} or {@link LibrarySectionAsyncTask.Status#FINISHED}. * * @see #execute(Object[]) */ @SuppressWarnings("incomplete-switch") public final LibrarySectionAsyncTask<Params, Progress, Result> executeOnExecutor(Executor exec, Params... params) { if (mStatus != Status.PENDING) { switch (mStatus) { case RUNNING: throw new IllegalStateException("Cannot execute task:" + " the task is already running."); case FINISHED: throw new IllegalStateException("Cannot execute task:" + " the task has already been executed " + "(a task can be executed only once)"); } } mStatus = Status.RUNNING; onPreExecute(); mWorker.mParams = params; exec.execute(mFuture); return this; } /** * Convenience version of {@link #execute(Object...)} for use with * a simple Runnable object. See {@link #execute(Object[])} for more * information on the order of execution. * * @see #execute(Object[]) * @see #executeOnExecutor(java.util.concurrent.Executor, Object[]) */ public static void execute(Runnable runnable) { sDefaultExecutor.execute(runnable); } /** * This method can be invoked from {@link #doInBackground} to * publish updates on the UI thread while the background computation is * still running. Each call to this method will trigger the execution of * {@link #onProgressUpdate} on the UI thread. * * {@link #onProgressUpdate} will note be called if the task has been * canceled. * * @param values The progress values to update the UI with. * * @see #onProgressUpdate * @see #doInBackground */ protected final void publishProgress(Progress... values) { if (!isCancelled()) { sHandler.obtainMessage(MESSAGE_POST_PROGRESS, new AsyncTaskResult<Progress>(this, values)).sendToTarget(); } } private void finish(Result result) { if (isCancelled()) { onCancelled(result); } else { onPostExecute(result); } mStatus = Status.FINISHED; } private static class InternalHandler extends Handler { @SuppressWarnings({"unchecked", "rawtypes"}) @Override public void handleMessage(Message msg) { AsyncTaskResult result = (AsyncTaskResult) msg.obj; switch (msg.what) { case MESSAGE_POST_RESULT: // There is only one result result.mTask.finish(result.mData[0]); break; case MESSAGE_POST_PROGRESS: result.mTask.onProgressUpdate(result.mData); break; } } } private static abstract class WorkerRunnable<Params, Result> implements Callable<Result> { Params[] mParams; } private static class AsyncTaskResult<Data> { @SuppressWarnings("rawtypes") final LibrarySectionAsyncTask mTask; final Data[] mData; @SuppressWarnings("rawtypes") AsyncTaskResult(LibrarySectionAsyncTask task, Data... data) { mTask = task; mData = data; } } }
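/*
 * Illustrative sketch (not part of the class above): a minimal subclass of LibrarySectionAsyncTask,
 * mirroring the DownloadFilesTask example from the Javadoc. The task name, log tag and the choice of
 * DUAL_THREAD_EXECUTOR versus the serial default are assumptions for demonstration only; as the
 * threading rules above require, execute()/executeOnExecutor() must be invoked on the UI thread.
 */
package com.miz.functions;

public class LibrarySectionAsyncTaskSketch {

    // Counts the total length of the supplied strings off the UI thread and
    // publishes progress after each item.
    static class CountCharsTask extends LibrarySectionAsyncTask<String, Integer, Long> {
        @Override
        protected Long doInBackground(String... items) {
            long total = 0;
            for (int i = 0; i < items.length; i++) {
                if (isCancelled()) break;  // honor cancellation, as the Javadoc above advises
                total += items[i].length();
                publishProgress((int) ((i + 1) * 100f / items.length));
            }
            return total;
        }

        @Override
        protected void onProgressUpdate(Integer... progress) {
            android.util.Log.d("CountCharsTask", "progress=" + progress[0]);
        }

        @Override
        protected void onPostExecute(Long result) {
            android.util.Log.d("CountCharsTask", "total chars=" + result);
        }
    }

    // Typical call sites (UI thread):
    //   new CountCharsTask().execute("a", "bb", "ccc");                                              // serial default
    //   new CountCharsTask().executeOnExecutor(LibrarySectionAsyncTask.DUAL_THREAD_EXECUTOR, "a", "bb");
}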
package com.michelboudreau.alternatorv2; import com.amazonaws.AmazonClientException; import com.amazonaws.AmazonServiceException; import com.amazonaws.AmazonWebServiceClient; import com.amazonaws.AmazonWebServiceRequest; import com.amazonaws.AmazonWebServiceResponse; import com.amazonaws.ClientConfiguration; import com.amazonaws.Request; import com.amazonaws.Response; import com.amazonaws.ResponseMetadata; import com.amazonaws.handlers.HandlerChainFactory; import com.amazonaws.http.ExecutionContext; import com.amazonaws.http.HttpResponseHandler; import com.amazonaws.http.JsonErrorResponseHandler; import com.amazonaws.http.JsonResponseHandler; import com.amazonaws.services.dynamodbv2.AmazonDynamoDB; import com.amazonaws.services.dynamodbv2.model.AttributeDefinition; import com.amazonaws.services.dynamodbv2.model.AttributeValue; import com.amazonaws.services.dynamodbv2.model.AttributeValueUpdate; import com.amazonaws.services.dynamodbv2.model.BatchGetItemRequest; import com.amazonaws.services.dynamodbv2.model.BatchGetItemResult; import com.amazonaws.services.dynamodbv2.model.BatchWriteItemRequest; import com.amazonaws.services.dynamodbv2.model.BatchWriteItemResult; import com.amazonaws.services.dynamodbv2.model.Condition; import com.amazonaws.services.dynamodbv2.model.CreateTableRequest; import com.amazonaws.services.dynamodbv2.model.CreateTableResult; import com.amazonaws.services.dynamodbv2.model.DeleteItemRequest; import com.amazonaws.services.dynamodbv2.model.DeleteItemResult; import com.amazonaws.services.dynamodbv2.model.DeleteTableRequest; import com.amazonaws.services.dynamodbv2.model.DeleteTableResult; import com.amazonaws.services.dynamodbv2.model.DescribeTableRequest; import com.amazonaws.services.dynamodbv2.model.DescribeTableResult; import com.amazonaws.services.dynamodbv2.model.GetItemRequest; import com.amazonaws.services.dynamodbv2.model.GetItemResult; import com.amazonaws.services.dynamodbv2.model.KeySchemaElement; import com.amazonaws.services.dynamodbv2.model.KeysAndAttributes; import com.amazonaws.services.dynamodbv2.model.ListTablesRequest; import com.amazonaws.services.dynamodbv2.model.ListTablesResult; import com.amazonaws.services.dynamodbv2.model.ProvisionedThroughput; import com.amazonaws.services.dynamodbv2.model.PutItemRequest; import com.amazonaws.services.dynamodbv2.model.PutItemResult; import com.amazonaws.services.dynamodbv2.model.QueryRequest; import com.amazonaws.services.dynamodbv2.model.QueryResult; import com.amazonaws.services.dynamodbv2.model.ScanRequest; import com.amazonaws.services.dynamodbv2.model.ScanResult; import com.amazonaws.services.dynamodbv2.model.UpdateItemRequest; import com.amazonaws.services.dynamodbv2.model.UpdateItemResult; import com.amazonaws.services.dynamodbv2.model.UpdateTableRequest; import com.amazonaws.services.dynamodbv2.model.UpdateTableResult; import com.amazonaws.services.dynamodbv2.model.WriteRequest; import com.amazonaws.services.dynamodbv2.model.transform.BatchGetItemRequestMarshaller; import com.amazonaws.services.dynamodbv2.model.transform.BatchGetItemResultJsonUnmarshaller; import com.amazonaws.services.dynamodbv2.model.transform.BatchWriteItemRequestMarshaller; import com.amazonaws.services.dynamodbv2.model.transform.BatchWriteItemResultJsonUnmarshaller; import com.amazonaws.services.dynamodbv2.model.transform.ConditionalCheckFailedExceptionUnmarshaller; import com.amazonaws.services.dynamodbv2.model.transform.CreateTableRequestMarshaller; import 
com.amazonaws.services.dynamodbv2.model.transform.CreateTableResultJsonUnmarshaller; import com.amazonaws.services.dynamodbv2.model.transform.DeleteItemRequestMarshaller; import com.amazonaws.services.dynamodbv2.model.transform.DeleteItemResultJsonUnmarshaller; import com.amazonaws.services.dynamodbv2.model.transform.DeleteTableRequestMarshaller; import com.amazonaws.services.dynamodbv2.model.transform.DeleteTableResultJsonUnmarshaller; import com.amazonaws.services.dynamodbv2.model.transform.DescribeTableRequestMarshaller; import com.amazonaws.services.dynamodbv2.model.transform.DescribeTableResultJsonUnmarshaller; import com.amazonaws.services.dynamodbv2.model.transform.GetItemRequestMarshaller; import com.amazonaws.services.dynamodbv2.model.transform.GetItemResultJsonUnmarshaller; import com.amazonaws.services.dynamodbv2.model.transform.InternalServerErrorExceptionUnmarshaller; import com.amazonaws.services.dynamodbv2.model.transform.LimitExceededExceptionUnmarshaller; import com.amazonaws.services.dynamodbv2.model.transform.ListTablesRequestMarshaller; import com.amazonaws.services.dynamodbv2.model.transform.ListTablesResultJsonUnmarshaller; import com.amazonaws.services.dynamodbv2.model.transform.ProvisionedThroughputExceededExceptionUnmarshaller; import com.amazonaws.services.dynamodbv2.model.transform.PutItemRequestMarshaller; import com.amazonaws.services.dynamodbv2.model.transform.PutItemResultJsonUnmarshaller; import com.amazonaws.services.dynamodbv2.model.transform.QueryRequestMarshaller; import com.amazonaws.services.dynamodbv2.model.transform.QueryResultJsonUnmarshaller; import com.amazonaws.services.dynamodbv2.model.transform.ResourceInUseExceptionUnmarshaller; import com.amazonaws.services.dynamodbv2.model.transform.ResourceNotFoundExceptionUnmarshaller; import com.amazonaws.services.dynamodbv2.model.transform.ScanRequestMarshaller; import com.amazonaws.services.dynamodbv2.model.transform.ScanResultJsonUnmarshaller; import com.amazonaws.services.dynamodbv2.model.transform.UpdateItemRequestMarshaller; import com.amazonaws.services.dynamodbv2.model.transform.UpdateItemResultJsonUnmarshaller; import com.amazonaws.services.dynamodbv2.model.transform.UpdateTableRequestMarshaller; import com.amazonaws.services.dynamodbv2.model.transform.UpdateTableResultJsonUnmarshaller; import com.amazonaws.transform.JsonErrorUnmarshaller; import com.amazonaws.transform.JsonUnmarshallerContext; import com.amazonaws.transform.Unmarshaller; import java.net.URI; import java.net.URISyntaxException; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import java.util.ArrayList; import java.util.List; public class AlternatorDBClientV2 extends AmazonWebServiceClient implements AmazonDynamoDB { private static final Log log = LogFactory.getLog(AlternatorDBClientV2.class); protected List<JsonErrorUnmarshaller> exceptionUnmarshallers; public AlternatorDBClientV2() { this(new ClientConfiguration()); } public AlternatorDBClientV2(ClientConfiguration clientConfiguration) { super(clientConfiguration); init(); } private void init() { exceptionUnmarshallers = new ArrayList<JsonErrorUnmarshaller>(); exceptionUnmarshallers.add(new LimitExceededExceptionUnmarshaller()); exceptionUnmarshallers.add(new InternalServerErrorExceptionUnmarshaller()); exceptionUnmarshallers.add(new ProvisionedThroughputExceededExceptionUnmarshaller()); exceptionUnmarshallers.add(new ResourceInUseExceptionUnmarshaller()); exceptionUnmarshallers.add(new ConditionalCheckFailedExceptionUnmarshaller()); 
exceptionUnmarshallers.add(new ResourceNotFoundExceptionUnmarshaller()); exceptionUnmarshallers.add(new JsonErrorUnmarshaller()); setEndpoint("http://localhost:9090/"); HandlerChainFactory chainFactory = new HandlerChainFactory(); requestHandler2s.addAll(chainFactory.newRequestHandlerChain("/com/amazonaws/services/dynamodb/request.handlers")); ClientConfiguration customClientConfiguration = new ClientConfiguration(clientConfiguration); customClientConfiguration.setSignerOverride("NoOpSignerType"); if (customClientConfiguration.getRetryPolicy() == ClientConfiguration.DEFAULT_RETRY_POLICY) { log.debug("Overriding default max error retry value to: " + 10); customClientConfiguration.setMaxErrorRetry(10); } setCustomConfiguration(customClientConfiguration); } private void setCustomConfiguration(ClientConfiguration customClientConfiguration) { this.clientConfiguration = customClientConfiguration; } @Override public ListTablesResult listTables(ListTablesRequest listTablesRequest) throws AmazonServiceException, AmazonClientException { Request<ListTablesRequest> request = new ListTablesRequestMarshaller().marshall(listTablesRequest); Unmarshaller<ListTablesResult, JsonUnmarshallerContext> unmarshaller = new ListTablesResultJsonUnmarshaller(); JsonResponseHandler<ListTablesResult> responseHandler = new JsonResponseHandler<ListTablesResult>(unmarshaller); return invoke(request, responseHandler); } @Override public QueryResult query(QueryRequest queryRequest) throws AmazonServiceException, AmazonClientException { Request<QueryRequest> request = new QueryRequestMarshaller().marshall(queryRequest); Unmarshaller<QueryResult, JsonUnmarshallerContext> unmarshaller = new QueryResultJsonUnmarshaller(); JsonResponseHandler<QueryResult> responseHandler = new JsonResponseHandler<QueryResult>(unmarshaller); return invoke(request, responseHandler); } @Override public BatchWriteItemResult batchWriteItem(BatchWriteItemRequest batchWriteItemRequest) throws AmazonServiceException, AmazonClientException { Request<BatchWriteItemRequest> request = new BatchWriteItemRequestMarshaller().marshall(batchWriteItemRequest); Unmarshaller<BatchWriteItemResult, JsonUnmarshallerContext> unmarshaller = new BatchWriteItemResultJsonUnmarshaller(); JsonResponseHandler<BatchWriteItemResult> responseHandler = new JsonResponseHandler<BatchWriteItemResult>(unmarshaller); return invoke(request, responseHandler); } @Override public BatchWriteItemResult batchWriteItem(java.util.Map<String,java.util.List<WriteRequest>> requestItems) throws AmazonServiceException, AmazonClientException { throw new AmazonClientException("batchWriteItem using Map is not implemented in Alternator."); } @Override public UpdateItemResult updateItem(UpdateItemRequest updateItemRequest) throws AmazonServiceException, AmazonClientException { Request<UpdateItemRequest> request = new UpdateItemRequestMarshaller().marshall(updateItemRequest); Unmarshaller<UpdateItemResult, JsonUnmarshallerContext> unmarshaller = new UpdateItemResultJsonUnmarshaller(); JsonResponseHandler<UpdateItemResult> responseHandler = new JsonResponseHandler<UpdateItemResult>(unmarshaller); return invoke(request, responseHandler); } @Override public UpdateItemResult updateItem(String tableName, java.util.Map<String, AttributeValue> key, java.util.Map<String, AttributeValueUpdate> attributeUpdates, String returnValues) throws AmazonServiceException, AmazonClientException { throw new AmazonClientException("updateItem using String, Map, Map, and String is not implemented in Alternator."); } @Override 
public UpdateItemResult updateItem(String tableName, java.util.Map<String,AttributeValue> key, java.util.Map<String,AttributeValueUpdate> attributeUpdates) throws AmazonServiceException, AmazonClientException { throw new AmazonClientException("updateItem using String, Map, and Map is not implemented in Alternator."); } @Override public PutItemResult putItem(PutItemRequest putItemRequest) throws AmazonServiceException, AmazonClientException { Request<PutItemRequest> request = new PutItemRequestMarshaller().marshall(putItemRequest); Unmarshaller<PutItemResult, JsonUnmarshallerContext> unmarshaller = new PutItemResultJsonUnmarshaller(); JsonResponseHandler<PutItemResult> responseHandler = new JsonResponseHandler<PutItemResult>(unmarshaller); return invoke(request, responseHandler); } @Override public PutItemResult putItem(String tableName, java.util.Map<String,AttributeValue> item, String returnValues) throws AmazonServiceException, AmazonClientException { throw new AmazonClientException("putItem using String, Map, and String is not implemented in Alternator."); } @Override public PutItemResult putItem(String tableName, java.util.Map<String,AttributeValue> item) throws AmazonServiceException, AmazonClientException { throw new AmazonClientException("putItem using String and Map is not implemented in Alternator."); } @Override public DescribeTableResult describeTable(DescribeTableRequest describeTableRequest) throws AmazonServiceException, AmazonClientException { Request<DescribeTableRequest> request = new DescribeTableRequestMarshaller().marshall(describeTableRequest); Unmarshaller<DescribeTableResult, JsonUnmarshallerContext> unmarshaller = new DescribeTableResultJsonUnmarshaller(); JsonResponseHandler<DescribeTableResult> responseHandler = new JsonResponseHandler<DescribeTableResult>(unmarshaller); return invoke(request, responseHandler); } @Override public DescribeTableResult describeTable(String tableName) throws AmazonServiceException, AmazonClientException { throw new AmazonClientException("describeTable using String is not implemented in Alternator."); } @Override public ScanResult scan(ScanRequest scanRequest) throws AmazonServiceException, AmazonClientException { Request<ScanRequest> request = new ScanRequestMarshaller().marshall(scanRequest); Unmarshaller<ScanResult, JsonUnmarshallerContext> unmarshaller = new ScanResultJsonUnmarshaller(); JsonResponseHandler<ScanResult> responseHandler = new JsonResponseHandler<ScanResult>(unmarshaller); return invoke(request, responseHandler); } @Override public ScanResult scan(String tableName, java.util.List<String> attributesToGet, java.util.Map<String,Condition> scanFilter) throws AmazonServiceException, AmazonClientException { throw new AmazonClientException("scan using String, List, and Map is not implemented in Alternator."); } @Override public ScanResult scan(String tableName, java.util.Map<String,Condition> scanFilter) throws AmazonServiceException, AmazonClientException { throw new AmazonClientException("scan using String and Map is not implemented in Alternator."); } @Override public ScanResult scan(String tableName, java.util.List<String> attributesToGet) throws AmazonServiceException, AmazonClientException { throw new AmazonClientException("scan using String and List is not implemented in Alternator."); } @Override public CreateTableResult createTable(CreateTableRequest createTableRequest) throws AmazonServiceException, AmazonClientException { Request<CreateTableRequest> request = new 
CreateTableRequestMarshaller().marshall(createTableRequest); Unmarshaller<CreateTableResult, JsonUnmarshallerContext> unmarshaller = new CreateTableResultJsonUnmarshaller(); JsonResponseHandler<CreateTableResult> responseHandler = new JsonResponseHandler<CreateTableResult>(unmarshaller); return invoke(request, responseHandler); } @Override public CreateTableResult createTable(java.util.List<AttributeDefinition> attributeDefinitions, String tableName, java.util.List<KeySchemaElement> keySchema, ProvisionedThroughput provisionedThroughput) throws AmazonServiceException, AmazonClientException { throw new AmazonClientException("createTable using List, String, List, and ProvisionedThroughput is not implemented in Alternator."); } @Override public UpdateTableResult updateTable(UpdateTableRequest updateTableRequest) throws AmazonServiceException, AmazonClientException { Request<UpdateTableRequest> request = new UpdateTableRequestMarshaller().marshall(updateTableRequest); Unmarshaller<UpdateTableResult, JsonUnmarshallerContext> unmarshaller = new UpdateTableResultJsonUnmarshaller(); JsonResponseHandler<UpdateTableResult> responseHandler = new JsonResponseHandler<UpdateTableResult>(unmarshaller); return invoke(request, responseHandler); } @Override public UpdateTableResult updateTable(String tableName, ProvisionedThroughput provisionedThroughput) throws AmazonServiceException, AmazonClientException { throw new AmazonClientException("updateTable using String and ProvisionedThroughput is not implemented in Alternator."); } @Override public DeleteTableResult deleteTable(DeleteTableRequest deleteTableRequest) throws AmazonServiceException, AmazonClientException { Request<DeleteTableRequest> request = new DeleteTableRequestMarshaller().marshall(deleteTableRequest); Unmarshaller<DeleteTableResult, JsonUnmarshallerContext> unmarshaller = new DeleteTableResultJsonUnmarshaller(); JsonResponseHandler<DeleteTableResult> responseHandler = new JsonResponseHandler<DeleteTableResult>(unmarshaller); return invoke(request, responseHandler); } @Override public DeleteTableResult deleteTable(String tableName) throws AmazonServiceException, AmazonClientException { throw new AmazonClientException("deleteTable using String is not implemented in Alternator."); } @Override public DeleteItemResult deleteItem(DeleteItemRequest deleteItemRequest) throws AmazonServiceException, AmazonClientException { Request<DeleteItemRequest> request = new DeleteItemRequestMarshaller().marshall(deleteItemRequest); Unmarshaller<DeleteItemResult, JsonUnmarshallerContext> unmarshaller = new DeleteItemResultJsonUnmarshaller(); JsonResponseHandler<DeleteItemResult> responseHandler = new JsonResponseHandler<DeleteItemResult>(unmarshaller); return invoke(request, responseHandler); } @Override public DeleteItemResult deleteItem(String tableName, java.util.Map<String,AttributeValue> key, String returnValues) throws AmazonServiceException, AmazonClientException { throw new AmazonClientException("deleteItem using String, Map, and String is not implemented in Alternator."); } @Override public DeleteItemResult deleteItem(String tableName, java.util.Map<String,AttributeValue> key) throws AmazonServiceException, AmazonClientException { throw new AmazonClientException("deleteItem using String and Map is not implemented in Alternator."); } @Override public GetItemResult getItem(GetItemRequest getItemRequest) throws AmazonServiceException, AmazonClientException { Request<GetItemRequest> request = new GetItemRequestMarshaller().marshall(getItemRequest);
Unmarshaller<GetItemResult, JsonUnmarshallerContext> unmarshaller = new GetItemResultJsonUnmarshaller(); JsonResponseHandler<GetItemResult> responseHandler = new JsonResponseHandler<GetItemResult>(unmarshaller); return invoke(request, responseHandler); } @Override public GetItemResult getItem(String tableName, java.util.Map<String,AttributeValue> key, Boolean consistentRead) throws AmazonServiceException, AmazonClientException { throw new AmazonClientException("getItem using String, Map, and Boolean is not implemented in Alternator."); } @Override public GetItemResult getItem(String tableName, java.util.Map<String,AttributeValue> key) throws AmazonServiceException, AmazonClientException { throw new AmazonClientException("getItem using String and Map is not implemented in Alternator."); } @Override public BatchGetItemResult batchGetItem(BatchGetItemRequest batchGetItemRequest) throws AmazonServiceException, AmazonClientException { Request<BatchGetItemRequest> request = new BatchGetItemRequestMarshaller().marshall(batchGetItemRequest); Unmarshaller<BatchGetItemResult, JsonUnmarshallerContext> unmarshaller = new BatchGetItemResultJsonUnmarshaller(); JsonResponseHandler<BatchGetItemResult> responseHandler = new JsonResponseHandler<BatchGetItemResult>(unmarshaller); return invoke(request, responseHandler); } @Override public BatchGetItemResult batchGetItem(java.util.Map<String, KeysAndAttributes> requestItems) throws AmazonServiceException, AmazonClientException { throw new AmazonClientException("batchGetItem using Map is not implemented in Alternator."); } @Override public BatchGetItemResult batchGetItem(java.util.Map<String, KeysAndAttributes> requestItems, String returnConsumedCapacity) throws AmazonServiceException, AmazonClientException { throw new AmazonClientException("batchGetItem using Map and String is not implemented in Alternator."); } @Override public ListTablesResult listTables() throws AmazonServiceException, AmazonClientException { return listTables(new ListTablesRequest()); } @Override public void setEndpoint(String endpoint) throws IllegalArgumentException { URI endpointUri; try { endpointUri = new URI(endpoint); } catch (URISyntaxException ex) { throw new IllegalArgumentException(String.format("Invalid endpoint: %s", endpoint), ex); } synchronized (this) { this.endpoint = endpointUri; } } /** * Internal method for implementing {@link #getServiceName()}. This method is * intentionally protected so that subclasses which do not follow the class * naming convention can return whatever service name is needed.
*/ @Override protected String getServiceNameIntern() { return "dynamo"; } @Override public ListTablesResult listTables(Integer limit) throws AmazonServiceException, AmazonClientException { throw new AmazonClientException("listTables using Integer is not implemented in Alternator."); } @Override public ListTablesResult listTables(String exclusiveStartTableName, Integer limit) throws AmazonServiceException, AmazonClientException { throw new AmazonClientException("listTables using String and Integer is not implemented in Alternator."); } @Override public ListTablesResult listTables(String exclusiveStartTableName) throws AmazonServiceException, AmazonClientException { throw new AmazonClientException("listTables using String is not implemented in Alternator."); } @Override public ResponseMetadata getCachedResponseMetadata(AmazonWebServiceRequest request) { return client.getResponseMetadataForRequest(request); } private <X, Y extends AmazonWebServiceRequest> X invoke(Request<Y> request, HttpResponseHandler<AmazonWebServiceResponse<X>> responseHandler) { request.setEndpoint(endpoint); AmazonWebServiceRequest originalRequest = request.getOriginalRequest(); ExecutionContext executionContext = createExecutionContext(request); // executionContext.setCustomBackoffStrategy(com.amazonaws.internal.DynamoDBBackoffStrategy.DEFAULT); JsonErrorResponseHandler errorResponseHandler = new JsonErrorResponseHandler(exceptionUnmarshallers); Response<X> result = client.execute(request, responseHandler, errorResponseHandler, executionContext); return result.getAwsResponse(); } }
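/*
 * Illustrative sketch (not part of the client above): exercising AlternatorDBClientV2 against a
 * locally running Alternator instance. The client's init() already points it at
 * http://localhost:9090/; the table and attribute names here are made up, and only the
 * request-object overloads are used, because the convenience overloads above throw
 * AmazonClientException. A local Alternator server is assumed to be running.
 */
package com.michelboudreau.alternatorv2;

import java.util.HashMap;
import java.util.Map;

import com.amazonaws.services.dynamodbv2.model.AttributeDefinition;
import com.amazonaws.services.dynamodbv2.model.AttributeValue;
import com.amazonaws.services.dynamodbv2.model.CreateTableRequest;
import com.amazonaws.services.dynamodbv2.model.GetItemRequest;
import com.amazonaws.services.dynamodbv2.model.KeySchemaElement;
import com.amazonaws.services.dynamodbv2.model.ProvisionedThroughput;
import com.amazonaws.services.dynamodbv2.model.PutItemRequest;

public class AlternatorDBClientV2Sketch {
    public static void main(String[] args) {
        AlternatorDBClientV2 client = new AlternatorDBClientV2();

        // Create a table with a single hash key.
        client.createTable(new CreateTableRequest()
                .withTableName("sketch-table")
                .withAttributeDefinitions(new AttributeDefinition().withAttributeName("id").withAttributeType("S"))
                .withKeySchema(new KeySchemaElement().withAttributeName("id").withKeyType("HASH"))
                .withProvisionedThroughput(new ProvisionedThroughput().withReadCapacityUnits(1L).withWriteCapacityUnits(1L)));

        // Put one item and read it back.
        Map<String, AttributeValue> item = new HashMap<String, AttributeValue>();
        item.put("id", new AttributeValue().withS("42"));
        client.putItem(new PutItemRequest().withTableName("sketch-table").withItem(item));

        Map<String, AttributeValue> key = new HashMap<String, AttributeValue>();
        key.put("id", new AttributeValue().withS("42"));
        System.out.println(client.getItem(new GetItemRequest().withTableName("sketch-table").withKey(key)).getItem());
    }
}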
package org.i3xx.util.ctree.func; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * @author Administrator * */ public class VarNode implements IVarNode { private static final Logger logger = LoggerFactory.getLogger(VarNode.class); protected OP op; protected String value; protected IVarNode left; protected IVarNode right; protected boolean comment; public static enum OP {REPLACE, LINK} /** * */ public VarNode(){ op = null; left = null; right = null; comment = false; } /* (non-Javadoc) * @see com.i3xx.util.ctree.IVarNode#isLeaf() */ public boolean isLeaf() { return ((left == null) && (right == null) && (value != null)); } /* (non-Javadoc) * @see com.i3xx.util.ctree.IVarNode#isUnary() */ public boolean isUnary() { if(op==null) return false; switch(op){ case REPLACE: return true; default: return false; } } /* (non-Javadoc) * @see com.i3xx.util.ctree.IVarNode#isComment() */ public boolean isComment() { return comment; } /* (non-Javadoc) * @see com.i3xx.util.ctree.IVarNode#setComment(boolean) */ public void setComment(boolean value){ comment = value; } /* (non-Javadoc) * @see com.i3xx.util.ctree.IVarNode#addLeft() */ public IVarNode addLeft() { if(left == null) left = new VarNode(); return left; } /* (non-Javadoc) * @see com.i3xx.util.ctree.IVarNode#getLeft() */ public IVarNode getLeft() { return left; } /* (non-Javadoc) * @see com.i3xx.util.ctree.IVarNode#setLeft(com.i3xx.util.ctree.IVarNode) */ public void setLeft(IVarNode left) { this.left = left; } /* (non-Javadoc) * @see com.i3xx.util.ctree.IVarNode#getOp() */ public OP getOp() { return op; } /* (non-Javadoc) * @see com.i3xx.util.ctree.IVarNode#setOp(com.i3xx.util.ctree.VarNode.OP) */ public void setOp(OP op) { this.op = op; } /* (non-Javadoc) * @see com.i3xx.util.ctree.IVarNode#addRight() */ public IVarNode addRight() { if(right == null) right = new VarNode(); return right; } /* (non-Javadoc) * @see com.i3xx.util.ctree.IVarNode#getRight() */ public IVarNode getRight() { return right; } /* (non-Javadoc) * @see com.i3xx.util.ctree.IVarNode#setRight(com.i3xx.util.ctree.IVarNode) */ public void setRight(IVarNode right) { this.right = right; } /* (non-Javadoc) * @see com.i3xx.util.ctree.IVarNode#getValue() */ public String getValue() { return value; } /* (non-Javadoc) * @see com.i3xx.util.ctree.IVarNode#setValue(java.lang.String) */ public void setValue(String value) { this.value = value; } //--- print and parse --- /* (non-Javadoc) * @see java.lang.Object#toString() */ public String toString() { if(isLeaf()) return value; //Println.debug("leaf="+isLeaf()+" op="+(op!=null)+" left="+(left!=null)+" right="+(right!=null)); if(logger.isDebugEnabled()) logger.debug("leaf:{}, op:{}, left:{}, right:{}", isLeaf(), (op!=null), (left!=null), (right!=null)); switch(op){ case LINK: return left.toString() + OP_LINK + right.toString(); case REPLACE: return right.toString(); default: return ""; } } /* (non-Javadoc) * @see com.i3xx.util.ctree.IVarNode#parse() */ public void parse() { if(value==null) return; int p1 = 0; //operatoren if((p1 = value.lastIndexOf(IVarNode.OP_LINK)) > -1) { setOp(VarNode.OP.LINK); addRight().setValue( value.substring(p1 + IVarNode.OP_LINK.length()) ); addLeft().setValue( value.substring(0, p1) ); value = null; //recursion /*Println.debug("set and parse left: "+getLeft().getValue());*/ left.parse(); /*Println.debug("got left: left="+ (left.getLeft()==null || left.getLeft().isLeaf() || left.getLeft().getValue()==null)+ " right="+(left.getRight()==null || left.getRight().isLeaf() || left.getRight().getValue()==null)+ " 
op="+(left.getOp())+ " value="+left.getValue());*/ if(logger.isDebugEnabled()) logger.debug("got left: left:{}, right:{}, op:{}, value:{}", (left.getLeft()==null || left.getLeft().isLeaf() || left.getLeft().getValue()==null), (left.getRight()==null || left.getRight().isLeaf() || left.getRight().getValue()==null), (left.getOp()), left.getValue() ); /*Println.debug("set and parse right: "+getRight().getValue());*/ right.parse(); /*Println.debug("got right: left="+ (right.getLeft()==null || right.getLeft().isLeaf() || right.getLeft().getValue()==null)+ " right="+(right.getRight()==null || right.getRight().isLeaf() || right.getRight().getValue()==null)+ " op="+(right.getOp())+ " value="+right.getValue());*/ if(logger.isDebugEnabled()) logger.debug("got left: left:{}, right:{}, op:{}, value:{}", (right.getLeft()==null || right.getLeft().isLeaf() || right.getLeft().getValue()==null), (right.getRight()==null || right.getRight().isLeaf() || right.getRight().getValue()==null), (right.getOp()), right.getValue() ); }else //unary operators if( value.contains(IVarNode.OP_REPLACE) ){ setOp(VarNode.OP.REPLACE); addRight().setValue( value ); value = null; } } /* (non-Javadoc) * @see com.i3xx.util.ctree.IVarNode#resolve(com.i3xx.util.ctree.IResolver) */ public void resolve(IResolver resolver) { // //Println.debug("resolve: right="+right+" left="+left+" op="+op+" value="+value+" resolver="+resolver); logger.debug("resolve right:{}, left:{}, op:{}, value:{}, resolver:{}", right, left, op, value, resolver); if(isLeaf()){ //nothing to resolve but }else if(isUnary()){ //Wildcard resolver tested; passed resolver.resolve(this); }else{ //Println.debug("resolve(2): leaf="+isLeaf()+" unary="+(op!=null?isUnary():"")+" op="+op+" value="+value+" resolver="+resolver); if(logger.isDebugEnabled()) logger.debug("resolve leaf:{}, unary:{}, value:{}, resolver:{}", isLeaf(), (op!=null?isUnary():""), value, resolver); //resolve value first right.resolve(resolver); //then resolve path left.resolve(resolver); //then resolve all resolver.resolve(this); } } }
package net.drewke.tdme.tools.shared.controller; import java.io.File; import java.io.FilenameFilter; import java.io.IOException; import net.drewke.tdme.gui.GUIParser; import net.drewke.tdme.gui.events.Action; import net.drewke.tdme.gui.events.GUIActionListener; import net.drewke.tdme.gui.events.GUIChangeListener; import net.drewke.tdme.gui.nodes.GUIElementNode; import net.drewke.tdme.gui.nodes.GUIParentNode; import net.drewke.tdme.gui.nodes.GUIScreenNode; import net.drewke.tdme.gui.nodes.GUITextNode; import net.drewke.tdme.os.FileSystem; import net.drewke.tdme.utils.Console; import net.drewke.tdme.utils.MutableString; /** * File dialog screen controller * @author Andreas Drewke * @version $Id$ */ public class FileDialogScreenController extends ScreenController implements GUIActionListener, GUIChangeListener { // screen node private GUIScreenNode screenNode; // curent working dir, extensions private String cwd; private String[] extensions; private String captionText; // gui elements private GUITextNode caption; private GUIElementNode fileName; private GUIElementNode files; // private MutableString value; // private Action applyAction; /** * Public constructor * @param model library controller */ public FileDialogScreenController() { try { this.cwd = new File(".").getCanonicalFile().toString(); } catch (IOException ioe) { ioe.printStackTrace(); } this.value = new MutableString(); this.applyAction = null; } /* * (non-Javadoc) * @see net.drewke.tdme.tools.shared.controller.ScreenController#getScreenNode() */ public GUIScreenNode getScreenNode() { return screenNode; } /** * @return path name */ public String getPathName() { return cwd; } /** * @return file name */ public String getFileName() { return fileName.getController().getValue().toString(); } /* * (non-Javadoc) * @see net.drewke.tdme.tools.shared.controller.ScreenController#initialize() */ public void initialize() { // load screen node try { screenNode = GUIParser.parse("resources/tools/shared/gui", "filedialog.xml"); screenNode.setVisible(false); screenNode.addActionListener(this); screenNode.addChangeListener(this); caption = (GUITextNode)screenNode.getNodeById("filedialog_caption"); files = (GUIElementNode)screenNode.getNodeById("filedialog_files"); fileName = (GUIElementNode)screenNode.getNodeById("filedialog_filename"); } catch (Exception e) { e.printStackTrace(); } } /* * (non-Javadoc) * @see net.drewke.tdme.tools.shared.controller.ScreenController#dispose() */ public void dispose() { } /** * Set up file dialog list box */ private void setupFileDialogListBox() { // set up caption { String directory = cwd; if (directory.length() > 50) directory = "..." + directory.substring(directory.length() - 50 + 3); caption.getText().set(captionText).append(directory); } // list files String[] fileList = new String[0]; try { String directory = cwd; fileList = FileSystem.getInstance().list(directory, new FilenameFilter() { public boolean accept(File directory, String file) { if (new File(directory, file).isDirectory() == true) return true; for (String extension: extensions) { if (file.toLowerCase().endsWith("." 
+ extension)) return true; } return false; } }); } catch (IOException ioe) { ioe.printStackTrace(); } // files inner GUIParentNode filesInnerNode = (GUIParentNode)(files.getScreenNode().getNodeById(files.getId() + "_inner")); // construct XML for sub nodes int idx = 1; String filesInnerNodeSubNodesXML = ""; filesInnerNodeSubNodesXML+= "<scrollarea width=\"100%\" height=\"100%\">\n"; filesInnerNodeSubNodesXML+= "<selectbox-option text=\"..\" value=\"..\" />\n"; for (String file: fileList) { filesInnerNodeSubNodesXML+= "<selectbox-option text=\"" + GUIParser.escapeQuotes(file) + "\" value=\"" + GUIParser.escapeQuotes(file) + "\" />\n"; } filesInnerNodeSubNodesXML+= "</scrollarea>\n"; // inject sub nodes try { filesInnerNode.replaceSubNodes( filesInnerNodeSubNodesXML, true ); } catch (Exception e) { e.printStackTrace(); } } /** * Shows the file dialog pop up * @param caption text * @param extensions * @param apply action * @throws IOException */ public void show(String cwd, String captionText, String[] extensions, String fileName, Action applyAction) { try { this.cwd = new File(".").getCanonicalPath().toString(); this.cwd = new File(cwd).getCanonicalPath().toString(); } catch (IOException ioe) { ioe.printStackTrace(); } this.captionText = captionText; this.extensions = extensions; this.fileName.getController().setValue(value.set(fileName)); setupFileDialogListBox(); screenNode.setVisible(true); this.applyAction = applyAction; } /** * Abort the file dialog pop up */ public void close() { screenNode.setVisible(false); } /** * On value changed */ public void onValueChanged(GUIElementNode node) { try { if (node.getId().equals(files.getId()) == true) { String selectedFile = node.getController().getValue().toString(); Console.println(selectedFile); Console.println(cwd + ":" + selectedFile); if (new File(cwd, selectedFile).isDirectory()) { File file = new File(cwd, selectedFile); try { cwd = file.getCanonicalFile().toString(); } catch (IOException ioe) {} setupFileDialogListBox(); } else { fileName.getController().setValue( value.set(selectedFile) ); } } } catch (Exception e) { e.printStackTrace(); } } /* * (non-Javadoc) * @see net.drewke.tdme.gui.events.GUIActionListener#onActionPerformed(net.drewke.tdme.gui.events.GUIActionListener.Type, net.drewke.tdme.gui.nodes.GUIElementNode) */ public void onActionPerformed(Type type, GUIElementNode node) { switch (type) { case PERFORMED: { if (node.getId().equals("filedialog_apply")) { if (applyAction != null) applyAction.performAction(); } else if (node.getId().equals("filedialog_abort")) { close(); } break; } default: { break; } } } }
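/*
 * Illustrative sketch (not part of the controller above): opening the file dialog and reacting to
 * "Apply". It assumes Action is a single-method callback (performAction()), as its use above
 * suggests, and that the controller has already been initialize()d and wired into a GUI screen;
 * the directory and extension list are made up.
 */
package net.drewke.tdme.tools.shared.controller;

import net.drewke.tdme.gui.events.Action;
import net.drewke.tdme.utils.Console;

public class FileDialogScreenControllerSketch {
    public static void open(final FileDialogScreenController fileDialog) {
        fileDialog.show(
            "/home/user/models",            // initial working directory (assumed)
            "Load model from: ",            // caption prefix
            new String[] {"dae", "tm"},     // accepted extensions (assumed)
            "",                             // preselected file name
            new Action() {
                public void performAction() {
                    // Runs when the user presses "Apply" in the dialog.
                    Console.println(fileDialog.getPathName() + "/" + fileDialog.getFileName());
                    fileDialog.close();
                }
            }
        );
    }
}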
/*------------------------------------------------------------------------- * * Copyright (c) 2004-2014, PostgreSQL Global Development Group * * *------------------------------------------------------------------------- */ package org.postgresql.test.jdbc2; import org.postgresql.PGResultSetMetaData; import org.postgresql.test.TestUtil; import junit.framework.TestCase; import java.sql.*; public class ResultSetMetaDataTest extends TestCase { private Connection conn; public ResultSetMetaDataTest(String name) { super(name); } protected void setUp() throws Exception { conn = TestUtil.openDB(); TestUtil.createTable(conn, "rsmd1", "a int primary key, b text, c decimal(10,2)", true); TestUtil.createTable(conn, "timetest", "tm time(3), tmtz timetz, ts timestamp without time zone, tstz timestamp(6) with time zone"); TestUtil.dropSequence( conn, "serialtest_a_seq"); TestUtil.dropSequence( conn, "serialtest_b_seq"); TestUtil.createTable(conn, "serialtest", "a serial, b bigserial, c int"); TestUtil.createTable(conn, "alltypes", "bool boolean, i2 int2, i4 int4, i8 int8, num numeric(10,2), re real, fl float, ch char(3), vc varchar(3), tx text, d date, t time without time zone, tz time with time zone, ts timestamp without time zone, tsz timestamp with time zone, bt bytea"); TestUtil.createTable(conn, "sizetest", "fixedchar char(5), fixedvarchar varchar(5), unfixedvarchar varchar, txt text, bytearr bytea, num64 numeric(6,4), num60 numeric(6,0), num numeric, ip inet"); TestUtil.createTable(conn, "compositetest", "col rsmd1"); } protected void tearDown() throws Exception { TestUtil.dropTable(conn, "compositetest"); TestUtil.dropTable(conn, "rsmd1"); TestUtil.dropTable(conn, "timetest"); TestUtil.dropTable(conn, "serialtest"); TestUtil.dropTable(conn, "alltypes"); TestUtil.dropTable(conn, "sizetest"); TestUtil.dropSequence( conn, "serialtest_a_seq"); TestUtil.dropSequence( conn, "serialtest_b_seq"); TestUtil.closeDB(conn); } public void testStandardResultSet() throws SQLException { Statement stmt = conn.createStatement(); ResultSet rs = stmt.executeQuery("SELECT a,b,c,a+c as total,oid,b as d FROM rsmd1"); runStandardTests(rs.getMetaData()); rs.close(); stmt.close(); } public void testPreparedResultSet() throws SQLException { if (!TestUtil.isProtocolVersion(conn, 3)) return; PreparedStatement pstmt = conn.prepareStatement("SELECT a,b,c,a+c as total,oid,b as d FROM rsmd1 WHERE b = ?"); runStandardTests(pstmt.getMetaData()); pstmt.close(); } private void runStandardTests(ResultSetMetaData rsmd) throws SQLException { PGResultSetMetaData pgrsmd = (PGResultSetMetaData)rsmd; assertEquals(6, rsmd.getColumnCount()); assertEquals("a", rsmd.getColumnLabel(1)); assertEquals("total", rsmd.getColumnLabel(4)); assertEquals("a", rsmd.getColumnName(1)); assertEquals("oid", rsmd.getColumnName(5)); if (TestUtil.isProtocolVersion(conn, 3)) { assertEquals("", pgrsmd.getBaseColumnName(4)); assertEquals("b", pgrsmd.getBaseColumnName(6)); } assertEquals(Types.INTEGER, rsmd.getColumnType(1)); assertEquals(Types.VARCHAR, rsmd.getColumnType(2)); assertEquals("int4", rsmd.getColumnTypeName(1)); assertEquals("text", rsmd.getColumnTypeName(2)); assertEquals(10, rsmd.getPrecision(3)); assertEquals(2, rsmd.getScale(3)); assertEquals("", rsmd.getSchemaName(1)); assertEquals("", rsmd.getSchemaName(4)); if (TestUtil.isProtocolVersion(conn, 3)) { assertEquals("public", pgrsmd.getBaseSchemaName(1)); assertEquals("", pgrsmd.getBaseSchemaName(4)); } assertEquals("rsmd1", rsmd.getTableName(1)); assertEquals("", rsmd.getTableName(4)); if 
(TestUtil.isProtocolVersion(conn, 3)) { assertEquals("rsmd1", pgrsmd.getBaseTableName(1)); assertEquals("", pgrsmd.getBaseTableName(4)); } if (TestUtil.isProtocolVersion(conn, 3)) { assertEquals(ResultSetMetaData.columnNoNulls, rsmd.isNullable(1)); assertEquals(ResultSetMetaData.columnNullable, rsmd.isNullable(2)); assertEquals(ResultSetMetaData.columnNullableUnknown, rsmd.isNullable(4)); } else { assertEquals(ResultSetMetaData.columnNullableUnknown, rsmd.isNullable(1)); } } // verify that a prepared update statement returns no metadata and doesn't execute. public void testPreparedUpdate() throws SQLException { PreparedStatement pstmt = conn.prepareStatement("INSERT INTO rsmd1(a,b) VALUES(?,?)"); pstmt.setInt(1,1); pstmt.setString(2,"hello"); ResultSetMetaData rsmd = pstmt.getMetaData(); assertNull(rsmd); pstmt.close(); Statement stmt = conn.createStatement(); ResultSet rs = stmt.executeQuery("SELECT COUNT(*) FROM rsmd1"); assertTrue(rs.next()); assertEquals(0, rs.getInt(1)); rs.close(); stmt.close(); } public void testDatabaseMetaDataNames() throws SQLException { DatabaseMetaData databaseMetaData = conn.getMetaData(); ResultSet resultSet = databaseMetaData.getTableTypes(); ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); assertEquals(1, resultSetMetaData.getColumnCount()); assertEquals("TABLE_TYPE", resultSetMetaData.getColumnName(1)); resultSet.close(); } public void testTimestampInfo() throws SQLException { Statement stmt = conn.createStatement(); ResultSet rs = stmt.executeQuery("SELECT tm, tmtz, ts, tstz FROM timetest"); ResultSetMetaData rsmd = rs.getMetaData(); // For reference: // TestUtil.createTable(conn, "timetest", "tm time(3), tmtz timetz, ts timestamp without time zone, tstz timestamp(6) with time zone"); assertEquals(3, rsmd.getScale(1)); assertEquals(6, rsmd.getScale(2)); assertEquals(6, rsmd.getScale(3)); assertEquals(6, rsmd.getScale(4)); assertEquals(12, rsmd.getColumnDisplaySize(1)); assertEquals(21, rsmd.getColumnDisplaySize(2)); assertEquals(29, rsmd.getColumnDisplaySize(3)); assertEquals(35, rsmd.getColumnDisplaySize(4)); rs.close(); stmt.close(); } public void testColumnDisplaySize() throws SQLException { Statement stmt = conn.createStatement(); ResultSet rs = stmt.executeQuery("SELECT fixedchar, fixedvarchar, unfixedvarchar, txt, bytearr, num64, num60, num, ip FROM sizetest"); ResultSetMetaData rsmd = rs.getMetaData(); assertEquals(5, rsmd.getColumnDisplaySize(1)); assertEquals(5, rsmd.getColumnDisplaySize(2)); assertEquals(Integer.MAX_VALUE, rsmd.getColumnDisplaySize(3)); assertEquals(Integer.MAX_VALUE, rsmd.getColumnDisplaySize(4)); assertEquals(Integer.MAX_VALUE, rsmd.getColumnDisplaySize(5)); assertEquals(8, rsmd.getColumnDisplaySize(6)); assertEquals(7, rsmd.getColumnDisplaySize(7)); assertEquals(131089, rsmd.getColumnDisplaySize(8)); assertEquals(Integer.MAX_VALUE, rsmd.getColumnDisplaySize(9)); } public void testIsAutoIncrement() throws SQLException { Statement stmt = conn.createStatement(); ResultSet rs = stmt.executeQuery("SELECT c,b,a FROM serialtest"); ResultSetMetaData rsmd = rs.getMetaData(); assertTrue(!rsmd.isAutoIncrement(1)); if (TestUtil.isProtocolVersion(conn, 3)) { assertTrue(rsmd.isAutoIncrement(2)); assertTrue(rsmd.isAutoIncrement(3)); assertEquals("bigserial", rsmd.getColumnTypeName(2)); assertEquals("serial", rsmd.getColumnTypeName(3)); } rs.close(); stmt.close(); } public void testClassesMatch() throws SQLException { Statement stmt = conn.createStatement(); stmt.executeUpdate("INSERT INTO alltypes (bool, i2, i4, i8, num, re, 
fl, ch, vc, tx, d, t, tz, ts, tsz, bt) VALUES ('t', 2, 4, 8, 3.1, 3.14, 3.141, 'c', 'vc', 'tx', '2004-04-09', '09:01:00', '11:11:00-01','2004-04-09 09:01:00','1999-09-19 14:23:12-09', '\\\\123')"); ResultSet rs = stmt.executeQuery("SELECT * FROM alltypes"); ResultSetMetaData rsmd = rs.getMetaData(); assertTrue(rs.next()); for (int i=0; i<rsmd.getColumnCount(); i++) { assertEquals(rs.getObject(i+1).getClass().getName(), rsmd.getColumnClassName(i+1)); } } public void testComposite() throws Exception { Statement stmt = conn.createStatement(); ResultSet rs = stmt.executeQuery("SELECT col FROM compositetest"); ResultSetMetaData rsmd = rs.getMetaData(); assertEquals(Types.STRUCT, rsmd.getColumnType(1)); assertEquals("rsmd1", rsmd.getColumnTypeName(1)); } public void testUnexecutedStatement() throws Exception { PreparedStatement pstmt = conn.prepareStatement("SELECT col FROM compositetest"); // we have not executed the statement but we can still get the metadata ResultSetMetaData rsmd = pstmt.getMetaData(); assertEquals(Types.STRUCT, rsmd.getColumnType(1)); assertEquals("rsmd1", rsmd.getColumnTypeName(1)); } public void testClosedResultSet() throws Exception { PreparedStatement pstmt = conn.prepareStatement("SELECT col FROM compositetest"); ResultSet rs = pstmt.executeQuery(); rs.close(); // close the statement and make sure we can still get the metadata ResultSetMetaData rsmd = pstmt.getMetaData(); assertEquals(Types.STRUCT, rsmd.getColumnType(1)); assertEquals("rsmd1", rsmd.getColumnTypeName(1)); } }
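/*
 * Illustrative sketch (not part of the test class above): printing the same column metadata the
 * tests assert on, using only standard java.sql.ResultSetMetaData calls. The JDBC URL and
 * credentials are placeholders for a PostgreSQL database that contains the rsmd1 table.
 */
package org.postgresql.test.jdbc2;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;

public class ResultSetMetaDataDumpExample {
    public static void main(String[] args) throws SQLException {
        // placeholder connection details; adjust for a real PostgreSQL instance
        try (Connection conn = DriverManager.getConnection("jdbc:postgresql://localhost/test", "test", "test");
             Statement stmt = conn.createStatement();
             ResultSet rs = stmt.executeQuery("SELECT a, b, c, a + c AS total FROM rsmd1")) {
            ResultSetMetaData rsmd = rs.getMetaData();
            for (int i = 1; i <= rsmd.getColumnCount(); i++) {
                // label, type name, precision/scale, nullability and table, as exercised by runStandardTests()
                System.out.printf("%s %s(%d,%d) nullable=%d table=%s%n",
                        rsmd.getColumnLabel(i), rsmd.getColumnTypeName(i),
                        rsmd.getPrecision(i), rsmd.getScale(i),
                        rsmd.isNullable(i), rsmd.getTableName(i));
            }
        }
    }
}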
package cgeo.geocaching; import butterknife.ButterKnife; import butterknife.InjectView; import butterknife.Optional; import cgeo.geocaching.activity.AbstractActionBarActivity; import cgeo.geocaching.activity.INavigationSource; import cgeo.geocaching.apps.cache.navi.NavigationAppFactory; import cgeo.geocaching.location.DistanceParser; import cgeo.geocaching.location.Geopoint; import cgeo.geocaching.location.GeopointFormatter; import cgeo.geocaching.sensors.GeoData; import cgeo.geocaching.sensors.GeoDirHandler; import cgeo.geocaching.sensors.Sensors; import cgeo.geocaching.settings.Settings; import cgeo.geocaching.ui.AbstractViewHolder; import cgeo.geocaching.ui.NavigationActionProvider; import cgeo.geocaching.ui.dialog.CoordinatesInputDialog; import cgeo.geocaching.ui.dialog.Dialogs; import cgeo.geocaching.utils.Formatter; import cgeo.geocaching.utils.Log; import cgeo.geocaching.utils.RxUtils; import org.apache.commons.lang3.StringUtils; import org.eclipse.jdt.annotation.Nullable; import rx.functions.Action0; import rx.schedulers.Schedulers; import android.app.Activity; import android.content.Context; import android.content.res.Configuration; import android.os.Bundle; import android.support.v4.view.MenuItemCompat; import android.view.ContextMenu; import android.view.ContextMenu.ContextMenuInfo; import android.view.LayoutInflater; import android.view.Menu; import android.view.MenuItem; import android.view.View; import android.view.View.OnCreateContextMenuListener; import android.view.ViewGroup; import android.widget.AdapterView; import android.widget.AdapterView.OnItemClickListener; import android.widget.AdapterView.OnItemSelectedListener; import android.widget.ArrayAdapter; import android.widget.Button; import android.widget.EditText; import android.widget.ListView; import android.widget.Spinner; import android.widget.TextView; import java.util.List; public class NavigateAnyPointActivity extends AbstractActionBarActivity implements CoordinatesInputDialog.CoordinateUpdate, INavigationSource { @InjectView(R.id.historyList) protected ListView historyListView; // list header fields are optional, due to being expanded later than the list itself @Optional @InjectView(R.id.buttonLatitude) protected Button latButton; @Optional @InjectView(R.id.buttonLongitude) protected Button lonButton; @Optional @InjectView(R.id.distance) protected EditText distanceEditText; @Optional @InjectView(R.id.distanceUnit) protected Spinner distanceUnitSelector; @Optional @InjectView(R.id.current) protected Button buttonCurrent; @Optional @InjectView(R.id.bearing) protected EditText bearingEditText; private boolean changed = false; private List<Destination> historyOfSearchedLocations; private DestinationHistoryAdapter destinationHistoryAdapter; private TextView historyFooter; private static final int CONTEXT_MENU_NAVIGATE = 1; private static final int CONTEXT_MENU_DELETE_WAYPOINT = 2; private static final int CONTEXT_MENU_EDIT_WAYPOINT = 3; private int contextMenuItemPosition; private String distanceUnit = StringUtils.EMPTY; protected static class ViewHolder extends AbstractViewHolder { @InjectView(R.id.simple_way_point_longitude) protected TextView longitude; @InjectView(R.id.simple_way_point_latitude) protected TextView latitude; @InjectView(R.id.date) protected TextView date; public ViewHolder(final View rowView) { super(rowView); } } private static class DestinationHistoryAdapter extends ArrayAdapter<Destination> { private LayoutInflater inflater = null; public DestinationHistoryAdapter(final Context context, final 
List<Destination> objects) { super(context, 0, objects); } @Override public View getView(final int position, final View convertView, final ViewGroup parent) { View rowView = convertView; final ViewHolder viewHolder; if (rowView == null) { rowView = getInflater().inflate(R.layout.simple_way_point, parent, false); viewHolder = new ViewHolder(rowView); } else { viewHolder = (ViewHolder) rowView.getTag(); } fillViewHolder(viewHolder, getItem(position)); return rowView; } private static void fillViewHolder(final ViewHolder viewHolder, final Destination loc) { final String lonString = loc.getCoords().format(GeopointFormatter.Format.LON_DECMINUTE); final String latString = loc.getCoords().format(GeopointFormatter.Format.LAT_DECMINUTE); viewHolder.longitude.setText(lonString); viewHolder.latitude.setText(latString); viewHolder.date.setText(Formatter.formatShortDateTime(loc.getDate())); } private LayoutInflater getInflater() { if (inflater == null) { inflater = ((Activity) getContext()).getLayoutInflater(); } return inflater; } } @Override public void onCreate(final Bundle savedInstanceState) { super.onCreate(savedInstanceState, R.layout.navigateanypoint_activity); ButterKnife.inject(this); createHistoryView(); init(); } private void createHistoryView() { final View pointControls = getLayoutInflater().inflate(R.layout.navigateanypoint_header, historyListView, false); historyListView.addHeaderView(pointControls, null, false); // inject a second time to also find the dynamically expanded views above ButterKnife.inject(this); if (getHistoryOfSearchedLocations().isEmpty()) { historyListView.addFooterView(getEmptyHistoryFooter(), null, false); } historyListView.setAdapter(getDestionationHistoryAdapter()); historyListView.setOnItemClickListener(new OnItemClickListener() { @Override public void onItemClick(final AdapterView<?> arg0, final View arg1, final int arg2, final long arg3) { final Object selection = arg0.getItemAtPosition(arg2); if (selection instanceof Destination) { navigateTo(((Destination) selection).getCoords()); } } }); historyListView.setOnCreateContextMenuListener(new OnCreateContextMenuListener() { @Override public void onCreateContextMenu(final ContextMenu menu, final View v, final ContextMenuInfo menuInfo) { menu.add(Menu.NONE, CONTEXT_MENU_NAVIGATE, Menu.NONE, res.getString(R.string.cache_menu_navigate)); menu.add(Menu.NONE, CONTEXT_MENU_EDIT_WAYPOINT, Menu.NONE, R.string.waypoint_edit); menu.add(Menu.NONE, CONTEXT_MENU_DELETE_WAYPOINT, Menu.NONE, R.string.waypoint_delete); } }); } @Override public boolean onContextItemSelected(final MenuItem item) { final AdapterView.AdapterContextMenuInfo menuInfo = (AdapterView.AdapterContextMenuInfo) item.getMenuInfo(); final int position = (null != menuInfo) ? 
menuInfo.position : contextMenuItemPosition; final Object destination = historyListView.getItemAtPosition(position); switch (item.getItemId()) { case CONTEXT_MENU_NAVIGATE: contextMenuItemPosition = position; if (destination instanceof Destination) { NavigationAppFactory.showNavigationMenu(this, null, null, ((Destination) destination).getCoords()); return true; } break; case CONTEXT_MENU_DELETE_WAYPOINT: if (destination instanceof Destination) { removeFromHistory((Destination) destination); } return true; case CONTEXT_MENU_EDIT_WAYPOINT: if (destination instanceof Destination) { final Geopoint gp = ((Destination) destination).getCoords(); latButton.setText(gp.format(GeopointFormatter.Format.LAT_DECMINUTE)); lonButton.setText(gp.format(GeopointFormatter.Format.LON_DECMINUTE)); } return true; default: } return super.onContextItemSelected(item); } private TextView getEmptyHistoryFooter() { if (historyFooter == null) { historyFooter = (TextView) getLayoutInflater().inflate(R.layout.cacheslist_footer, historyListView, false); historyFooter.setText(R.string.search_history_empty); } return historyFooter; } private DestinationHistoryAdapter getDestionationHistoryAdapter() { if (destinationHistoryAdapter == null) { destinationHistoryAdapter = new DestinationHistoryAdapter(this, getHistoryOfSearchedLocations()); } return destinationHistoryAdapter; } private List<Destination> getHistoryOfSearchedLocations() { if (historyOfSearchedLocations == null) { // Load from database historyOfSearchedLocations = DataStore.loadHistoryOfSearchedLocations(); } return historyOfSearchedLocations; } @Override public void onConfigurationChanged(final Configuration newConfig) { super.onConfigurationChanged(newConfig); init(); } @Override public void onResume() { super.onResume(geoDirHandler.start(GeoDirHandler.UPDATE_GEODATA)); init(); } private void init() { latButton.setOnClickListener(new CoordDialogListener()); lonButton.setOnClickListener(new CoordDialogListener()); final Geopoint coords = Settings.getAnyCoordinates(); if (coords != null) { latButton.setText(coords.format(GeopointFormatter.Format.LAT_DECMINUTE)); lonButton.setText(coords.format(GeopointFormatter.Format.LON_DECMINUTE)); } buttonCurrent.setOnClickListener(new CurrentListener()); getDestionationHistoryAdapter().notifyDataSetChanged(); disableSuggestions(distanceEditText); initializeDistanceUnitSelector(); } private void initializeDistanceUnitSelector() { if (StringUtils.isBlank(distanceUnit)) { if (Settings.useImperialUnits()) { distanceUnitSelector.setSelection(2); // ft distanceUnit = res.getStringArray(R.array.distance_units)[2]; } else { distanceUnitSelector.setSelection(0); // m distanceUnit = res.getStringArray(R.array.distance_units)[0]; } } distanceUnitSelector.setOnItemSelectedListener(new ChangeDistanceUnit(this)); } private class CoordDialogListener implements View.OnClickListener { @Override public void onClick(final View arg0) { Geopoint gp = null; if (latButton.getText().length() > 0 && lonButton.getText().length() > 0) { gp = new Geopoint(latButton.getText().toString() + " " + lonButton.getText().toString()); } final CoordinatesInputDialog coordsDialog = CoordinatesInputDialog.getInstance(null, gp, Sensors.getInstance().currentGeo()); coordsDialog.setCancelable(true); coordsDialog.show(getSupportFragmentManager(),"wpedit_dialog"); } } @Override public void updateCoordinates(final Geopoint gp) { latButton.setText(gp.format(GeopointFormatter.Format.LAT_DECMINUTE)); lonButton.setText(gp.format(GeopointFormatter.Format.LON_DECMINUTE)); 
changed = true; } private static class ChangeDistanceUnit implements OnItemSelectedListener { private ChangeDistanceUnit(final NavigateAnyPointActivity unitView) { this.unitView = unitView; } private final NavigateAnyPointActivity unitView; @Override public void onItemSelected(final AdapterView<?> arg0, final View arg1, final int arg2, final long arg3) { unitView.distanceUnit = (String) arg0.getItemAtPosition(arg2); } @Override public void onNothingSelected(final AdapterView<?> arg0) { } } @Override public boolean onCreateOptionsMenu(final Menu menu) { getMenuInflater().inflate(R.menu.navigate_any_point_activity_options, menu); final MenuItem menuItem = menu.findItem(R.id.menu_default_navigation); menuItem.setTitle(NavigationAppFactory.getDefaultNavigationApplication().getName()); final NavigationActionProvider navAction = (NavigationActionProvider) MenuItemCompat.getActionProvider(menuItem); if (navAction != null) { navAction.setNavigationSource(this); } return true; } @Override public boolean onPrepareOptionsMenu(final Menu menu) { super.onPrepareOptionsMenu(menu); try { final boolean visible = getDestination() != null; menu.findItem(R.id.menu_navigate).setVisible(visible); menu.findItem(R.id.menu_default_navigation).setVisible(visible); menu.findItem(R.id.menu_caches_around).setVisible(visible); menu.findItem(R.id.menu_clear_history).setVisible(!getHistoryOfSearchedLocations().isEmpty()); } catch (final RuntimeException ignored) { // nothing } return true; } @Override public boolean onOptionsItemSelected(final MenuItem item) { final int menuItem = item.getItemId(); final Geopoint coords = getDestinationAndAddToHistory(); switch (menuItem) { case R.id.menu_default_navigation: navigateTo(coords); return true; case R.id.menu_caches_around: cachesAround(coords); return true; case R.id.menu_clear_history: clearHistory(); return true; case R.id.menu_navigate: NavigationAppFactory.showNavigationMenu(this, null, null, coords); return true; } return super.onOptionsItemSelected(item); } private Geopoint getDestinationAndAddToHistory() { final Geopoint coords = getDestination(); addToHistory(coords); return coords; } private void addToHistory(@Nullable final Geopoint coords) { if (coords == null) { return; } // Add locations to history final Destination loc = new Destination(coords); if (!getHistoryOfSearchedLocations().contains(loc)) { getHistoryOfSearchedLocations().add(0, loc); RxUtils.andThenOnUi(Schedulers.io(), new Action0() { @Override public void call() { // Save location DataStore.saveSearchedDestination(loc); } }, new Action0() { @Override public void call() { // Ensure to remove the footer historyListView.removeFooterView(getEmptyHistoryFooter()); destinationHistoryAdapter.notifyDataSetChanged(); } }); } } private void removeFromHistory(final Destination destination) { if (getHistoryOfSearchedLocations().contains(destination)) { getHistoryOfSearchedLocations().remove(destination); // Save DataStore.removeSearchedDestination(destination); if (getHistoryOfSearchedLocations().isEmpty()) { if (historyListView.getFooterViewsCount() == 0) { historyListView.addFooterView(getEmptyHistoryFooter()); } } getDestionationHistoryAdapter().notifyDataSetChanged(); showToast(res.getString(R.string.search_remove_destination)); } } private void clearHistory() { if (!getHistoryOfSearchedLocations().isEmpty()) { getHistoryOfSearchedLocations().clear(); // Save DataStore.clearSearchedDestinations(); if (historyListView.getFooterViewsCount() == 0) { historyListView.addFooterView(getEmptyHistoryFooter()); } 
getDestionationHistoryAdapter().notifyDataSetChanged(); showToast(res.getString(R.string.search_history_cleared)); } } private void navigateTo(final Geopoint coords) { if (coords == null) { showToast(res.getString(R.string.err_location_unknown)); return; } NavigationAppFactory.startDefaultNavigationApplication(1, this, coords); } private void cachesAround(final Geopoint coords) { if (coords == null) { showToast(res.getString(R.string.err_location_unknown)); return; } CacheListActivity.startActivityCoordinates(this, coords, null); finish(); } private final GeoDirHandler geoDirHandler = new GeoDirHandler() { @Override public void updateGeoData(final GeoData geo) { try { latButton.setHint(geo.getCoords().format(GeopointFormatter.Format.LAT_DECMINUTE_RAW)); lonButton.setHint(geo.getCoords().format(GeopointFormatter.Format.LON_DECMINUTE_RAW)); } catch (final RuntimeException e) { Log.w("Failed to update location", e); } } }; private class CurrentListener implements View.OnClickListener { @Override public void onClick(final View arg0) { final Geopoint coords = Sensors.getInstance().currentGeo().getCoords(); latButton.setText(coords.format(GeopointFormatter.Format.LAT_DECMINUTE)); lonButton.setText(coords.format(GeopointFormatter.Format.LON_DECMINUTE)); changed = false; } } private Geopoint getDestination() { final String bearingText = bearingEditText.getText().toString(); // combine distance from EditText and distanceUnit saved from Spinner final String distanceText = distanceEditText.getText().toString() + distanceUnit; final String latText = latButton.getText().toString(); final String lonText = lonButton.getText().toString(); if (StringUtils.isBlank(bearingText) && StringUtils.isBlank(distanceText) && StringUtils.isBlank(latText) && StringUtils.isBlank(lonText)) { showToast(res.getString(R.string.err_point_no_position_given)); return null; } // get base coordinates Geopoint coords; if (StringUtils.isNotBlank(latText) && StringUtils.isNotBlank(lonText)) { try { coords = new Geopoint(latText, lonText); } catch (final Geopoint.ParseException e) { showToast(res.getString(e.resource)); return null; } } else { coords = Sensors.getInstance().currentGeo().getCoords(); } // apply projection if (StringUtils.isNotBlank(bearingText) && StringUtils.isNotBlank(distanceText)) { // bearing & distance final double bearing; try { bearing = Double.parseDouble(bearingText); } catch (final NumberFormatException ignored) { Dialogs.message(this, R.string.err_point_bear_and_dist_title, R.string.err_point_bear_and_dist); return null; } final double distance; try { distance = DistanceParser.parseDistance(distanceText, !Settings.useImperialUnits()); } catch (final NumberFormatException ignored) { showToast(res.getString(R.string.err_parse_dist)); return null; } coords = coords.project(bearing, distance); } saveCoords(coords); return coords; } private void saveCoords(final Geopoint coords) { if (!changed) { return; } Settings.setAnyCoordinates(coords); } @Override public void startDefaultNavigation() { navigateTo(getDestinationAndAddToHistory()); } @Override public void startDefaultNavigation2() { NavigationAppFactory.startDefaultNavigationApplication(2, this, getDestinationAndAddToHistory()); } }
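/*
 * Sketch (not part of the activity above) of the coordinate math performed in getDestination():
 * parse a base point from the two coordinate strings, then project it by bearing and distance.
 * The example class, the coordinate strings and the numeric values are illustrative assumptions;
 * Geopoint and DistanceParser are the same cgeo utilities the activity calls.
 */
package cgeo.geocaching;

import cgeo.geocaching.location.DistanceParser;
import cgeo.geocaching.location.Geopoint;

public class ProjectionExample {

    public static Geopoint projectedDestination() throws Geopoint.ParseException {
        // base coordinates in the same DECMINUTE style the lat/lon buttons hold (example values)
        final Geopoint base = new Geopoint("N 48° 12.345", "E 011° 34.567");
        final double bearing = 90.0; // bearing in degrees
        // the distance text carries its unit, mirroring distanceEditText + distanceUnit; true = metric default
        final double distance = DistanceParser.parseDistance("250m", true);
        return base.project(bearing, distance); // projected target point
    }
}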
/* * Copyright 2002-2021 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.jms.listener; import java.util.HashSet; import java.util.Set; import jakarta.jms.Connection; import jakarta.jms.ConnectionFactory; import jakarta.jms.ExceptionListener; import jakarta.jms.JMSException; import jakarta.jms.Message; import jakarta.jms.MessageConsumer; import jakarta.jms.MessageListener; import jakarta.jms.Session; import org.junit.jupiter.api.Test; import org.springframework.context.support.GenericApplicationContext; import org.springframework.jms.StubQueue; import org.springframework.lang.Nullable; import org.springframework.util.ErrorHandler; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatIllegalArgumentException; import static org.assertj.core.api.Assertions.fail; import static org.mockito.BDDMockito.given; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; /** * @author Rick Evans * @author Juergen Hoeller * @author Chris Beams * @author Mark Fisher */ public class SimpleMessageListenerContainerTests { private static final String DESTINATION_NAME = "foo"; private static final String EXCEPTION_MESSAGE = "This.Is.It"; private static final StubQueue QUEUE_DESTINATION = new StubQueue(); private final SimpleMessageListenerContainer container = new SimpleMessageListenerContainer(); @Test public void testSettingMessageListenerToANullType() { this.container.setMessageListener(null); assertThat(this.container.getMessageListener()).isNull(); } @Test public void testSettingMessageListenerToAnUnsupportedType() { assertThatIllegalArgumentException().isThrownBy(() -> this.container.setMessageListener("Bingo")); } @Test public void testSessionTransactedModeReallyDoesDefaultToFalse() { assertThat(this.container.isPubSubNoLocal()).as("The [pubSubLocal] property of SimpleMessageListenerContainer " + "must default to false. Change this test (and the attendant javadoc) if you have changed the default.").isFalse(); } @Test public void testSettingConcurrentConsumersToZeroIsNotAllowed() { assertThatIllegalArgumentException().isThrownBy(() -> { this.container.setConcurrentConsumers(0); this.container.afterPropertiesSet(); }); } @Test public void testSettingConcurrentConsumersToANegativeValueIsNotAllowed() { assertThatIllegalArgumentException().isThrownBy(() -> { this.container.setConcurrentConsumers(-198); this.container.afterPropertiesSet(); }); } @Test public void testContextRefreshedEventDoesNotStartTheConnectionIfAutoStartIsSetToFalse() throws Exception { MessageConsumer messageConsumer = mock(MessageConsumer.class); Session session = mock(Session.class); // Queue gets created in order to create MessageConsumer for that Destination... given(session.createQueue(DESTINATION_NAME)).willReturn(QUEUE_DESTINATION); // and then the MessageConsumer gets created... given(session.createConsumer(QUEUE_DESTINATION, null)).willReturn(messageConsumer); // no MessageSelector... 
Connection connection = mock(Connection.class); // session gets created in order to register MessageListener... given(connection.createSession(this.container.isSessionTransacted(), this.container.getSessionAcknowledgeMode())).willReturn(session); ConnectionFactory connectionFactory = mock(ConnectionFactory.class); given(connectionFactory.createConnection()).willReturn(connection); this.container.setConnectionFactory(connectionFactory); this.container.setDestinationName(DESTINATION_NAME); this.container.setMessageListener(new TestMessageListener()); this.container.setAutoStartup(false); this.container.afterPropertiesSet(); GenericApplicationContext context = new GenericApplicationContext(); context.getBeanFactory().registerSingleton("messageListenerContainer", this.container); context.refresh(); context.close(); verify(connection).setExceptionListener(this.container); } @Test public void testContextRefreshedEventStartsTheConnectionByDefault() throws Exception { MessageConsumer messageConsumer = mock(MessageConsumer.class); Session session = mock(Session.class); // Queue gets created in order to create MessageConsumer for that Destination... given(session.createQueue(DESTINATION_NAME)).willReturn(QUEUE_DESTINATION); // and then the MessageConsumer gets created... given(session.createConsumer(QUEUE_DESTINATION, null)).willReturn(messageConsumer); // no MessageSelector... Connection connection = mock(Connection.class); // session gets created in order to register MessageListener... given(connection.createSession(this.container.isSessionTransacted(), this.container.getSessionAcknowledgeMode())).willReturn(session); // and the connection is start()ed after the listener is registered... ConnectionFactory connectionFactory = mock(ConnectionFactory.class); given(connectionFactory.createConnection()).willReturn(connection); this.container.setConnectionFactory(connectionFactory); this.container.setDestinationName(DESTINATION_NAME); this.container.setMessageListener(new TestMessageListener()); this.container.afterPropertiesSet(); GenericApplicationContext context = new GenericApplicationContext(); context.getBeanFactory().registerSingleton("messageListenerContainer", this.container); context.refresh(); context.close(); verify(connection).setExceptionListener(this.container); verify(connection).start(); } @Test public void testCorrectSessionExposedForSessionAwareMessageListenerInvocation() throws Exception { final SimpleMessageConsumer messageConsumer = new SimpleMessageConsumer(); final Session session = mock(Session.class); // Queue gets created in order to create MessageConsumer for that Destination... given(session.createQueue(DESTINATION_NAME)).willReturn(QUEUE_DESTINATION); // and then the MessageConsumer gets created... given(session.createConsumer(QUEUE_DESTINATION, null)).willReturn(messageConsumer); // no MessageSelector... // an exception is thrown, so the rollback logic is being applied here... given(session.getTransacted()).willReturn(false); given(session.getAcknowledgeMode()).willReturn(Session.AUTO_ACKNOWLEDGE); Connection connection = mock(Connection.class); // session gets created in order to register MessageListener... given(connection.createSession(this.container.isSessionTransacted(), this.container.getSessionAcknowledgeMode())).willReturn(session); // and the connection is start()ed after the listener is registered... 
final ConnectionFactory connectionFactory = mock(ConnectionFactory.class); given(connectionFactory.createConnection()).willReturn(connection); final Set<String> failure = new HashSet<>(1); this.container.setConnectionFactory(connectionFactory); this.container.setDestinationName(DESTINATION_NAME); this.container.setMessageListener((SessionAwareMessageListener<Message>) (Message message, @Nullable Session sess) -> { try { // Check correct Session passed into SessionAwareMessageListener. assertThat(session).isSameAs(sess); } catch (Throwable ex) { failure.add("MessageListener execution failed: " + ex); } }); this.container.afterPropertiesSet(); this.container.start(); final Message message = mock(Message.class); messageConsumer.sendMessage(message); if (!failure.isEmpty()) { fail(failure.iterator().next().toString()); } verify(connection).setExceptionListener(this.container); verify(connection).start(); } @Test public void testTaskExecutorCorrectlyInvokedWhenSpecified() throws Exception { final SimpleMessageConsumer messageConsumer = new SimpleMessageConsumer(); final Session session = mock(Session.class); given(session.createQueue(DESTINATION_NAME)).willReturn(QUEUE_DESTINATION); given(session.createConsumer(QUEUE_DESTINATION, null)).willReturn(messageConsumer); // no MessageSelector... given(session.getTransacted()).willReturn(false); given(session.getAcknowledgeMode()).willReturn(Session.AUTO_ACKNOWLEDGE); Connection connection = mock(Connection.class); given(connection.createSession(this.container.isSessionTransacted(), this.container.getSessionAcknowledgeMode())).willReturn(session); final ConnectionFactory connectionFactory = mock(ConnectionFactory.class); given(connectionFactory.createConnection()).willReturn(connection); final TestMessageListener listener = new TestMessageListener(); this.container.setConnectionFactory(connectionFactory); this.container.setDestinationName(DESTINATION_NAME); this.container.setMessageListener(listener); this.container.setTaskExecutor(task -> { listener.executorInvoked = true; assertThat(listener.listenerInvoked).isFalse(); task.run(); assertThat(listener.listenerInvoked).isTrue(); }); this.container.afterPropertiesSet(); this.container.start(); final Message message = mock(Message.class); messageConsumer.sendMessage(message); assertThat(listener.executorInvoked).isTrue(); assertThat(listener.listenerInvoked).isTrue(); verify(connection).setExceptionListener(this.container); verify(connection).start(); } @Test public void testRegisteredExceptionListenerIsInvokedOnException() throws Exception { final SimpleMessageConsumer messageConsumer = new SimpleMessageConsumer(); Session session = mock(Session.class); // Queue gets created in order to create MessageConsumer for that Destination... given(session.createQueue(DESTINATION_NAME)).willReturn(QUEUE_DESTINATION); // and then the MessageConsumer gets created... given(session.createConsumer(QUEUE_DESTINATION, null)).willReturn(messageConsumer); // no MessageSelector... // an exception is thrown, so the rollback logic is being applied here... given(session.getTransacted()).willReturn(false); Connection connection = mock(Connection.class); // session gets created in order to register MessageListener... given(connection.createSession(this.container.isSessionTransacted(), this.container.getSessionAcknowledgeMode())).willReturn(session); // and the connection is start()ed after the listener is registered... 
ConnectionFactory connectionFactory = mock(ConnectionFactory.class); given(connectionFactory.createConnection()).willReturn(connection); final JMSException theException = new JMSException(EXCEPTION_MESSAGE); this.container.setConnectionFactory(connectionFactory); this.container.setDestinationName(DESTINATION_NAME); this.container.setMessageListener((SessionAwareMessageListener<Message>) (Message message, @Nullable Session session1) -> { throw theException; }); ExceptionListener exceptionListener = mock(ExceptionListener.class); this.container.setExceptionListener(exceptionListener); this.container.afterPropertiesSet(); this.container.start(); // manually trigger an Exception with the above bad MessageListener... final Message message = mock(Message.class); // a Throwable from a MessageListener MUST simply be swallowed... messageConsumer.sendMessage(message); verify(connection).setExceptionListener(this.container); verify(connection).start(); verify(exceptionListener).onException(theException); } @Test public void testRegisteredErrorHandlerIsInvokedOnException() throws Exception { final SimpleMessageConsumer messageConsumer = new SimpleMessageConsumer(); Session session = mock(Session.class); // Queue gets created in order to create MessageConsumer for that Destination... given(session.createQueue(DESTINATION_NAME)).willReturn(QUEUE_DESTINATION); // and then the MessageConsumer gets created... given(session.createConsumer(QUEUE_DESTINATION, null)).willReturn(messageConsumer); // no MessageSelector... // an exception is thrown, so the rollback logic is being applied here... given(session.getTransacted()).willReturn(false); Connection connection = mock(Connection.class); // session gets created in order to register MessageListener... given(connection.createSession(this.container.isSessionTransacted(), this.container.getSessionAcknowledgeMode())).willReturn(session); ConnectionFactory connectionFactory = mock(ConnectionFactory.class); given(connectionFactory.createConnection()).willReturn(connection); final IllegalStateException theException = new IllegalStateException("intentional test failure"); this.container.setConnectionFactory(connectionFactory); this.container.setDestinationName(DESTINATION_NAME); this.container.setMessageListener((SessionAwareMessageListener<Message>) (Message message, @Nullable Session session1) -> { throw theException; }); ErrorHandler errorHandler = mock(ErrorHandler.class); this.container.setErrorHandler(errorHandler); this.container.afterPropertiesSet(); this.container.start(); // manually trigger an Exception with the above bad MessageListener... Message message = mock(Message.class); // a Throwable from a MessageListener MUST simply be swallowed... messageConsumer.sendMessage(message); verify(connection).setExceptionListener(this.container); verify(connection).start(); verify(errorHandler).handleError(theException); } @Test public void testNoRollbackOccursIfSessionIsNotTransactedAndThatExceptionsDo_NOT_Propagate() throws Exception { final SimpleMessageConsumer messageConsumer = new SimpleMessageConsumer(); Session session = mock(Session.class); // Queue gets created in order to create MessageConsumer for that Destination... given(session.createQueue(DESTINATION_NAME)).willReturn(QUEUE_DESTINATION); // and then the MessageConsumer gets created... given(session.createConsumer(QUEUE_DESTINATION, null)).willReturn(messageConsumer); // no MessageSelector... // an exception is thrown, so the rollback logic is being applied here... 
given(session.getTransacted()).willReturn(false); Connection connection = mock(Connection.class); // session gets created in order to register MessageListener... given(connection.createSession(this.container.isSessionTransacted(), this.container.getSessionAcknowledgeMode())).willReturn(session); // and the connection is start()ed after the listener is registered... ConnectionFactory connectionFactory = mock(ConnectionFactory.class); given(connectionFactory.createConnection()).willReturn(connection); this.container.setConnectionFactory(connectionFactory); this.container.setDestinationName(DESTINATION_NAME); this.container.setMessageListener((MessageListener) message -> { throw new UnsupportedOperationException(); }); this.container.afterPropertiesSet(); this.container.start(); // manually trigger an Exception with the above bad MessageListener... final Message message = mock(Message.class); // a Throwable from a MessageListener MUST simply be swallowed... messageConsumer.sendMessage(message); verify(connection).setExceptionListener(this.container); verify(connection).start(); } @Test public void testTransactedSessionsGetRollbackLogicAppliedAndThatExceptionsStillDo_NOT_Propagate() throws Exception { this.container.setSessionTransacted(true); final SimpleMessageConsumer messageConsumer = new SimpleMessageConsumer(); Session session = mock(Session.class); // Queue gets created in order to create MessageConsumer for that Destination... given(session.createQueue(DESTINATION_NAME)).willReturn(QUEUE_DESTINATION); // and then the MessageConsumer gets created... given(session.createConsumer(QUEUE_DESTINATION, null)).willReturn(messageConsumer); // no MessageSelector... // an exception is thrown, so the rollback logic is being applied here... given(session.getTransacted()).willReturn(true); Connection connection = mock(Connection.class); // session gets created in order to register MessageListener... given(connection.createSession(this.container.isSessionTransacted(), this.container.getSessionAcknowledgeMode())).willReturn(session); // and the connection is start()ed after the listener is registered... ConnectionFactory connectionFactory = mock(ConnectionFactory.class); given(connectionFactory.createConnection()).willReturn(connection); this.container.setConnectionFactory(connectionFactory); this.container.setDestinationName(DESTINATION_NAME); this.container.setMessageListener((MessageListener) message -> { throw new UnsupportedOperationException(); }); this.container.afterPropertiesSet(); this.container.start(); // manually trigger an Exception with the above bad MessageListener... final Message message = mock(Message.class); // a Throwable from a MessageListener MUST simply be swallowed... messageConsumer.sendMessage(message); // Session is rolled back 'cos it is transacted... verify(session).rollback(); verify(connection).setExceptionListener(this.container); verify(connection).start(); } @Test public void testDestroyClosesConsumersSessionsAndConnectionInThatOrder() throws Exception { MessageConsumer messageConsumer = mock(MessageConsumer.class); Session session = mock(Session.class); // Queue gets created in order to create MessageConsumer for that Destination... given(session.createQueue(DESTINATION_NAME)).willReturn(QUEUE_DESTINATION); // and then the MessageConsumer gets created... given(session.createConsumer(QUEUE_DESTINATION, null)).willReturn(messageConsumer); // no MessageSelector... 
Connection connection = mock(Connection.class); // session gets created in order to register MessageListener... given(connection.createSession(this.container.isSessionTransacted(), this.container.getSessionAcknowledgeMode())).willReturn(session); // and the connection is start()ed after the listener is registered... ConnectionFactory connectionFactory = mock(ConnectionFactory.class); given(connectionFactory.createConnection()).willReturn(connection); this.container.setConnectionFactory(connectionFactory); this.container.setDestinationName(DESTINATION_NAME); this.container.setMessageListener(new TestMessageListener()); this.container.afterPropertiesSet(); this.container.start(); this.container.destroy(); verify(messageConsumer).close(); verify(session).close(); verify(connection).setExceptionListener(this.container); verify(connection).start(); verify(connection).close(); } private static class TestMessageListener implements MessageListener { public boolean executorInvoked = false; public boolean listenerInvoked = false; @Override public void onMessage(Message message) { this.listenerInvoked = true; } } private static class SimpleMessageConsumer implements MessageConsumer { private MessageListener messageListener; public void sendMessage(Message message) { this.messageListener.onMessage(message); } @Override public String getMessageSelector() { throw new UnsupportedOperationException(); } @Override public MessageListener getMessageListener() { return this.messageListener; } @Override public void setMessageListener(MessageListener messageListener) { this.messageListener = messageListener; } @Override public Message receive() { throw new UnsupportedOperationException(); } @Override public Message receive(long l) { throw new UnsupportedOperationException(); } @Override public Message receiveNoWait() { throw new UnsupportedOperationException(); } @Override public void close() { throw new UnsupportedOperationException(); } } }
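/*
 * Configuration sketch (not one of the tests above): wiring the SimpleMessageListenerContainer
 * under test for plain use. The ConnectionFactory is assumed to be supplied by a broker client
 * library, and the destination reuses the "foo" queue from DESTINATION_NAME; only setters and
 * lifecycle methods already exercised by the tests are called.
 */
package org.springframework.jms.listener;

import jakarta.jms.ConnectionFactory;
import jakarta.jms.MessageListener;

public class SimpleMessageListenerContainerConfigExample {

	public static SimpleMessageListenerContainer listenerContainer(ConnectionFactory connectionFactory) {
		SimpleMessageListenerContainer container = new SimpleMessageListenerContainer();
		container.setConnectionFactory(connectionFactory);  // broker connection factory (assumed provided)
		container.setDestinationName("foo");                // queue name, as in DESTINATION_NAME above
		container.setSessionTransacted(false);              // auto-acknowledge, as in most tests above
		container.setMessageListener((MessageListener) message ->
				System.out.println("received " + message)); // application callback
		container.afterPropertiesSet();                     // validates the configuration
		container.start();                                  // opens the connection and starts consuming
		return container;
	}

}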
/* * Copyright 2016-2019 David Karnok * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package hu.akarnokd.rxjava2.operators; import java.util.concurrent.atomic.*; import io.reactivex.*; import io.reactivex.disposables.Disposable; import io.reactivex.exceptions.Exceptions; import io.reactivex.functions.*; import io.reactivex.internal.disposables.DisposableHelper; import io.reactivex.internal.functions.ObjectHelper; import io.reactivex.internal.queue.SpscLinkedArrayQueue; import io.reactivex.internal.util.*; import io.reactivex.plugins.RxJavaPlugins; /** * Maps each upstream element into the single result of an inner Observable while * keeping the order of items and combines the original and inner item into the output * value for the downstream. * * @param <T> the upstream value type * @param <U> the inner Observable's element type * @param <R> the result element type * @since 0.20.4 */ final class ObservableMapAsync<T, U, R> extends Observable<R> implements ObservableTransformer<T, R> { final ObservableSource<T> source; final Function<? super T, ? extends ObservableSource<? extends U>> mapper; final BiFunction<? super T, ? super U, ? extends R> combiner; final int capacityHint; ObservableMapAsync(ObservableSource<T> source, Function<? super T, ? extends ObservableSource<? extends U>> mapper, BiFunction<? super T, ? super U, ? extends R> combiner, int capacityHint) { super(); this.source = source; this.mapper = mapper; this.combiner = combiner; this.capacityHint = capacityHint; } @Override public ObservableSource<R> apply(Observable<T> upstream) { return new ObservableMapAsync<T, U, R>(upstream, mapper, combiner, capacityHint); } @Override protected void subscribeActual(Observer<? super R> observer) { source.subscribe(new MapAsyncObserver<T, U, R>(observer, mapper, combiner, capacityHint)); } static final class MapAsyncObserver<T, U, R> extends AtomicInteger implements Observer<T>, Disposable { private static final long serialVersionUID = -204261674817426393L; final Observer<? super R> downstream; final Function<? super T, ? extends ObservableSource<? extends U>> mapper; final BiFunction<? super T, ? super U, ? extends R> combiner; final SpscLinkedArrayQueue<T> queue; final AtomicThrowable errors; final AtomicReference<Disposable> innerDisposable; Disposable upstream; volatile boolean done; volatile boolean disposed; T current; volatile int state; U inner; static final int STATE_FRESH = 0; static final int STATE_RUNNING = 1; static final int STATE_SUCCESS = 2; static final int STATE_EMPTY = 3; MapAsyncObserver( Observer<? super R> downstream, Function<? super T, ? extends ObservableSource<? extends U>> mapper, BiFunction<? super T, ? super U, ? 
extends R> combiner, int capacityHint) { this.downstream = downstream; this.mapper = mapper; this.combiner = combiner; this.queue = new SpscLinkedArrayQueue<T>(capacityHint); this.errors = new AtomicThrowable(); this.innerDisposable = new AtomicReference<Disposable>(); } @Override public void dispose() { disposed = true; upstream.dispose(); DisposableHelper.dispose(innerDisposable); drain(); } @Override public boolean isDisposed() { return disposed; } @Override public void onSubscribe(Disposable d) { if (DisposableHelper.validate(upstream, d)) { this.upstream = d; downstream.onSubscribe(this); } } @Override public void onNext(T t) { queue.offer(t); drain(); } @Override public void onError(Throwable e) { DisposableHelper.dispose(innerDisposable); if (errors.addThrowable(e)) { done = true; drain(); } else { RxJavaPlugins.onError(e); } } @Override public void onComplete() { done = true; drain(); } public void drain() { if (getAndIncrement() != 0) { return; } int missed = 1; for (;;) { if (disposed) { current = null; inner = null; queue.clear(); } else { if (errors.get() != null) { Throwable ex = errors.terminate(); disposed = true; downstream.onError(ex); continue; } int s = state; if (s == STATE_FRESH) { boolean d = done; T item = queue.poll(); boolean empty = item == null; if (d && empty) { downstream.onComplete(); } else if (!empty) { current = item; ObservableSource<? extends U> innerSource; try { innerSource = ObjectHelper.requireNonNull(mapper.apply(item), "The mapper returned a null ObservableSource"); } catch (Throwable ex) { Exceptions.throwIfFatal(ex); upstream.dispose(); errors.addThrowable(ex); ex = errors.terminate(); disposed = true; downstream.onError(ex); continue; } state = STATE_RUNNING; innerSource.subscribe(new InnerObserver()); } } else if (s == STATE_SUCCESS) { T mainItem = current; current = null; U innerItem = inner; inner = null; R result; try { result = ObjectHelper.requireNonNull(combiner.apply(mainItem, innerItem), "The combiner returned a null value"); } catch (Throwable ex) { Exceptions.throwIfFatal(ex); upstream.dispose(); errors.addThrowable(ex); ex = errors.terminate(); disposed = true; downstream.onError(ex); continue; } downstream.onNext(result); state = STATE_FRESH; continue; } else if (s == STATE_EMPTY) { current = null; state = STATE_FRESH; continue; } } missed = addAndGet(-missed); if (missed == 0) { break; } } } void innerSuccess(U item) { inner = item; state = STATE_SUCCESS; DisposableHelper.replace(innerDisposable, null); drain(); } void innerError(Throwable ex) { if (errors.addThrowable(ex)) { state = STATE_EMPTY; DisposableHelper.replace(innerDisposable, null); upstream.dispose(); drain(); } else { RxJavaPlugins.onError(ex); } } void innerComplete() { state = STATE_EMPTY; DisposableHelper.replace(innerDisposable, null); drain(); } final class InnerObserver implements Observer<U> { boolean once; @Override public void onSubscribe(Disposable d) { DisposableHelper.setOnce(innerDisposable, d); } @Override public void onNext(U t) { if (!once) { once = true; innerDisposable.get().dispose(); innerSuccess(t); } } @Override public void onError(Throwable e) { if (!once) { innerError(e); } else { RxJavaPlugins.onError(e); } } @Override public void onComplete() { if (!once) { innerComplete(); } } } } }
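/*
 * Usage sketch (not part of the operator above), placed in the same package because the class is
 * package-private; the library's public entry point may differ. Each upstream value is mapped to a
 * single-result inner Observable and combined with it, with upstream order preserved.
 */
package hu.akarnokd.rxjava2.operators;

import java.util.concurrent.TimeUnit;

import io.reactivex.Observable;

public final class ObservableMapAsyncExample {
    public static void main(String[] args) {
        new ObservableMapAsync<Integer, String, String>(
                Observable.just(1, 2, 3),                                             // upstream values
                v -> Observable.just("value-" + v).delay(50, TimeUnit.MILLISECONDS),  // asynchronous inner lookup
                (v, s) -> v + " -> " + s,                                             // combine original and inner result
                16)                                                                   // capacity hint for the internal queue
            .blockingForEach(System.out::println);                                    // prints "1 -> value-1", "2 -> value-2", ...
    }
}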
/** * Copyright 2016 Smart Society Services B.V. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 */ package org.opensmartgridplatform.cucumber.platform.publiclighting.glue.steps.ws.publiclighting.adhocmanagement; import static org.opensmartgridplatform.cucumber.core.ReadSettingsHelper.getBoolean; import static org.opensmartgridplatform.cucumber.core.ReadSettingsHelper.getInteger; import static org.opensmartgridplatform.cucumber.core.ReadSettingsHelper.getString; import static org.opensmartgridplatform.cucumber.platform.core.CorrelationUidHelper.saveCorrelationUidInScenarioContext; import java.util.Map; import org.apache.commons.lang3.StringUtils; import org.junit.Assert; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.ws.soap.client.SoapFaultClientException; import org.opensmartgridplatform.adapter.ws.schema.publiclighting.adhocmanagement.LightValue; import org.opensmartgridplatform.adapter.ws.schema.publiclighting.adhocmanagement.SetLightAsyncRequest; import org.opensmartgridplatform.adapter.ws.schema.publiclighting.adhocmanagement.SetLightAsyncResponse; import org.opensmartgridplatform.adapter.ws.schema.publiclighting.adhocmanagement.SetLightRequest; import org.opensmartgridplatform.adapter.ws.schema.publiclighting.adhocmanagement.SetLightResponse; import org.opensmartgridplatform.adapter.ws.schema.publiclighting.common.AsyncRequest; import org.opensmartgridplatform.adapter.ws.schema.publiclighting.common.OsgpResultType; import org.opensmartgridplatform.cucumber.core.ScenarioContext; import org.opensmartgridplatform.cucumber.core.Wait; import org.opensmartgridplatform.cucumber.platform.glue.steps.ws.GenericResponseSteps; import org.opensmartgridplatform.cucumber.platform.publiclighting.PlatformPubliclightingDefaults; import org.opensmartgridplatform.cucumber.platform.publiclighting.PlatformPubliclightingKeys; import org.opensmartgridplatform.cucumber.platform.publiclighting.support.ws.publiclighting.PublicLightingAdHocManagementClient; import cucumber.api.java.en.Then; import cucumber.api.java.en.When; /** * Class with all the set light requests steps */ public class SetLightSteps { @Autowired private PublicLightingAdHocManagementClient client; private static final Logger LOGGER = LoggerFactory.getLogger(SetLightSteps.class); /** * Sends a Set Light request to the platform for a given device * identification. * * @param requestParameters * The table with the request parameters. 
* @throws Throwable */ @When("^receiving a set light request$") public void receivingASetLightRequest(final Map<String, String> requestParameters) throws Throwable { final SetLightRequest request = new SetLightRequest(); request.setDeviceIdentification( getString(requestParameters, PlatformPubliclightingKeys.KEY_DEVICE_IDENTIFICATION, PlatformPubliclightingDefaults.DEFAULT_DEVICE_IDENTIFICATION)); final LightValue lightValue = new LightValue(); lightValue.setIndex(getInteger(requestParameters, PlatformPubliclightingKeys.KEY_INDEX, PlatformPubliclightingDefaults.DEFAULT_INDEX)); if (requestParameters.containsKey(PlatformPubliclightingKeys.KEY_DIMVALUE) && !StringUtils.isEmpty(requestParameters.get(PlatformPubliclightingKeys.KEY_DIMVALUE))) { lightValue.setDimValue(getInteger(requestParameters, PlatformPubliclightingKeys.KEY_DIMVALUE, PlatformPubliclightingDefaults.DEFAULT_DIMVALUE)); } lightValue.setOn(getBoolean(requestParameters, PlatformPubliclightingKeys.KEY_ON, PlatformPubliclightingDefaults.DEFAULT_ON)); request.getLightValue().add(lightValue); try { ScenarioContext.current().put(PlatformPubliclightingKeys.RESPONSE, this.client.setLight(request)); } catch (final SoapFaultClientException ex) { ScenarioContext.current().put(PlatformPubliclightingKeys.RESPONSE, ex); } } @When("^receiving a set light request with \"([^\"]*)\" valid lightvalues and \"([^\"]*)\" invalid lightvalues$") public void receivingAsetLightRequestWithValidLightValuesAndInvalidLightValues(final Integer nofValidLightValues, final Integer nofInvalidLightValues, final Map<String, String> requestParameters) throws Throwable { final SetLightRequest request = new SetLightRequest(); request.setDeviceIdentification( getString(requestParameters, PlatformPubliclightingKeys.KEY_DEVICE_IDENTIFICATION, PlatformPubliclightingDefaults.DEFAULT_DEVICE_IDENTIFICATION)); for (int i = 0; i < nofValidLightValues; i++) { final LightValue lightValue = new LightValue(); lightValue.setIndex(getInteger(requestParameters, PlatformPubliclightingKeys.KEY_INDEX, PlatformPubliclightingDefaults.DEFAULT_INDEX)); lightValue.setDimValue(getInteger(requestParameters, PlatformPubliclightingKeys.KEY_DIMVALUE, PlatformPubliclightingDefaults.DEFAULT_DIMVALUE)); lightValue.setOn(getBoolean(requestParameters, PlatformPubliclightingKeys.KEY_ON, PlatformPubliclightingDefaults.DEFAULT_ON)); request.getLightValue().add(lightValue); } for (int i = 0; i < nofInvalidLightValues; i++) { final LightValue lightValue = new LightValue(); lightValue.setIndex(getInteger(requestParameters, PlatformPubliclightingKeys.KEY_INDEX, PlatformPubliclightingDefaults.DEFAULT_INDEX)); lightValue.setDimValue(50); lightValue.setOn(false); request.getLightValue().add(lightValue); } try { ScenarioContext.current().put(PlatformPubliclightingKeys.RESPONSE, this.client.setLight(request)); } catch (final SoapFaultClientException ex) { ScenarioContext.current().put(PlatformPubliclightingKeys.RESPONSE, ex); } } @When("^receiving a set light request with \"([^\"]*)\" light values$") public void receivingASetLightRequestWithLightValues(final Integer nofLightValues, final Map<String, String> requestParameters) throws Throwable { final SetLightRequest request = new SetLightRequest(); request.setDeviceIdentification( getString(requestParameters, PlatformPubliclightingKeys.KEY_DEVICE_IDENTIFICATION, PlatformPubliclightingDefaults.DEFAULT_DEVICE_IDENTIFICATION)); for (int i = 0; i < nofLightValues; i++) { final LightValue lightValue = new LightValue(); 
lightValue.setIndex(getInteger(requestParameters, PlatformPubliclightingKeys.KEY_INDEX, PlatformPubliclightingDefaults.DEFAULT_INDEX)); lightValue.setDimValue(getInteger(requestParameters, PlatformPubliclightingKeys.KEY_DIMVALUE, PlatformPubliclightingDefaults.DEFAULT_DIMVALUE)); lightValue.setOn(getBoolean(requestParameters, PlatformPubliclightingKeys.KEY_ON, PlatformPubliclightingDefaults.DEFAULT_ON)); request.getLightValue().add(lightValue); } try { ScenarioContext.current().put(PlatformPubliclightingKeys.RESPONSE, this.client.setLight(request)); } catch (final SoapFaultClientException ex) { ScenarioContext.current().put(PlatformPubliclightingKeys.RESPONSE, ex); } } /** * The check for the response from the Platform. * * @param expectedResponseData * The table with the expected fields in the response. * @apiNote The response will contain the correlation uid, so store that in the * current scenario context for later use. * @throws Throwable */ @Then("^the set light async response contains$") public void theSetLightResponseContains(final Map<String, String> expectedResponseData) throws Throwable { final SetLightAsyncResponse asyncResponse = (SetLightAsyncResponse) ScenarioContext.current() .get(PlatformPubliclightingKeys.RESPONSE); Assert.assertNotNull(asyncResponse.getAsyncResponse().getCorrelationUid()); Assert.assertEquals(getString(expectedResponseData, PlatformPubliclightingKeys.KEY_DEVICE_IDENTIFICATION), asyncResponse.getAsyncResponse().getDeviceId()); // Save the returned CorrelationUid in the Scenario related context for // further use. saveCorrelationUidInScenarioContext(asyncResponse.getAsyncResponse().getCorrelationUid(), getString(expectedResponseData, PlatformPubliclightingKeys.KEY_ORGANIZATION_IDENTIFICATION, PlatformPubliclightingDefaults.DEFAULT_ORGANIZATION_IDENTIFICATION)); LOGGER.info("Got CorrelationUid: [" + ScenarioContext.current().get(PlatformPubliclightingKeys.KEY_CORRELATION_UID) + "]"); } @Then("^the set light response contains soap fault$") public void theSetLightResponseContainsSoapFault(final Map<String, String> expectedResult) { GenericResponseSteps.verifySoapFault(expectedResult); } @Then("^the platform buffers a set light response message for device \"([^\"]*)\"$") public void thePlatformBuffersASetLightResponseMessage(final String deviceIdentification, final Map<String, String> expectedResult) throws Throwable { final SetLightAsyncRequest request = new SetLightAsyncRequest(); final AsyncRequest asyncRequest = new AsyncRequest(); asyncRequest.setDeviceId(deviceIdentification); asyncRequest.setCorrelationUid( (String) ScenarioContext.current().get(PlatformPubliclightingKeys.KEY_CORRELATION_UID)); request.setAsyncRequest(asyncRequest); Wait.until(() -> { SetLightResponse response = null; try { response = this.client.getSetLightResponse(request); } catch (final Exception e) { // do nothing } Assert.assertNotNull(response); Assert.assertEquals( Enum.valueOf(OsgpResultType.class, expectedResult.get(PlatformPubliclightingKeys.KEY_RESULT)), response.getResult()); }); } }
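/*
 * Illustrative sketch (not part of the production step definitions above): shows how a
 * single scenario table row is translated into the SetLightRequest that
 * receivingASetLightRequest builds. The device identification and light value below are
 * hypothetical example data, not platform defaults; SetLightRequest and LightValue are
 * the same generated schema types imported by SetLightSteps.
 */
class SetLightRequestExample {

    static SetLightRequest exampleRequest() {
        final SetLightRequest request = new SetLightRequest();
        // Falls back to the platform default when the scenario table omits the key.
        request.setDeviceIdentification("TST-01"); // hypothetical device identification
        final LightValue lightValue = new LightValue();
        lightValue.setIndex(1);      // relay index from the scenario table
        lightValue.setDimValue(75);  // only set when the table holds a non-empty DimValue
        lightValue.setOn(true);
        request.getLightValue().add(lightValue);
        return request;
    }
}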
/** * Copyright (c) 2016-present, RxJava Contributors. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in * compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License is * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See * the License for the specific language governing permissions and limitations under the License. */ package io.reactivex.internal.operators.observable; import static org.junit.Assert.*; import static org.mockito.ArgumentMatchers.*; import static org.mockito.Mockito.*; import java.io.IOException; import java.util.List; import java.util.concurrent.*; import org.junit.*; import org.mockito.InOrder; import io.reactivex.*; import io.reactivex.disposables.*; import io.reactivex.exceptions.TestException; import io.reactivex.observers.TestObserver; import io.reactivex.plugins.RxJavaPlugins; import io.reactivex.schedulers.TestScheduler; import io.reactivex.subjects.PublishSubject; public class ObservableTimeoutTests { private PublishSubject<String> underlyingSubject; private TestScheduler testScheduler; private Observable<String> withTimeout; private static final long TIMEOUT = 3; private static final TimeUnit TIME_UNIT = TimeUnit.SECONDS; @Before public void setUp() { underlyingSubject = PublishSubject.create(); testScheduler = new TestScheduler(); withTimeout = underlyingSubject.timeout(TIMEOUT, TIME_UNIT, testScheduler); } @Test public void shouldNotTimeoutIfOnNextWithinTimeout() { Observer<String> observer = TestHelper.mockObserver(); TestObserver<String> to = new TestObserver<String>(observer); withTimeout.subscribe(to); testScheduler.advanceTimeBy(2, TimeUnit.SECONDS); underlyingSubject.onNext("One"); verify(observer).onNext("One"); testScheduler.advanceTimeBy(2, TimeUnit.SECONDS); verify(observer, never()).onError(any(Throwable.class)); to.dispose(); } @Test public void shouldNotTimeoutIfSecondOnNextWithinTimeout() { Observer<String> observer = TestHelper.mockObserver(); TestObserver<String> to = new TestObserver<String>(observer); withTimeout.subscribe(to); testScheduler.advanceTimeBy(2, TimeUnit.SECONDS); underlyingSubject.onNext("One"); testScheduler.advanceTimeBy(2, TimeUnit.SECONDS); underlyingSubject.onNext("Two"); verify(observer).onNext("Two"); testScheduler.advanceTimeBy(2, TimeUnit.SECONDS); verify(observer, never()).onError(any(Throwable.class)); to.dispose(); } @Test public void shouldTimeoutIfOnNextNotWithinTimeout() { Observer<String> observer = TestHelper.mockObserver(); TestObserver<String> to = new TestObserver<String>(observer); withTimeout.subscribe(to); testScheduler.advanceTimeBy(TIMEOUT + 1, TimeUnit.SECONDS); verify(observer).onError(any(TimeoutException.class)); to.dispose(); } @Test public void shouldTimeoutIfSecondOnNextNotWithinTimeout() { Observer<String> observer = TestHelper.mockObserver(); TestObserver<String> to = new TestObserver<String>(observer); withTimeout.subscribe(observer); testScheduler.advanceTimeBy(2, TimeUnit.SECONDS); underlyingSubject.onNext("One"); verify(observer).onNext("One"); testScheduler.advanceTimeBy(TIMEOUT + 1, TimeUnit.SECONDS); verify(observer).onError(any(TimeoutException.class)); to.dispose(); } @Test public void shouldCompleteIfUnderlyingComletes() { Observer<String> observer = TestHelper.mockObserver(); TestObserver<String> to = new 
TestObserver<String>(observer); withTimeout.subscribe(observer); testScheduler.advanceTimeBy(2, TimeUnit.SECONDS); underlyingSubject.onComplete(); testScheduler.advanceTimeBy(2, TimeUnit.SECONDS); verify(observer).onComplete(); verify(observer, never()).onError(any(Throwable.class)); to.dispose(); } @Test public void shouldErrorIfUnderlyingErrors() { Observer<String> observer = TestHelper.mockObserver(); TestObserver<String> to = new TestObserver<String>(observer); withTimeout.subscribe(observer); testScheduler.advanceTimeBy(2, TimeUnit.SECONDS); underlyingSubject.onError(new UnsupportedOperationException()); testScheduler.advanceTimeBy(2, TimeUnit.SECONDS); verify(observer).onError(any(UnsupportedOperationException.class)); to.dispose(); } @Test public void shouldSwitchToOtherIfOnNextNotWithinTimeout() { Observable<String> other = Observable.just("a", "b", "c"); Observable<String> source = underlyingSubject.timeout(TIMEOUT, TIME_UNIT, testScheduler, other); Observer<String> observer = TestHelper.mockObserver(); TestObserver<String> to = new TestObserver<String>(observer); source.subscribe(to); testScheduler.advanceTimeBy(2, TimeUnit.SECONDS); underlyingSubject.onNext("One"); testScheduler.advanceTimeBy(4, TimeUnit.SECONDS); underlyingSubject.onNext("Two"); InOrder inOrder = inOrder(observer); inOrder.verify(observer, times(1)).onNext("One"); inOrder.verify(observer, times(1)).onNext("a"); inOrder.verify(observer, times(1)).onNext("b"); inOrder.verify(observer, times(1)).onNext("c"); inOrder.verify(observer, times(1)).onComplete(); inOrder.verifyNoMoreInteractions(); to.dispose(); } @Test public void shouldSwitchToOtherIfOnErrorNotWithinTimeout() { Observable<String> other = Observable.just("a", "b", "c"); Observable<String> source = underlyingSubject.timeout(TIMEOUT, TIME_UNIT, testScheduler, other); Observer<String> observer = TestHelper.mockObserver(); TestObserver<String> to = new TestObserver<String>(observer); source.subscribe(to); testScheduler.advanceTimeBy(2, TimeUnit.SECONDS); underlyingSubject.onNext("One"); testScheduler.advanceTimeBy(4, TimeUnit.SECONDS); underlyingSubject.onError(new UnsupportedOperationException()); InOrder inOrder = inOrder(observer); inOrder.verify(observer, times(1)).onNext("One"); inOrder.verify(observer, times(1)).onNext("a"); inOrder.verify(observer, times(1)).onNext("b"); inOrder.verify(observer, times(1)).onNext("c"); inOrder.verify(observer, times(1)).onComplete(); inOrder.verifyNoMoreInteractions(); to.dispose(); } @Test public void shouldSwitchToOtherIfOnCompletedNotWithinTimeout() { Observable<String> other = Observable.just("a", "b", "c"); Observable<String> source = underlyingSubject.timeout(TIMEOUT, TIME_UNIT, testScheduler, other); Observer<String> observer = TestHelper.mockObserver(); TestObserver<String> to = new TestObserver<String>(observer); source.subscribe(to); testScheduler.advanceTimeBy(2, TimeUnit.SECONDS); underlyingSubject.onNext("One"); testScheduler.advanceTimeBy(4, TimeUnit.SECONDS); underlyingSubject.onComplete(); InOrder inOrder = inOrder(observer); inOrder.verify(observer, times(1)).onNext("One"); inOrder.verify(observer, times(1)).onNext("a"); inOrder.verify(observer, times(1)).onNext("b"); inOrder.verify(observer, times(1)).onNext("c"); inOrder.verify(observer, times(1)).onComplete(); inOrder.verifyNoMoreInteractions(); to.dispose(); } @Test public void shouldSwitchToOtherAndCanBeUnsubscribedIfOnNextNotWithinTimeout() { PublishSubject<String> other = PublishSubject.create(); Observable<String> source = 
underlyingSubject.timeout(TIMEOUT, TIME_UNIT, testScheduler, other); Observer<String> observer = TestHelper.mockObserver(); TestObserver<String> to = new TestObserver<String>(observer); source.subscribe(to); testScheduler.advanceTimeBy(2, TimeUnit.SECONDS); underlyingSubject.onNext("One"); testScheduler.advanceTimeBy(4, TimeUnit.SECONDS); underlyingSubject.onNext("Two"); other.onNext("a"); other.onNext("b"); to.dispose(); // The following messages should not be delivered. other.onNext("c"); other.onNext("d"); other.onComplete(); InOrder inOrder = inOrder(observer); inOrder.verify(observer, times(1)).onNext("One"); inOrder.verify(observer, times(1)).onNext("a"); inOrder.verify(observer, times(1)).onNext("b"); inOrder.verifyNoMoreInteractions(); } @Test public void shouldTimeoutIfSynchronizedObservableEmitFirstOnNextNotWithinTimeout() throws InterruptedException { final CountDownLatch exit = new CountDownLatch(1); final CountDownLatch timeoutSetuped = new CountDownLatch(1); final Observer<String> observer = TestHelper.mockObserver(); final TestObserver<String> to = new TestObserver<String>(observer); new Thread(new Runnable() { @Override public void run() { Observable.unsafeCreate(new ObservableSource<String>() { @Override public void subscribe(Observer<? super String> observer) { observer.onSubscribe(Disposables.empty()); try { timeoutSetuped.countDown(); exit.await(); } catch (InterruptedException e) { e.printStackTrace(); } observer.onNext("a"); observer.onComplete(); } }).timeout(1, TimeUnit.SECONDS, testScheduler) .subscribe(to); } }).start(); timeoutSetuped.await(); testScheduler.advanceTimeBy(2, TimeUnit.SECONDS); InOrder inOrder = inOrder(observer); inOrder.verify(observer, times(1)).onError(isA(TimeoutException.class)); inOrder.verifyNoMoreInteractions(); exit.countDown(); // exit the thread } @Test public void shouldUnsubscribeFromUnderlyingSubscriptionOnTimeout() throws InterruptedException { // From https://github.com/ReactiveX/RxJava/pull/951 final Disposable s = mock(Disposable.class); Observable<String> never = Observable.unsafeCreate(new ObservableSource<String>() { @Override public void subscribe(Observer<? super String> observer) { observer.onSubscribe(s); } }); TestScheduler testScheduler = new TestScheduler(); Observable<String> observableWithTimeout = never.timeout(1000, TimeUnit.MILLISECONDS, testScheduler); Observer<String> observer = TestHelper.mockObserver(); TestObserver<String> to = new TestObserver<String>(observer); observableWithTimeout.subscribe(to); testScheduler.advanceTimeBy(2000, TimeUnit.MILLISECONDS); InOrder inOrder = inOrder(observer); inOrder.verify(observer).onError(isA(TimeoutException.class)); inOrder.verifyNoMoreInteractions(); verify(s, times(1)).dispose(); } @Test @Ignore("s should be considered cancelled upon executing onComplete and not expect downstream to call cancel") public void shouldUnsubscribeFromUnderlyingSubscriptionOnImmediatelyComplete() { // From https://github.com/ReactiveX/RxJava/pull/951 final Disposable s = mock(Disposable.class); Observable<String> immediatelyComplete = Observable.unsafeCreate(new ObservableSource<String>() { @Override public void subscribe(Observer<? 
super String> observer) { observer.onSubscribe(s); observer.onComplete(); } }); TestScheduler testScheduler = new TestScheduler(); Observable<String> observableWithTimeout = immediatelyComplete.timeout(1000, TimeUnit.MILLISECONDS, testScheduler); Observer<String> observer = TestHelper.mockObserver(); TestObserver<String> to = new TestObserver<String>(observer); observableWithTimeout.subscribe(to); testScheduler.advanceTimeBy(2000, TimeUnit.MILLISECONDS); InOrder inOrder = inOrder(observer); inOrder.verify(observer).onComplete(); inOrder.verifyNoMoreInteractions(); verify(s, times(1)).dispose(); } @Test @Ignore("s should be considered cancelled upon executing onError and not expect downstream to call cancel") public void shouldUnsubscribeFromUnderlyingSubscriptionOnImmediatelyErrored() throws InterruptedException { // From https://github.com/ReactiveX/RxJava/pull/951 final Disposable s = mock(Disposable.class); Observable<String> immediatelyError = Observable.unsafeCreate(new ObservableSource<String>() { @Override public void subscribe(Observer<? super String> observer) { observer.onSubscribe(s); observer.onError(new IOException("Error")); } }); TestScheduler testScheduler = new TestScheduler(); Observable<String> observableWithTimeout = immediatelyError.timeout(1000, TimeUnit.MILLISECONDS, testScheduler); Observer<String> observer = TestHelper.mockObserver(); TestObserver<String> to = new TestObserver<String>(observer); observableWithTimeout.subscribe(to); testScheduler.advanceTimeBy(2000, TimeUnit.MILLISECONDS); InOrder inOrder = inOrder(observer); inOrder.verify(observer).onError(isA(IOException.class)); inOrder.verifyNoMoreInteractions(); verify(s, times(1)).dispose(); } @Test public void shouldUnsubscribeFromUnderlyingSubscriptionOnDispose() { final PublishSubject<String> subject = PublishSubject.create(); final TestScheduler scheduler = new TestScheduler(); final TestObserver<String> observer = subject .timeout(100, TimeUnit.MILLISECONDS, scheduler) .test(); assertTrue(subject.hasObservers()); observer.dispose(); assertFalse(subject.hasObservers()); } @Test public void timedAndOther() { Observable.never().timeout(100, TimeUnit.MILLISECONDS, Observable.just(1)) .test() .awaitDone(5, TimeUnit.SECONDS) .assertResult(1); } @Test public void disposed() { TestHelper.checkDisposed(PublishSubject.create().timeout(1, TimeUnit.DAYS)); TestHelper.checkDisposed(PublishSubject.create().timeout(1, TimeUnit.DAYS, Observable.just(1))); } @Test public void timedErrorOther() { Observable.error(new TestException()) .timeout(1, TimeUnit.DAYS, Observable.just(1)) .test() .assertFailure(TestException.class); } @Test public void timedError() { Observable.error(new TestException()) .timeout(1, TimeUnit.DAYS) .test() .assertFailure(TestException.class); } @Test public void timedEmptyOther() { Observable.empty() .timeout(1, TimeUnit.DAYS, Observable.just(1)) .test() .assertResult(); } @Test public void timedEmpty() { Observable.empty() .timeout(1, TimeUnit.DAYS) .test() .assertResult(); } @Test public void badSource() { List<Throwable> errors = TestHelper.trackPluginErrors(); try { new Observable<Integer>() { @Override protected void subscribeActual(Observer<? 
super Integer> observer) { observer.onSubscribe(Disposables.empty()); observer.onNext(1); observer.onComplete(); observer.onNext(2); observer.onError(new TestException()); observer.onComplete(); } } .timeout(1, TimeUnit.DAYS) .test() .assertResult(1); TestHelper.assertUndeliverable(errors, 0, TestException.class); } finally { RxJavaPlugins.reset(); } } @Test public void badSourceOther() { List<Throwable> errors = TestHelper.trackPluginErrors(); try { new Observable<Integer>() { @Override protected void subscribeActual(Observer<? super Integer> observer) { observer.onSubscribe(Disposables.empty()); observer.onNext(1); observer.onComplete(); observer.onNext(2); observer.onError(new TestException()); observer.onComplete(); } } .timeout(1, TimeUnit.DAYS, Observable.just(3)) .test() .assertResult(1); TestHelper.assertUndeliverable(errors, 0, TestException.class); } finally { RxJavaPlugins.reset(); } } @Test public void timedTake() { PublishSubject<Integer> ps = PublishSubject.create(); TestObserver<Integer> to = ps.timeout(1, TimeUnit.DAYS) .take(1) .test(); assertTrue(ps.hasObservers()); ps.onNext(1); assertFalse(ps.hasObservers()); to.assertResult(1); } @Test public void timedFallbackTake() { PublishSubject<Integer> ps = PublishSubject.create(); TestObserver<Integer> to = ps.timeout(1, TimeUnit.DAYS, Observable.just(2)) .take(1) .test(); assertTrue(ps.hasObservers()); ps.onNext(1); assertFalse(ps.hasObservers()); to.assertResult(1); } @Test public void fallbackErrors() { Observable.never() .timeout(1, TimeUnit.MILLISECONDS, Observable.error(new TestException())) .test() .awaitDone(5, TimeUnit.SECONDS) .assertFailure(TestException.class); } @Test public void onNextOnTimeoutRace() { for (int i = 0; i < TestHelper.RACE_DEFAULT_LOOPS; i++) { final TestScheduler sch = new TestScheduler(); final PublishSubject<Integer> ps = PublishSubject.create(); TestObserver<Integer> to = ps.timeout(1, TimeUnit.SECONDS, sch).test(); Runnable r1 = new Runnable() { @Override public void run() { ps.onNext(1); } }; Runnable r2 = new Runnable() { @Override public void run() { sch.advanceTimeBy(1, TimeUnit.SECONDS); } }; TestHelper.race(r1, r2); if (to.valueCount() != 0) { if (to.errorCount() != 0) { to.assertFailure(TimeoutException.class, 1); } else { to.assertValuesOnly(1); } } else { to.assertFailure(TimeoutException.class); } } } @Test public void onNextOnTimeoutRaceFallback() { for (int i = 0; i < TestHelper.RACE_DEFAULT_LOOPS; i++) { final TestScheduler sch = new TestScheduler(); final PublishSubject<Integer> ps = PublishSubject.create(); TestObserver<Integer> to = ps.timeout(1, TimeUnit.SECONDS, sch, Observable.just(2)).test(); Runnable r1 = new Runnable() { @Override public void run() { ps.onNext(1); } }; Runnable r2 = new Runnable() { @Override public void run() { sch.advanceTimeBy(1, TimeUnit.SECONDS); } }; TestHelper.race(r1, r2); if (to.isTerminated()) { int c = to.valueCount(); if (c == 1) { int v = to.values().get(0); assertTrue("" + v, v == 1 || v == 2); } else { to.assertResult(1, 2); } } else { to.assertValuesOnly(1); } } } }
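/*
 * Minimal standalone sketch of the timeout-with-fallback behaviour exercised in
 * ObservableTimeoutTests: when the source stays silent for longer than the timeout
 * window, the fallback Observable is subscribed instead of a TimeoutException being
 * signalled. Assumes RxJava 2.x (io.reactivex) and reuses the same imports as the test
 * class above (Observable, PublishSubject, TestScheduler, TestObserver, TimeUnit).
 */
class TimeoutFallbackExample {

    static void demo() {
        TestScheduler scheduler = new TestScheduler();
        PublishSubject<String> source = PublishSubject.create();

        TestObserver<String> to = source
                .timeout(3, TimeUnit.SECONDS, scheduler, Observable.just("fallback"))
                .test();

        source.onNext("first");                        // arrives inside the window
        scheduler.advanceTimeBy(4, TimeUnit.SECONDS);  // window elapses with no further items

        to.assertValues("first", "fallback");          // switched to the fallback source
        to.assertComplete();
    }
}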
/* * Copyright 2017 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.guvnor.ala.openshift.config.impl; import org.guvnor.ala.config.CloneableConfig; import org.guvnor.ala.openshift.config.OpenShiftRuntimeConfig; import org.guvnor.ala.runtime.providers.ProviderId; /** * Cloneable implementation of OpenShiftRuntimeConfig. */ public class OpenShiftRuntimeConfigImpl implements OpenShiftRuntimeConfig, CloneableConfig<OpenShiftRuntimeConfig> { private String runtimeName; private ProviderId providerId; private String applicationName; private String kieServerContainerDeployment; private String projectName; private String resourceSecretsUri; private String resourceStreamsUri; private String resourceTemplateName; private String resourceTemplateParamDelimiter; private String resourceTemplateParamAssigner; private String resourceTemplateParamValues; private String resourceTemplateUri; private String serviceName; public OpenShiftRuntimeConfigImpl() { } public OpenShiftRuntimeConfigImpl( String runtimeName, ProviderId providerId, String applicationName, String kieServerContainerDeployment, String projectName, String resourceSecretsUri, String resourceStreamsUri, String resourceTemplateName, String resourceTemplateParamDelimiter, String resourceTemplateParamAssigner, String resourceTemplateParamValues, String resourceTemplateUri, String serviceName) { this.runtimeName = runtimeName; this.providerId = providerId; this.applicationName = applicationName; this.kieServerContainerDeployment = kieServerContainerDeployment; this.projectName = projectName; this.resourceSecretsUri = resourceSecretsUri; this.resourceStreamsUri = resourceStreamsUri; this.resourceTemplateName = resourceTemplateName; this.resourceTemplateParamDelimiter = resourceTemplateParamDelimiter; this.resourceTemplateParamAssigner = resourceTemplateParamAssigner; this.resourceTemplateParamValues = resourceTemplateParamValues; this.resourceTemplateUri = resourceTemplateUri; this.serviceName = serviceName; } @Override public String getRuntimeName() { return runtimeName; } public void setRuntimeName(String runtimeName) { this.runtimeName = runtimeName; } @Override public ProviderId getProviderId() { return providerId; } public void setProviderId(ProviderId providerId) { this.providerId = providerId; } @Override public String getApplicationName() { return applicationName; } public void setApplicationName(String applicationName) { this.applicationName = applicationName; } @Override public String getKieServerContainerDeployment() { return kieServerContainerDeployment; } public void setKieServerContainerDeployment(String kieServerContainerDeployment) { this.kieServerContainerDeployment = kieServerContainerDeployment; } @Override public String getProjectName() { return projectName; } public void setProjectName(String projectName) { this.projectName = projectName; } @Override public String getResourceSecretsUri() { return resourceSecretsUri; } public void setResourceSecretsUri(String resourceSecretsUri) { 
this.resourceSecretsUri = resourceSecretsUri; } @Override public String getResourceStreamsUri() { return resourceStreamsUri; } public void setResourceStreamsUri(String resourceStreamsUri) { this.resourceStreamsUri = resourceStreamsUri; } @Override public String getResourceTemplateName() { return resourceTemplateName; } public void setResourceTemplateName(String resourceTemplateName) { this.resourceTemplateName = resourceTemplateName; } @Override public String getResourceTemplateParamDelimiter() { return resourceTemplateParamDelimiter; } public void setResourceTemplateParamDelimiter(String resourceTemplateParamDelimiter) { this.resourceTemplateParamDelimiter = resourceTemplateParamDelimiter; } @Override public String getResourceTemplateParamAssigner() { return resourceTemplateParamAssigner; } public void setResourceTemplateParamAssigner(String resourceTemplateParamAssigner) { this.resourceTemplateParamAssigner = resourceTemplateParamAssigner; } @Override public String getResourceTemplateParamValues() { return resourceTemplateParamValues; } public void setResourceTemplateParamValues(String resourceTemplateParamValues) { this.resourceTemplateParamValues = resourceTemplateParamValues; } @Override public String getResourceTemplateUri() { return resourceTemplateUri; } public void setResourceTemplateUri(String resourceTemplateUri) { this.resourceTemplateUri = resourceTemplateUri; } @Override public String getServiceName() { return serviceName; } public void setServiceName(String serviceName) { this.serviceName = serviceName; } @Override public OpenShiftRuntimeConfig asNewClone(final OpenShiftRuntimeConfig source) { return new OpenShiftRuntimeConfigImpl( source.getRuntimeName(), source.getProviderId(), source.getApplicationName(), source.getKieServerContainerDeployment(), source.getProjectName(), source.getResourceSecretsUri(), source.getResourceStreamsUri(), source.getResourceTemplateName(), source.getResourceTemplateParamDelimiter(), source.getResourceTemplateParamAssigner(), source.getResourceTemplateParamValues(), source.getResourceTemplateUri(), source.getServiceName()); } @Override public String toString() { return "OpenShiftRuntimeConfigImpl{" + ", runtimeName=" + runtimeName + ", providerId=" + providerId + ", applicationName=" + applicationName + ", kieServerContainerDeployment=" + kieServerContainerDeployment + ", projectName=" + projectName + ", resourceSecretsUri=" + resourceSecretsUri + ", resourceStreamsUri=" + resourceStreamsUri + ", resourceTemplateName=" + resourceTemplateName + ", resourceTemplateParamDelimiter=" + resourceTemplateParamDelimiter + ", resourceTemplateParamAssigner=" + resourceTemplateParamAssigner + ", resourceTemplateParamValues=" + resourceTemplateParamValues + ", resourceTemplateUri=" + resourceTemplateUri + ", serviceName=" + serviceName + "}"; } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + ((applicationName == null) ? 0 : applicationName.hashCode()); result = prime * result + ((kieServerContainerDeployment == null) ? 0 : kieServerContainerDeployment.hashCode()); result = prime * result + ((projectName == null) ? 0 : projectName.hashCode()); result = prime * result + ((providerId == null) ? 0 : providerId.hashCode()); result = prime * result + ((resourceSecretsUri == null) ? 0 : resourceSecretsUri.hashCode()); result = prime * result + ((resourceStreamsUri == null) ? 0 : resourceStreamsUri.hashCode()); result = prime * result + ((resourceTemplateName == null) ? 
0 : resourceTemplateName.hashCode()); result = prime * result + ((resourceTemplateParamAssigner == null) ? 0 : resourceTemplateParamAssigner.hashCode()); result = prime * result + ((resourceTemplateParamDelimiter == null) ? 0 : resourceTemplateParamDelimiter.hashCode()); result = prime * result + ((resourceTemplateParamValues == null) ? 0 : resourceTemplateParamValues.hashCode()); result = prime * result + ((resourceTemplateUri == null) ? 0 : resourceTemplateUri.hashCode()); result = prime * result + ((runtimeName == null) ? 0 : runtimeName.hashCode()); result = prime * result + ((serviceName == null) ? 0 : serviceName.hashCode()); return result; } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (!(obj instanceof OpenShiftRuntimeConfigImpl)) { return false; } OpenShiftRuntimeConfigImpl other = (OpenShiftRuntimeConfigImpl) obj; if (applicationName == null) { if (other.applicationName != null) { return false; } } else if (!applicationName.equals(other.applicationName)) { return false; } if (kieServerContainerDeployment == null) { if (other.kieServerContainerDeployment != null) { return false; } } else if (!kieServerContainerDeployment.equals(other.kieServerContainerDeployment)) { return false; } if (projectName == null) { if (other.projectName != null) { return false; } } else if (!projectName.equals(other.projectName)) { return false; } if (providerId == null) { if (other.providerId != null) { return false; } } else if (!providerId.equals(other.providerId)) { return false; } if (resourceSecretsUri == null) { if (other.resourceSecretsUri != null) { return false; } } else if (!resourceSecretsUri.equals(other.resourceSecretsUri)) { return false; } if (resourceStreamsUri == null) { if (other.resourceStreamsUri != null) { return false; } } else if (!resourceStreamsUri.equals(other.resourceStreamsUri)) { return false; } if (resourceTemplateName == null) { if (other.resourceTemplateName != null) { return false; } } else if (!resourceTemplateName.equals(other.resourceTemplateName)) { return false; } if (resourceTemplateParamAssigner == null) { if (other.resourceTemplateParamAssigner != null) { return false; } } else if (!resourceTemplateParamAssigner.equals(other.resourceTemplateParamAssigner)) { return false; } if (resourceTemplateParamDelimiter == null) { if (other.resourceTemplateParamDelimiter != null) { return false; } } else if (!resourceTemplateParamDelimiter.equals(other.resourceTemplateParamDelimiter)) { return false; } if (resourceTemplateParamValues == null) { if (other.resourceTemplateParamValues != null) { return false; } } else if (!resourceTemplateParamValues.equals(other.resourceTemplateParamValues)) { return false; } if (resourceTemplateUri == null) { if (other.resourceTemplateUri != null) { return false; } } else if (!resourceTemplateUri.equals(other.resourceTemplateUri)) { return false; } if (runtimeName == null) { if (other.runtimeName != null) { return false; } } else if (!runtimeName.equals(other.runtimeName)) { return false; } if (serviceName == null) { if (other.serviceName != null) { return false; } } else if (!serviceName.equals(other.serviceName)) { return false; } return true; } }
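/*
 * Usage sketch for OpenShiftRuntimeConfigImpl (illustrative only, all values are made
 * up): asNewClone builds an independent copy purely from the getters of the source
 * config, so the clone compares equal to the original while being a distinct instance.
 */
class OpenShiftRuntimeConfigExample {

    static void demo() {
        OpenShiftRuntimeConfigImpl original = new OpenShiftRuntimeConfigImpl();
        original.setRuntimeName("my-runtime");     // hypothetical example values
        original.setApplicationName("my-app");
        original.setProjectName("my-project");
        original.setServiceName("my-app-service");

        OpenShiftRuntimeConfig clone = original.asNewClone(original);

        assert clone.equals(original) : "clone is field-by-field equal to its source";
        assert clone != original : "but it is a separate instance";
    }
}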
package org.sakaiproject.content.util; import java.io.BufferedOutputStream; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.nio.charset.Charset; import java.nio.charset.IllegalCharsetNameException; import java.nio.charset.UnsupportedCharsetException; import java.util.ArrayList; import java.util.Arrays; import java.util.Enumeration; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.zip.ZipEntry; import java.util.zip.ZipFile; import java.util.zip.ZipOutputStream; import javax.activation.MimetypesFileTypeMap; import org.apache.commons.io.IOUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.sakaiproject.component.cover.ServerConfigurationService; import org.sakaiproject.content.api.ContentCollection; import org.sakaiproject.content.api.ContentCollectionEdit; import org.sakaiproject.content.cover.ContentHostingService; import org.sakaiproject.content.api.ContentResource; import org.sakaiproject.content.api.ContentResourceEdit; import org.sakaiproject.entity.api.Entity; import org.sakaiproject.entity.api.Reference; import org.sakaiproject.entity.api.ResourcePropertiesEdit; import org.sakaiproject.event.api.NotificationService; import org.sakaiproject.exception.IdUnusedException; import org.sakaiproject.exception.IdUsedException; import org.sakaiproject.exception.PermissionException; import org.sakaiproject.exception.ServerOverloadException; import org.sakaiproject.exception.TypeException; import org.sakaiproject.tool.api.ToolSession; import org.sakaiproject.tool.cover.SessionManager; import org.sakaiproject.util.Resource; import org.sakaiproject.util.ResourceLoader; @SuppressWarnings({ "deprecation", "restriction" }) public class ZipContentUtil { protected static final Log LOG = LogFactory.getLog(ZipContentUtil.class); private static final String ZIP_EXTENSION = ".zip"; private static final int BUFFER_SIZE = 32000; private static final MimetypesFileTypeMap mime = new MimetypesFileTypeMap(); public static final String PREFIX = "resources."; public static final String REQUEST = "request."; private static final String STATE_HOME_COLLECTION_ID = PREFIX + REQUEST + "collection_home"; private static final String STATE_HOME_COLLECTION_DISPLAY_NAME = PREFIX + REQUEST + "collection_home_display_name"; public static final String STATE_MESSAGE = "message"; /** * Maximum number of files to extract from a zip archive (1000) */ public static final int MAX_ZIP_EXTRACT_FILES_DEFAULT = 1000; private static Integer MAX_ZIP_EXTRACT_FILES; private static final String DEFAULT_RESOURCECLASS = "org.sakaiproject.localization.util.ContentProperties"; private static final String DEFAULT_RESOURCEBUNDLE = "org.sakaiproject.localization.bundle.content.content"; private static final String RESOURCECLASS = "resource.class.content"; private static final String RESOURCEBUNDLE = "resource.bundle.content"; private static ResourceLoader rb = new Resource().getLoader(ServerConfigurationService.getString(RESOURCECLASS, DEFAULT_RESOURCECLASS), ServerConfigurationService.getString(RESOURCEBUNDLE, DEFAULT_RESOURCEBUNDLE)); public static int getMaxZipExtractFiles() { if(MAX_ZIP_EXTRACT_FILES == null){ MAX_ZIP_EXTRACT_FILES = ServerConfigurationService.getInt(org.sakaiproject.content.api.ContentHostingService.RESOURCES_ZIP_EXPAND_MAX,MAX_ZIP_EXTRACT_FILES_DEFAULT); } if (MAX_ZIP_EXTRACT_FILES <= 0) { MAX_ZIP_EXTRACT_FILES = 
MAX_ZIP_EXTRACT_FILES_DEFAULT; // any less than this is useless so probably a mistake LOG.warn("content.zip.expand.maxfiles is set to a value less than or equal to 0, defaulting to "+MAX_ZIP_EXTRACT_FILES_DEFAULT); } return MAX_ZIP_EXTRACT_FILES; } /** * Compresses a ContentCollection to a new zip archive with the same folder name * * @param reference sakai entity reference * @throws Exception on failure */ public void compressFolder(Reference reference) { File temp = null; FileInputStream fis = null; ToolSession toolSession = SessionManager.getCurrentToolSession(); try { // Create the compressed archive in the filesystem ZipOutputStream out = null; try { temp = File.createTempFile("sakai_content-", ".tmp"); ContentCollection collection = ContentHostingService.getCollection(reference.getId()); out = new ZipOutputStream(new BufferedOutputStream(new FileOutputStream(temp),BUFFER_SIZE)); storeContentCollection(reference.getId(),collection,out); } finally { if (out != null) { try { out.close(); } catch (IOException e) { } } } // Store the compressed archive in the repository String resourceId = reference.getId().substring(0,reference.getId().lastIndexOf(Entity.SEPARATOR)); String resourceName = extractName(resourceId); String homeCollectionId = (String) toolSession.getAttribute(STATE_HOME_COLLECTION_ID); if(homeCollectionId != null && homeCollectionId.equals(reference.getId())){ //place the zip file into the home folder of the resource tool resourceId = reference.getId() + resourceName; String homeName = (String) toolSession.getAttribute(STATE_HOME_COLLECTION_DISPLAY_NAME); if(homeName != null){ resourceName = homeName; } } int count = 0; ContentResourceEdit resourceEdit = null; while(true){ try{ String newResourceId = resourceId; String newResourceName = resourceName; count++; if(count > 1){ //previous naming convention failed, try another one newResourceId += "_" + count; newResourceName += "_" + count; } newResourceId += ZIP_EXTENSION; newResourceName += ZIP_EXTENSION; resourceEdit = ContentHostingService.addResource(newResourceId); //success, so keep track of name/id resourceId = newResourceId; resourceName = newResourceName; break; }catch(IdUsedException e){ //do nothing, just let it loop again }catch(Exception e){ throw new Exception(e); } } fis = new FileInputStream(temp); resourceEdit.setContent(fis); resourceEdit.setContentType(mime.getContentType(resourceId)); ResourcePropertiesEdit props = resourceEdit.getPropertiesEdit(); props.addProperty(ResourcePropertiesEdit.PROP_DISPLAY_NAME, resourceName); ContentHostingService.commitResource(resourceEdit, NotificationService.NOTI_NONE); } catch (PermissionException pE){ addAlert(toolSession, rb.getString("permission_error_zip")); LOG.warn(pE); } catch (Exception e) { addAlert(toolSession, rb.getString("generic_error_zip")); LOG.error(e); } finally { if (fis != null) { try { fis.close(); } catch (IOException e) { } } if (temp != null && temp.exists()) { if (!temp.delete()) { LOG.warn("failed to remove temp file"); } } } } private void addAlert(ToolSession toolSession, String alert){ String errorMessage = (String) toolSession.getAttribute(STATE_MESSAGE); if(errorMessage == null){ errorMessage = alert; }else{ errorMessage += "\n\n" + alert; } toolSession.setAttribute(STATE_MESSAGE, errorMessage); } /** * Extracts a compressed (zip) ContentResource to a new folder with the same name. 
* * @param reference the sakai entity reference * @throws Exception on failure * @deprecated 11 Oct 2011 -AZ, use {@link #extractArchive(String)} instead */ public void extractArchive(Reference reference) throws Exception { if (reference == null) { throw new IllegalArgumentException("reference cannot be null"); } extractArchive(reference.getId()); } /** * Extracts a compressed (zip) ContentResource to a new folder with the same name. * * @param referenceId the sakai entity reference id * @throws Exception on failure */ public void extractArchive(String referenceId) throws Exception { ContentResource resource = ContentHostingService.getResource(referenceId); String rootCollectionId = extractZipCollectionPrefix(resource); // Prepare Collection ContentCollectionEdit rootCollection = ContentHostingService.addCollection(rootCollectionId); ResourcePropertiesEdit prop = rootCollection.getPropertiesEdit(); prop.addProperty(ResourcePropertiesEdit.PROP_DISPLAY_NAME, extractZipCollectionName(resource)); ContentHostingService.commitCollection(rootCollection); // Extract Zip File File temp = null; try { temp = exportResourceToFile(resource); boolean extracted = false; for (String charsetName: getZipCharsets()) { Charset charset; try { charset = Charset.forName(charsetName); } catch (IllegalCharsetNameException | UnsupportedCharsetException e) { LOG.warn(String.format("%s is not a legal charset.", charsetName)); continue; } ZipFile zipFile = null; try { zipFile = new ZipFile(temp, charset); Enumeration<? extends ZipEntry> entries = zipFile.entries(); while (entries.hasMoreElements()) { ZipEntry nextElement = entries.nextElement(); if (!nextElement.getName().contains("__MACOSX")){ if (nextElement.isDirectory()) { createContentCollection(rootCollectionId, nextElement); } else { if(!nextElement.getName().contains(".DS_Store")){ createContentResource(rootCollectionId, nextElement, zipFile); } } } } extracted = true; break; } catch (Exception e) { e.printStackTrace(); LOG.warn(String.format("Cannot extract archive %s with charset %s.", referenceId, charset)); } finally { if (zipFile != null){ zipFile.close(); } } } if (!extracted) { LOG.warn(String.format("Cannot extract archives %s with any charset %s.", referenceId, getZipCharsets())); } } catch (Exception e) { e.printStackTrace(); } finally { temp.delete(); } } /** * Get a list of the files in a zip and their size * @param reference the sakai entity reference * @return a map of file names to file sizes in the zip archive * @deprecated 11 Oct 2011 -AZ, use {@link #getZipManifest(String)} */ public Map<String, Long> getZipManifest(Reference reference) { if (reference == null) { throw new IllegalArgumentException("reference cannot be null"); } return getZipManifest(reference.getId()); } /** * Get a list of the files in a zip and their size * @param referenceId the sakai entity reference id * @return a map of file names to file sizes in the zip archive */ public Map<String, Long> getZipManifest(String referenceId) { Map<String, Long> ret = new HashMap<String, Long>(); ContentResource resource; try { resource = ContentHostingService.getResource(referenceId); } catch (PermissionException e1) { return null; } catch (IdUnusedException e1) { return null; } catch (TypeException e1) { return null; } //String rootCollectionId = extractZipCollectionPrefix(resource); // Extract Zip File File temp = null; try { temp = exportResourceToFile(resource); boolean extracted = false; for (String charsetName: getZipCharsets()) { Charset charset; try { charset = 
Charset.forName(charsetName); } catch (IllegalCharsetNameException | UnsupportedCharsetException e) { LOG.warn(String.format("%s is not a legal charset.", charsetName)); continue; } ZipFile zipFile = null; try { zipFile = new ZipFile(temp, charset); Enumeration<? extends ZipEntry> entries = zipFile.entries(); int i = 0; //use <= getMaxZipExtractFiles() so the returned value will be //larger than the max and then rejected while (entries.hasMoreElements() && i <= getMaxZipExtractFiles()) { ZipEntry nextElement = entries.nextElement(); ret.put(nextElement.getName(), nextElement.getSize()); i++; } extracted = true; break; } catch (Exception e) { LOG.warn(String.format("Cannot get manifest of %s with charset %s.", referenceId, charset)); } finally { if (zipFile != null){ zipFile.close(); } } } if (!extracted) { LOG.warn(String.format("Cannot get manifest of %s with any charset %s.", referenceId, getZipCharsets())); } } catch (Exception e) { e.printStackTrace(); } finally { if (temp.exists()) { if (!temp.delete()) { LOG.warn("unable to delete temp file!"); } } } return ret; } /** * Creates a new ContentResource extracted from ZipFile * * @param rootCollectionId * @param nextElement * @param zipFile * @throws Exception */ private void createContentResource(String rootCollectionId, ZipEntry nextElement, ZipFile zipFile) throws Exception { String resourceId = rootCollectionId + nextElement.getName(); String resourceName = extractName(nextElement.getName()); ContentResourceEdit resourceEdit = ContentHostingService.addResource(resourceId); resourceEdit.setContent(zipFile.getInputStream(nextElement)); resourceEdit.setContentType(mime.getContentType(resourceName)); ResourcePropertiesEdit props = resourceEdit.getPropertiesEdit(); props.addProperty(ResourcePropertiesEdit.PROP_DISPLAY_NAME, resourceName); ContentHostingService.commitResource(resourceEdit, NotificationService.NOTI_NONE); } /** * Creates a new ContentCollection in the rootCollectionId with the element.getName() * * @param rootCollectionId * @param element * @throws Exception */ private void createContentCollection(String rootCollectionId, ZipEntry element) throws Exception { String resourceId = rootCollectionId + element.getName(); String resourceName = extractName(element.getName()); ContentCollectionEdit collection = ContentHostingService.addCollection(resourceId); ResourcePropertiesEdit props = collection.getPropertiesEdit(); props.addProperty(ResourcePropertiesEdit.PROP_DISPLAY_NAME, resourceName); ContentHostingService.commitCollection(collection); } /** * Exports the ContentResource zip file to the operating system * * @param resource * @return */ private File exportResourceToFile(ContentResource resource) { File temp = null; FileOutputStream out = null; try { temp = File.createTempFile("sakai_content-", ".tmp"); temp.deleteOnExit(); // Write content to file out = new FileOutputStream(temp); IOUtils.copy(resource.streamContent(),out); out.flush(); } catch (IOException e) { e.printStackTrace(); } catch (ServerOverloadException e) { e.printStackTrace(); } finally { if (out !=null) { try { out.close(); } catch (IOException e) { } } } return temp; } /** * Iterates the collection.getMembers() and streams content resources recursively to the ZipOutputStream * * @param rootId * @param collection * @param out * @throws Exception */ private void storeContentCollection(String rootId, ContentCollection collection, ZipOutputStream out) throws Exception { List<String> members = collection.getMembers(); for (String memberId: members) { if
(memberId.endsWith(Entity.SEPARATOR)) { ContentCollection memberCollection = ContentHostingService.getCollection(memberId); storeContentCollection(rootId,memberCollection,out); } else { ContentResource resource = ContentHostingService.getResource(memberId); storeContentResource(rootId, resource, out); } } } /** * Streams content resource to the ZipOutputStream * * @param rootId * @param resource * @param out * @throws Exception */ private void storeContentResource(String rootId, ContentResource resource, ZipOutputStream out) throws Exception { String filename = resource.getId().substring(rootId.length(),resource.getId().length()); ZipEntry zipEntry = new ZipEntry(filename); zipEntry.setSize(resource.getContentLength()); out.putNextEntry(zipEntry); InputStream contentStream = null; try { contentStream = resource.streamContent(); IOUtils.copy(contentStream, out); } finally { if (contentStream != null) { contentStream.close(); } } } private String extractZipCollectionPrefix(ContentResource resource) { String idPrefix = resource.getContainingCollection().getId() + extractZipCollectionName(resource) + Entity.SEPARATOR; return idPrefix; } private String extractName(String collectionName) { String[] tmp = collectionName.split(Entity.SEPARATOR); return tmp[tmp.length-1]; } private String extractZipCollectionName(ContentResource resource) { String tmp = extractName(resource.getId()); return tmp.substring(0, tmp.lastIndexOf(".")); } private List<String> getZipCharsets() { String[] charsetConfig = ServerConfigurationService.getStrings("content.zip.expand.charsets"); if (charsetConfig == null) { charsetConfig = new String[0]; } List<String> charsets = new ArrayList<>(Arrays.asList(charsetConfig)); // Add UTF-8 as fallback charsets.add("UTF-8"); return charsets; } }
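/*
 * Standalone sketch of the charset-fallback strategy used by extractArchive and
 * getZipManifest in ZipContentUtil above: each configured charset is tried in turn,
 * with UTF-8 always appended as a last resort, and the first charset that can decode
 * the whole central directory wins. Plain java.util.zip and java.nio, no Sakai APIs;
 * the class and method names here are illustrative only.
 */
class ZipCharsetFallbackExample {

    static Map<String, Long> listEntriesWithFallback(File archive, List<String> charsetNames) {
        Map<String, Long> entriesByName = new HashMap<>();
        for (String charsetName : charsetNames) {
            try (ZipFile zip = new ZipFile(archive, Charset.forName(charsetName))) {
                Enumeration<? extends ZipEntry> entries = zip.entries();
                while (entries.hasMoreElements()) {
                    ZipEntry entry = entries.nextElement();
                    entriesByName.put(entry.getName(), entry.getSize());
                }
                return entriesByName; // all entry names decoded cleanly with this charset
            } catch (Exception e) {
                entriesByName.clear(); // undecodable or unreadable, try the next charset
            }
        }
        return entriesByName; // empty map: no configured charset could read the archive
    }
}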
/* * Copyright (c) 2010-2015 Evolveum * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.evolveum.icf.dummy.resource; import java.io.FileNotFoundException; import java.net.ConnectException; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Random; import java.util.UUID; import java.util.concurrent.ConcurrentHashMap; import com.evolveum.midpoint.util.exception.SystemException; import org.apache.commons.lang.StringUtils; import com.evolveum.midpoint.util.DebugDumpable; import com.evolveum.midpoint.util.DebugUtil; import com.evolveum.midpoint.util.logging.Trace; import com.evolveum.midpoint.util.logging.TraceManager; /** * Resource for use with dummy ICF connector. * * This is a simple Java object that pretends to be a resource. It has accounts and * account schema. It has operations to manipulate accounts, execute scripts and so on * almost like a real resource. The purpose is to simulate a real resource with very * little overhead. * * The resource is a singleton, therefore the resource instance can be shared by * the connector and the test code. The usual story is like this: * * 1) test class fetches the first instance of the resource (getInstance). This will cause * loading of the resource class in the test (parent) classloader. * * 2) test class configures the connector (e.g. schema) usually by calling the populateWithDefaultSchema() method. * * 3) test class initializes IDM. This will cause connector initialization. The connector will fetch * the instance of dummy resource. As it was loaded by the parent classloader, it will get the same instance * as the test class. * * 4) test class invokes an IDM operation. That will invoke the connector and change the resource. * * 5) test class will access the resource directly to see if the operation went OK. * * The dummy resource is a separate package (JAR) from the dummy connector. The connector has its own * classloader. If the resource were in the same package as the connector, it would be loaded by the * connector classloader regardless of whether it is already loaded by the parent classloader.
* * @author Radovan Semancik * */ public class DummyResource implements DebugDumpable { private static final Trace LOGGER = TraceManager.getTrace(DummyResource.class); private String instanceName; private Map<String,DummyObject> allObjects; private Map<String,DummyAccount> accounts; private Map<String,DummyGroup> groups; private Map<String,DummyPrivilege> privileges; private List<ScriptHistoryEntry> scriptHistory; private DummyObjectClass accountObjectClass; private DummyObjectClass groupObjectClass; private DummyObjectClass privilegeObjectClass; private DummySyncStyle syncStyle; private List<DummyDelta> deltas; private int latestSyncToken; private boolean tolerateDuplicateValues = false; private boolean generateDefaultValues = false; private boolean enforceUniqueName = true; private boolean enforceSchema = true; private boolean caseIgnoreId = false; private boolean caseIgnoreValues = false; private int connectionCount = 0; private int groupMembersReadCount = 0; private Collection<String> forbiddenNames; private BreakMode schemaBreakMode = BreakMode.NONE; private BreakMode getBreakMode = BreakMode.NONE; private BreakMode addBreakMode = BreakMode.NONE; private BreakMode modifyBreakMode = BreakMode.NONE; private BreakMode deleteBreakMode = BreakMode.NONE; private boolean generateAccountDescriptionOnCreate = false; // simulates volatile behavior (on create) private boolean generateAccountDescriptionOnUpdate = false; // simulates volatile behavior (on update) // Following two properties are just copied from the connector // configuration and can be checked later. They are otherwise // completely useless. private String uselessString; private String uselessGuardedString; private static Map<String, DummyResource> instances = new HashMap<String, DummyResource>(); DummyResource() { allObjects = new ConcurrentHashMap<String,DummyObject>(); accounts = new ConcurrentHashMap<String, DummyAccount>(); groups = new ConcurrentHashMap<String, DummyGroup>(); privileges = new ConcurrentHashMap<String, DummyPrivilege>(); scriptHistory = new ArrayList<ScriptHistoryEntry>(); accountObjectClass = new DummyObjectClass(); groupObjectClass = new DummyObjectClass(); privilegeObjectClass = new DummyObjectClass(); syncStyle = DummySyncStyle.NONE; deltas = new ArrayList<DummyDelta>(); latestSyncToken = 0; } /** * Clears everything, just like the resource was just created.
*/ public void reset() { allObjects.clear(); accounts.clear(); groups.clear(); privileges.clear(); scriptHistory.clear(); accountObjectClass = new DummyObjectClass(); groupObjectClass = new DummyObjectClass(); privilegeObjectClass = new DummyObjectClass(); syncStyle = DummySyncStyle.NONE; deltas.clear(); latestSyncToken = 0; resetBreakMode(); } public static DummyResource getInstance() { return getInstance(null); } public static DummyResource getInstance(String instanceName) { DummyResource instance = instances.get(instanceName); if (instance == null) { instance = new DummyResource(); instance.setInstanceName(instanceName); instances.put(instanceName, instance); } return instance; } public String getInstanceName() { return instanceName; } public void setInstanceName(String instanceName) { this.instanceName = instanceName; } public boolean isTolerateDuplicateValues() { return tolerateDuplicateValues; } public void setTolerateDuplicateValues(boolean tolerateDuplicateValues) { this.tolerateDuplicateValues = tolerateDuplicateValues; } public boolean isGenerateDefaultValues() { return generateDefaultValues; } public void setGenerateDefaultValues(boolean generateDefaultValues) { this.generateDefaultValues = generateDefaultValues; } public boolean isEnforceUniqueName() { return enforceUniqueName; } public void setEnforceUniqueName(boolean enforceUniqueName) { this.enforceUniqueName = enforceUniqueName; } public boolean isEnforceSchema() { return enforceSchema; } public void setEnforceSchema(boolean enforceSchema) { this.enforceSchema = enforceSchema; } public BreakMode getSchemaBreakMode() { return schemaBreakMode; } public void setSchemaBreakMode(BreakMode schemaBreakMode) { this.schemaBreakMode = schemaBreakMode; } public BreakMode getAddBreakMode() { return addBreakMode; } public void setAddBreakMode(BreakMode addBreakMode) { this.addBreakMode = addBreakMode; } public BreakMode getGetBreakMode() { return getBreakMode; } public void setGetBreakMode(BreakMode getBreakMode) { this.getBreakMode = getBreakMode; } public BreakMode getModifyBreakMode() { return modifyBreakMode; } public void setModifyBreakMode(BreakMode modifyBreakMode) { this.modifyBreakMode = modifyBreakMode; } public BreakMode getDeleteBreakMode() { return deleteBreakMode; } public void setDeleteBreakMode(BreakMode deleteBreakMode) { this.deleteBreakMode = deleteBreakMode; } public void setBreakMode(BreakMode breakMode) { this.schemaBreakMode = breakMode; this.addBreakMode = breakMode; this.getBreakMode = breakMode; this.modifyBreakMode = breakMode; this.deleteBreakMode = breakMode; } public void resetBreakMode() { setBreakMode(BreakMode.NONE); } public String getUselessString() { return uselessString; } public void setUselessString(String uselessString) { this.uselessString = uselessString; } public String getUselessGuardedString() { return uselessGuardedString; } public void setUselessGuardedString(String uselessGuardedString) { this.uselessGuardedString = uselessGuardedString; } public boolean isCaseIgnoreId() { return caseIgnoreId; } public void setCaseIgnoreId(boolean caseIgnoreId) { this.caseIgnoreId = caseIgnoreId; } public boolean isCaseIgnoreValues() { return caseIgnoreValues; } public void setCaseIgnoreValues(boolean caseIgnoreValues) { this.caseIgnoreValues = caseIgnoreValues; } public boolean isGenerateAccountDescriptionOnCreate() { return generateAccountDescriptionOnCreate; } public void setGenerateAccountDescriptionOnCreate(boolean generateAccountDescriptionOnCreate) { this.generateAccountDescriptionOnCreate = 
generateAccountDescriptionOnCreate; } public boolean isGenerateAccountDescriptionOnUpdate() { return generateAccountDescriptionOnUpdate; } public void setGenerateAccountDescriptionOnUpdate(boolean generateAccountDescriptionOnUpdate) { this.generateAccountDescriptionOnUpdate = generateAccountDescriptionOnUpdate; } public Collection<String> getForbiddenNames() { return forbiddenNames; } public void setForbiddenNames(Collection<String> forbiddenNames) { this.forbiddenNames = forbiddenNames; } public int getConnectionCount() { return connectionCount; } public synchronized void connect() { connectionCount++; } public synchronized void disconnect() { connectionCount--; } public void assertNoConnections() { assert connectionCount == 0 : "Dummy resource: "+connectionCount+" connections still open"; } public int getGroupMembersReadCount() { return groupMembersReadCount; } public void setGroupMembersReadCount(int groupMembersReadCount) { this.groupMembersReadCount = groupMembersReadCount; } public void recordGroupMembersReadCount() { groupMembersReadCount++; traceOperation("groupMembersRead", groupMembersReadCount); } public DummyObjectClass getAccountObjectClass() throws ConnectException, FileNotFoundException { if (schemaBreakMode == BreakMode.NONE) { return accountObjectClass; } else if (schemaBreakMode == BreakMode.NETWORK) { throw new ConnectException("The schema is not available (simulated error)"); } else if (schemaBreakMode == BreakMode.IO) { throw new FileNotFoundException("The schema file not found (simulated error)"); } else if (schemaBreakMode == BreakMode.GENERIC) { // The connector will react with generic exception throw new IllegalArgumentException("Generic error fetching schema (simulated error)"); } else if (schemaBreakMode == BreakMode.RUNTIME) { // The connector will just pass this up throw new IllegalStateException("Generic error fetching schema (simulated error)"); } else if (schemaBreakMode == BreakMode.UNSUPPORTED) { throw new UnsupportedOperationException("Schema is not supported (simulated error)"); } else { // This is a real error. Use this strange thing to make sure it passes up throw new RuntimeException("Unknown schema break mode "+schemaBreakMode); } } public DummyObjectClass getGroupObjectClass() { return groupObjectClass; } public DummyObjectClass getPrivilegeObjectClass() { return privilegeObjectClass; } public Collection<DummyAccount> listAccounts() throws ConnectException, FileNotFoundException { if (getBreakMode == BreakMode.NONE) { return accounts.values(); } else if (schemaBreakMode == BreakMode.NETWORK) { throw new ConnectException("Network error (simulated error)"); } else if (schemaBreakMode == BreakMode.IO) { throw new FileNotFoundException("IO error (simulated error)"); } else if (schemaBreakMode == BreakMode.GENERIC) { // The connector will react with generic exception throw new IllegalArgumentException("Generic error (simulated error)"); } else if (schemaBreakMode == BreakMode.RUNTIME) { // The connector will just pass this up throw new IllegalStateException("Generic error (simulated error)"); } else if (schemaBreakMode == BreakMode.UNSUPPORTED) { throw new UnsupportedOperationException("Not supported (simulated error)"); } else { // This is a real error. 
Use this strange thing to make sure it passes up throw new RuntimeException("Unknown schema break mode "+schemaBreakMode); } } private <T extends DummyObject> T getObjectByName(Map<String,T> map, String name) throws ConnectException, FileNotFoundException { if (!enforceUniqueName) { throw new IllegalStateException("Attempt to search object by name while resource is in non-unique name mode"); } if (getBreakMode == BreakMode.NONE) { return map.get(normalize(name)); } else if (schemaBreakMode == BreakMode.NETWORK) { throw new ConnectException("Network error (simulated error)"); } else if (schemaBreakMode == BreakMode.IO) { throw new FileNotFoundException("IO error (simulated error)"); } else if (schemaBreakMode == BreakMode.GENERIC) { // The connector will react with generic exception throw new IllegalArgumentException("Generic error (simulated error)"); } else if (schemaBreakMode == BreakMode.RUNTIME) { // The connector will just pass this up throw new IllegalStateException("Generic error (simulated error)"); } else if (schemaBreakMode == BreakMode.UNSUPPORTED) { throw new UnsupportedOperationException("Not supported (simulated error)"); } else { // This is a real error. Use this strange thing to make sure it passes up throw new RuntimeException("Unknown schema break mode "+schemaBreakMode); } } public DummyAccount getAccountByUsername(String username) throws ConnectException, FileNotFoundException { return getObjectByName(accounts, username); } public DummyGroup getGroupByName(String name) throws ConnectException, FileNotFoundException { return getObjectByName(groups, name); } public DummyPrivilege getPrivilegeByName(String name) throws ConnectException, FileNotFoundException { return getObjectByName(privileges, name); } private <T extends DummyObject> T getObjectById(Class<T> expectedClass, String id) throws ConnectException, FileNotFoundException { if (getBreakMode == BreakMode.NONE) { DummyObject dummyObject = allObjects.get(id); if (dummyObject == null) { return null; } if (!expectedClass.isInstance(dummyObject)) { throw new IllegalStateException("Arrrr! Wanted "+expectedClass+" with ID "+id+" but got "+dummyObject+" instead"); } return (T)dummyObject; } else if (schemaBreakMode == BreakMode.NETWORK) { throw new ConnectException("Network error (simulated error)"); } else if (schemaBreakMode == BreakMode.IO) { throw new FileNotFoundException("IO error (simulated error)"); } else if (schemaBreakMode == BreakMode.GENERIC) { // The connector will react with generic exception throw new IllegalArgumentException("Generic error (simulated error)"); } else if (schemaBreakMode == BreakMode.RUNTIME) { // The connector will just pass this up throw new IllegalStateException("Generic error (simulated error)"); } else if (schemaBreakMode == BreakMode.UNSUPPORTED) { throw new UnsupportedOperationException("Not supported (simulated error)"); } else { // This is a real error. 
Use this strange thing to make sure it passes up throw new RuntimeException("Unknown schema break mode "+schemaBreakMode); } } public DummyAccount getAccountById(String id) throws ConnectException, FileNotFoundException { return getObjectById(DummyAccount.class, id); } public DummyGroup getGroupById(String id) throws ConnectException, FileNotFoundException { return getObjectById(DummyGroup.class, id); } public DummyPrivilege getPrivilegeById(String id) throws ConnectException, FileNotFoundException { return getObjectById(DummyPrivilege.class, id); } public Collection<DummyGroup> listGroups() throws ConnectException, FileNotFoundException { if (getBreakMode == BreakMode.NONE) { return groups.values(); } else if (schemaBreakMode == BreakMode.NETWORK) { throw new ConnectException("Network error (simulated error)"); } else if (schemaBreakMode == BreakMode.IO) { throw new FileNotFoundException("IO error (simulated error)"); } else if (schemaBreakMode == BreakMode.GENERIC) { // The connector will react with generic exception throw new IllegalArgumentException("Generic error (simulated error)"); } else if (schemaBreakMode == BreakMode.RUNTIME) { // The connector will just pass this up throw new IllegalStateException("Generic error (simulated error)"); } else if (schemaBreakMode == BreakMode.UNSUPPORTED) { throw new UnsupportedOperationException("Not supported (simulated error)"); } else { // This is a real error. Use this strange thing to make sure it passes up throw new RuntimeException("Unknown schema break mode "+schemaBreakMode); } } public Collection<DummyPrivilege> listPrivileges() throws ConnectException, FileNotFoundException { if (getBreakMode == BreakMode.NONE) { return privileges.values(); } else if (schemaBreakMode == BreakMode.NETWORK) { throw new ConnectException("Network error (simulated error)"); } else if (schemaBreakMode == BreakMode.IO) { throw new FileNotFoundException("IO error (simulated error)"); } else if (schemaBreakMode == BreakMode.GENERIC) { // The connector will react with generic exception throw new IllegalArgumentException("Generic error (simulated error)"); } else if (schemaBreakMode == BreakMode.RUNTIME) { // The connector will just pass this up throw new IllegalStateException("Generic error (simulated error)"); } else if (schemaBreakMode == BreakMode.UNSUPPORTED) { throw new UnsupportedOperationException("Not supported (simulated error)"); } else { // This is a real error. 
Use this strange thing to make sure it passes up throw new RuntimeException("Unknown schema break mode "+schemaBreakMode); } } private synchronized <T extends DummyObject> String addObject(Map<String,T> map, T newObject) throws ObjectAlreadyExistsException, ConnectException, FileNotFoundException, SchemaViolationException { if (addBreakMode == BreakMode.NONE) { // just go on } else if (addBreakMode == BreakMode.NETWORK) { throw new ConnectException("Network error during add (simulated error)"); } else if (addBreakMode == BreakMode.IO) { throw new FileNotFoundException("IO error during add (simulated error)"); } else if (addBreakMode == BreakMode.GENERIC) { // The connector will react with generic exception throw new IllegalArgumentException("Generic error during add (simulated error)"); } else if (addBreakMode == BreakMode.RUNTIME) { // The connector will just pass this up throw new IllegalStateException("Generic rutime error during add (simulated error)"); } else if (addBreakMode == BreakMode.UNSUPPORTED) { throw new UnsupportedOperationException("Unsupported operation: add (simulated error)"); } else { // This is a real error. Use this strange thing to make sure it passes up throw new RuntimeException("Unknown break mode "+addBreakMode); } Class<? extends DummyObject> type = newObject.getClass(); String normalName = normalize(newObject.getName()); if (normalName != null && forbiddenNames != null && forbiddenNames.contains(normalName)) { throw new ObjectAlreadyExistsException(normalName + " is forbidden to use as an object name"); } String newId = UUID.randomUUID().toString(); newObject.setId(newId); if (allObjects.containsKey(newId)) { throw new IllegalStateException("The hell is frozen over. The impossible has happened. ID "+newId+" already exists ("+ type.getSimpleName()+" with identifier "+normalName+")"); } //this is "resource-generated" attribute (used to simulate resource which generate by default attributes which we need to sync) if (generateDefaultValues){ // int internalId = allObjects.size(); newObject.addAttributeValue(DummyAccount.ATTR_INTERNAL_ID, new Random().nextInt()); } String mapKey; if (enforceUniqueName) { mapKey = normalName; } else { mapKey = newId; } if (map.containsKey(mapKey)) { throw new ObjectAlreadyExistsException(type.getSimpleName()+" with name '"+normalName+"' already exists"); } newObject.setResource(this); map.put(mapKey, newObject); allObjects.put(newId, newObject); if (syncStyle != DummySyncStyle.NONE) { int syncToken = nextSyncToken(); DummyDelta delta = new DummyDelta(syncToken, type, newId, newObject.getName(), DummyDeltaType.ADD); deltas.add(delta); } return newObject.getName(); } private synchronized <T extends DummyObject> void deleteObjectByName(Class<T> type, Map<String,T> map, String name) throws ObjectDoesNotExistException, ConnectException, FileNotFoundException { if (deleteBreakMode == BreakMode.NONE) { // go on } else if (deleteBreakMode == BreakMode.NETWORK) { throw new ConnectException("Network error (simulated error)"); } else if (deleteBreakMode == BreakMode.IO) { throw new FileNotFoundException("IO error (simulated error)"); } else if (deleteBreakMode == BreakMode.GENERIC) { // The connector will react with generic exception throw new IllegalArgumentException("Generic error (simulated error)"); } else if (deleteBreakMode == BreakMode.RUNTIME) { // The connector will just pass this up throw new IllegalStateException("Generic error (simulated error)"); } else if (deleteBreakMode == BreakMode.UNSUPPORTED) { throw new 
UnsupportedOperationException("Not supported (simulated error)"); } else { // This is a real error. Use this strange thing to make sure it passes up throw new RuntimeException("Unknown schema break mode "+schemaBreakMode); } String normalName = normalize(name); T existingObject; if (!enforceUniqueName) { throw new IllegalStateException("Whoops! got into deleteObjectByName without enforceUniqueName"); } if (map.containsKey(normalName)) { existingObject = map.get(normalName); map.remove(normalName); allObjects.remove(existingObject.getId()); } else { throw new ObjectDoesNotExistException(type.getSimpleName()+" with name '"+normalName+"' does not exist"); } if (syncStyle != DummySyncStyle.NONE) { int syncToken = nextSyncToken(); DummyDelta delta = new DummyDelta(syncToken, type, existingObject.getId(), name, DummyDeltaType.DELETE); deltas.add(delta); } } public void deleteAccountById(String id) throws ConnectException, FileNotFoundException, ObjectDoesNotExistException { deleteObjectById(DummyAccount.class, accounts, id); } public void deleteGroupById(String id) throws ConnectException, FileNotFoundException, ObjectDoesNotExistException { deleteObjectById(DummyGroup.class, groups, id); } public void deletePrivilegeById(String id) throws ConnectException, FileNotFoundException, ObjectDoesNotExistException { deleteObjectById(DummyPrivilege.class, privileges, id); } private synchronized <T extends DummyObject> void deleteObjectById(Class<T> type, Map<String,T> map, String id) throws ObjectDoesNotExistException, ConnectException, FileNotFoundException { if (deleteBreakMode == BreakMode.NONE) { // go on } else if (deleteBreakMode == BreakMode.NETWORK) { throw new ConnectException("Network error (simulated error)"); } else if (deleteBreakMode == BreakMode.IO) { throw new FileNotFoundException("IO error (simulated error)"); } else if (deleteBreakMode == BreakMode.GENERIC) { // The connector will react with generic exception throw new IllegalArgumentException("Generic error (simulated error)"); } else if (deleteBreakMode == BreakMode.RUNTIME) { // The connector will just pass this up throw new IllegalStateException("Generic error (simulated error)"); } else if (deleteBreakMode == BreakMode.UNSUPPORTED) { throw new UnsupportedOperationException("Not supported (simulated error)"); } else { // This is a real error. Use this strange thing to make sure it passes up throw new RuntimeException("Unknown schema break mode "+schemaBreakMode); } DummyObject object = allObjects.get(id); if (object == null) { throw new ObjectDoesNotExistException(type.getSimpleName()+" with id '"+id+"' does not exist"); } if (!type.isInstance(object)) { throw new IllegalStateException("Arrrr! 
Wanted "+type+" with ID "+id+" but got "+object+" instead"); } T existingObject = (T)object; String normalName = normalize(object.getName()); allObjects.remove(id); String mapKey; if (enforceUniqueName) { mapKey = normalName; } else { mapKey = id; } if (map.containsKey(mapKey)) { map.remove(mapKey); } else { throw new ObjectDoesNotExistException(type.getSimpleName()+" with name '"+normalName+"' does not exist"); } if (syncStyle != DummySyncStyle.NONE) { int syncToken = nextSyncToken(); DummyDelta delta = new DummyDelta(syncToken, type, id, object.getName(), DummyDeltaType.DELETE); deltas.add(delta); } } private <T extends DummyObject> void renameObject(Class<T> type, Map<String,T> map, String id, String oldName, String newName) throws ObjectDoesNotExistException, ObjectAlreadyExistsException, ConnectException, FileNotFoundException { if (modifyBreakMode == BreakMode.NONE) { // go on } else if (modifyBreakMode == BreakMode.NETWORK) { throw new ConnectException("Network error (simulated error)"); } else if (modifyBreakMode == BreakMode.IO) { throw new FileNotFoundException("IO error (simulated error)"); } else if (modifyBreakMode == BreakMode.GENERIC) { // The connector will react with generic exception throw new IllegalArgumentException("Generic error (simulated error)"); } else if (modifyBreakMode == BreakMode.RUNTIME) { // The connector will just pass this up throw new IllegalStateException("Generic error (simulated error)"); } else if (modifyBreakMode == BreakMode.UNSUPPORTED) { throw new UnsupportedOperationException("Not supported (simulated error)"); } else { // This is a real error. Use this strange thing to make sure it passes up throw new RuntimeException("Unknown schema break mode "+schemaBreakMode); } T existingObject; if (enforceUniqueName) { String normalOldName = normalize(oldName); String normalNewName = normalize(newName); existingObject = map.get(normalOldName); if (existingObject == null) { throw new ObjectDoesNotExistException("Cannot rename, "+type.getSimpleName()+" with username '"+normalOldName+"' does not exist"); } if (map.containsKey(normalNewName)) { throw new ObjectAlreadyExistsException("Cannot rename, "+type.getSimpleName()+" with username '"+normalNewName+"' already exists"); } map.put(normalNewName, existingObject); map.remove(normalOldName); } else { existingObject = (T) allObjects.get(id); } existingObject.setName(newName); if (existingObject instanceof DummyAccount) { changeDescriptionIfNeeded((DummyAccount) existingObject); } } public String addAccount(DummyAccount newAccount) throws ObjectAlreadyExistsException, ConnectException, FileNotFoundException, SchemaViolationException { if (generateAccountDescriptionOnCreate && newAccount.getAttributeValue(DummyAccount.ATTR_DESCRIPTION_NAME) == null) { newAccount.addAttributeValue(DummyAccount.ATTR_DESCRIPTION_NAME, "Description of " + newAccount.getName()); } return addObject(accounts, newAccount); } public void deleteAccountByName(String id) throws ObjectDoesNotExistException, ConnectException, FileNotFoundException { deleteObjectByName(DummyAccount.class, accounts, id); } public void renameAccount(String id, String oldUsername, String newUsername) throws ObjectDoesNotExistException, ObjectAlreadyExistsException, ConnectException, FileNotFoundException, SchemaViolationException { renameObject(DummyAccount.class, accounts, id, oldUsername, newUsername); for (DummyGroup group : groups.values()) { if (group.containsMember(oldUsername)) { group.removeMember(oldUsername); group.addMember(newUsername); } } } public 
void changeDescriptionIfNeeded(DummyAccount account) { if (generateAccountDescriptionOnUpdate) { try { account.replaceAttributeValue(DummyAccount.ATTR_DESCRIPTION_NAME, "Updated description of " + account.getName()); } catch (SchemaViolationException|ConnectException|FileNotFoundException e) { throw new SystemException("Couldn't replace the 'description' attribute value", e); } } } public String addGroup(DummyGroup newGroup) throws ObjectAlreadyExistsException, ConnectException, FileNotFoundException, SchemaViolationException { return addObject(groups, newGroup); } public void deleteGroupByName(String id) throws ObjectDoesNotExistException, ConnectException, FileNotFoundException { deleteObjectByName(DummyGroup.class, groups, id); } public void renameGroup(String id, String oldName, String newName) throws ObjectDoesNotExistException, ObjectAlreadyExistsException, ConnectException, FileNotFoundException { renameObject(DummyGroup.class, groups, id, oldName, newName); } public String addPrivilege(DummyPrivilege newPrivilege) throws ObjectAlreadyExistsException, ConnectException, FileNotFoundException, SchemaViolationException { return addObject(privileges, newPrivilege); } public void deletePrivilegeByName(String id) throws ObjectDoesNotExistException, ConnectException, FileNotFoundException { deleteObjectByName(DummyPrivilege.class, privileges, id); } public void renamePrivilege(String id, String oldName, String newName) throws ObjectDoesNotExistException, ObjectAlreadyExistsException, ConnectException, FileNotFoundException { renameObject(DummyPrivilege.class, privileges, id, oldName, newName); } void recordModify(DummyObject dObject) { if (syncStyle != DummySyncStyle.NONE) { int syncToken = nextSyncToken(); DummyDelta delta = new DummyDelta(syncToken, dObject.getClass(), dObject.getId(), dObject.getName(), DummyDeltaType.MODIFY); deltas.add(delta); } } /** * Returns script history ordered chronologically (oldest first). * @return script history */ public List<ScriptHistoryEntry> getScriptHistory() { return scriptHistory; } /** * Clears the script history. */ public void purgeScriptHistory() { scriptHistory.clear(); } /** * Pretend to run script on the resource. * The script is actually not executed, it is only recorded in the script history * and can be fetched by getScriptHistory(). * * @param language language of the script * @param scriptCode code of the script * @param params script parameters */ public void runScript(String language, String scriptCode, Map<String, Object> params) { scriptHistory.add(new ScriptHistoryEntry(language, scriptCode, params)); } /** * Populates the resource with some kind of "default" schema. This is a schema that should suit * majority of basic test cases.
*/ public void populateWithDefaultSchema() { accountObjectClass.clear(); accountObjectClass.addAttributeDefinition(DummyAccount.ATTR_FULLNAME_NAME, String.class, true, false); accountObjectClass.addAttributeDefinition(DummyAccount.ATTR_INTERNAL_ID, String.class, false, false); accountObjectClass.addAttributeDefinition(DummyAccount.ATTR_DESCRIPTION_NAME, String.class, false, false); accountObjectClass.addAttributeDefinition(DummyAccount.ATTR_INTERESTS_NAME, String.class, false, true); accountObjectClass.addAttributeDefinition(DummyAccount.ATTR_PRIVILEGES_NAME, String.class, false, true); groupObjectClass.clear(); groupObjectClass.addAttributeDefinition(DummyGroup.ATTR_MEMBERS_NAME, String.class, false, true); privilegeObjectClass.clear(); } public DummySyncStyle getSyncStyle() { return syncStyle; } public void setSyncStyle(DummySyncStyle syncStyle) { this.syncStyle = syncStyle; } private synchronized int nextSyncToken() { return ++latestSyncToken; } public int getLatestSyncToken() { return latestSyncToken; } private String normalize(String id) { if (caseIgnoreId) { return StringUtils.lowerCase(id); } else { return id; } } public List<DummyDelta> getDeltasSince(int syncToken) { List<DummyDelta> result = new ArrayList<DummyDelta>(); for (DummyDelta delta: deltas) { if (delta.getSyncToken() > syncToken) { result.add(delta); } } return result; } private void traceOperation(String opName, long counter) { LOGGER.info("MONITOR dummy '{}' {} ({})", instanceName, opName, counter); if (LOGGER.isDebugEnabled()) { StackTraceElement[] fullStack = Thread.currentThread().getStackTrace(); String immediateClass = null; String immediateMethod = null; StringBuilder sb = new StringBuilder(); for (StackTraceElement stackElement: fullStack) { if (stackElement.getClassName().equals(DummyResource.class.getName()) || stackElement.getClassName().equals(Thread.class.getName())) { // skip our own calls continue; } if (immediateClass == null) { immediateClass = stackElement.getClassName(); immediateMethod = stackElement.getMethodName(); } sb.append(stackElement.toString()); sb.append("\n"); } LOGGER.debug("MONITOR dummy '{}' {} ({}): {} {}", new Object[]{instanceName, opName, counter, immediateClass, immediateMethod}); LOGGER.trace("MONITOR dummy '{}' {} ({}):\n{}", new Object[]{instanceName, opName, counter, sb}); } } @Override public String debugDump() { return debugDump(0); } @Override public String debugDump(int indent) { StringBuilder sb = new StringBuilder(toString()); DebugUtil.indentDebugDump(sb, indent); sb.append("\nAccounts:"); for (Entry<String, DummyAccount> entry: accounts.entrySet()) { sb.append("\n "); sb.append(entry.getKey()); sb.append(": "); sb.append(entry.getValue()); } sb.append("\nGroups:"); for (Entry<String, DummyGroup> entry: groups.entrySet()) { sb.append("\n "); sb.append(entry.getKey()); sb.append(": "); sb.append(entry.getValue()); } sb.append("\nDeltas:"); for (DummyDelta delta: deltas) { sb.append("\n "); sb.append(delta); } sb.append("\nLatest token:").append(latestSyncToken); return sb.toString(); } @Override public String toString() { return "DummyResource("+accounts.size()+" accounts, "+groups.size()+" groups)"; } }
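/*
 * Usage sketch (not part of the original sources): a minimal example of driving the
 * DummyResource above from a test. It assumes the same package as DummyResource and that
 * ConnectException is java.net.ConnectException, as suggested by the simulated network
 * errors; the instance name "sketch" is an illustrative assumption.
 */
class DummyResourceUsageSketch {
    public static void main(String[] args) throws Exception {
        DummyResource resource = DummyResource.getInstance("sketch");
        resource.reset();
        resource.populateWithDefaultSchema();

        // Normal operation: no accounts exist right after reset().
        resource.setBreakMode(BreakMode.NONE);
        System.out.println("accounts after reset: " + resource.listAccounts().size());

        // Simulate a network outage; listAccounts() should now fail with ConnectException.
        resource.setBreakMode(BreakMode.NETWORK);
        try {
            resource.listAccounts();
        } catch (java.net.ConnectException e) {
            System.out.println("simulated outage: " + e.getMessage());
        } finally {
            resource.resetBreakMode();
        }

        // Scripts are only recorded, never executed; the history can be inspected afterwards.
        resource.runScript("Groovy", "println 'hello'", new java.util.HashMap<String, Object>());
        System.out.println("recorded scripts: " + resource.getScriptHistory().size());
        resource.purgeScriptHistory();
    }
}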
package org.zalando.riptide; import org.junit.jupiter.api.Test; import org.springframework.core.io.ClassPathResource; import org.springframework.http.HttpHeaders; import org.springframework.http.HttpStatus; import org.springframework.http.MediaType; import org.springframework.http.client.ClientHttpResponse; import org.springframework.test.web.client.MockRestServiceServer; import org.springframework.web.client.RestClientException; import org.zalando.riptide.model.Success; import java.net.URI; import java.util.concurrent.CompletionException; import java.util.concurrent.atomic.AtomicReference; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; import static org.hobsoft.hamcrest.compose.ComposeMatchers.hasFeature; import static org.junit.jupiter.api.Assertions.assertThrows; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; import static org.springframework.http.HttpStatus.ACCEPTED; import static org.springframework.http.HttpStatus.MOVED_PERMANENTLY; import static org.springframework.http.HttpStatus.NOT_FOUND; import static org.springframework.http.HttpStatus.OK; import static org.springframework.http.HttpStatus.Series.CLIENT_ERROR; import static org.springframework.http.HttpStatus.Series.SUCCESSFUL; import static org.springframework.http.MediaType.APPLICATION_JSON; import static org.springframework.http.MediaType.APPLICATION_JSON_VALUE; import static org.springframework.http.MediaType.APPLICATION_XML; import static org.springframework.http.MediaType.TEXT_PLAIN; import static org.springframework.test.web.client.match.MockRestRequestMatchers.requestTo; import static org.springframework.test.web.client.response.MockRestResponseCreators.withCreatedEntity; import static org.springframework.test.web.client.response.MockRestResponseCreators.withStatus; import static org.springframework.test.web.client.response.MockRestResponseCreators.withSuccess; import static org.zalando.riptide.Bindings.anyContentType; import static org.zalando.riptide.Bindings.anySeries; import static org.zalando.riptide.Bindings.anyStatus; import static org.zalando.riptide.Bindings.on; import static org.zalando.riptide.Navigators.contentType; import static org.zalando.riptide.Navigators.series; import static org.zalando.riptide.Navigators.status; import static org.zalando.riptide.PassRoute.pass; import static org.zalando.riptide.model.MediaTypes.ERROR; import static org.zalando.riptide.model.MediaTypes.PROBLEM; import static org.zalando.riptide.model.MediaTypes.SUCCESS; final class FailedDispatchTest { private final String url = "https://api.example.com"; private final Http unit; private final MockRestServiceServer server; FailedDispatchTest() { final MockSetup setup = new MockSetup(); this.unit = setup.getHttp(); this.server = setup.getServer(); } @Test void shouldThrowIfNoMatch() { server.expect(requestTo(url)) .andRespond(withSuccess() .body("") .contentType(APPLICATION_JSON)); final CompletionException exception = assertThrows(CompletionException.class, () -> unit.options(url) .dispatch(contentType(), // note that we don't match on application/json explicitly on(SUCCESS).call(pass()), on(PROBLEM).call(pass()), on(ERROR).call(pass())) .join()); assertThat(exception.getCause(), 
is(instanceOf(UnexpectedResponseException.class))); assertThat(exception.getMessage(), containsString("Unable to dispatch response: 200 - OK")); assertThat(exception.getMessage(), containsString("Content-Type")); assertThat(exception.getMessage(), containsString(APPLICATION_JSON_VALUE)); } @Test void shouldThrowOnFailedConversionBecauseOfUnknownContentType() { server.expect(requestTo(url)) .andRespond(withSuccess() .body("{}") .contentType(MediaType.APPLICATION_ATOM_XML)); final CompletionException exception = assertThrows(CompletionException.class, () -> unit.get(url) .dispatch(status(), on(OK).dispatch(series(), on(SUCCESSFUL).call(Success.class, success -> {}), anySeries().call(pass())), on(HttpStatus.CREATED).call(pass()), anyStatus().call(this::fail)) .join()); assertThat(exception.getCause(), is(instanceOf(RestClientException.class))); assertThat(exception.getMessage(), containsString("no suitable HttpMessageConverter found for response type")); } @Test void shouldThrowOnFailedConversionBecauseOfFaultyBody() { server.expect(requestTo(url)) .andRespond(withSuccess() .body("{") .contentType(SUCCESS)); assertThrows(CompletionException.class, () -> unit.get(url) .dispatch(status(), on(OK) .dispatch(series(), on(SUCCESSFUL).call(Success.class, success -> { }), anySeries().call(pass())), on(HttpStatus.CREATED).call(pass()), anyStatus().call(this::fail)) .join()); } @Test void shouldHandleNoBodyAtAll() { final HttpHeaders headers = new HttpHeaders(); headers.setContentLength(0); server.expect(requestTo(url)) .andRespond(withStatus(OK) .headers(headers) .contentType(SUCCESS)); final AtomicReference<Success> success = new AtomicReference<>(); unit.get(url) .dispatch(status(), on(OK) .dispatch(contentType(), on(SUCCESS).call(Success.class, success::set), anyContentType().call(this::fail)), on(HttpStatus.CREATED).call(Success.class, success::set), anyStatus().call(this::fail)) .join(); assertThat(success.get(), is(nullValue())); } private void fail(final ClientHttpResponse response) { throw new AssertionError("Should not have been executed, but received: " + response); } @Test void shouldPropagateIfNoMatch() throws Exception { server.expect(requestTo(url)) .andRespond(withSuccess() .body(new ClassPathResource("success.json")) .contentType(APPLICATION_JSON)); final ClientHttpResponseConsumer consumer = mock(ClientHttpResponseConsumer.class); unit.get(url) .dispatch(series(), on(SUCCESSFUL).dispatch(status(), on(OK).dispatch(contentType(), on(APPLICATION_XML).call(pass()), on(TEXT_PLAIN).call(pass())), on(ACCEPTED).call(pass()), anyStatus().call(consumer)), on(CLIENT_ERROR).call(pass())) .join(); verify(consumer).tryAccept(any()); } @Test void shouldPropagateMultipleLevelsIfNoMatch() throws Exception { server.expect(requestTo(url)) .andRespond(withSuccess() .body(new ClassPathResource("success.json")) .contentType(APPLICATION_JSON)); final ClientHttpResponseConsumer consumer = mock(ClientHttpResponseConsumer.class); unit.get(url) .dispatch(series(), on(SUCCESSFUL).dispatch(status(), on(OK).dispatch(contentType(), on(APPLICATION_XML).call(pass()), on(TEXT_PLAIN).call(pass())), on(ACCEPTED).call(pass())), on(CLIENT_ERROR).call(pass()), anySeries().call(consumer)) .join(); verify(consumer).tryAccept(any()); } @Test void shouldPreserveExceptionIfPropagateFailed() { server.expect(requestTo(url)) .andRespond(withCreatedEntity(URI.create("about:blank")) .body(new ClassPathResource("success.json")) .contentType(APPLICATION_JSON)); final CompletionException exception = 
assertThrows(CompletionException.class, () -> unit.post(url) .dispatch(series(), on(SUCCESSFUL).dispatch(contentType(), on(APPLICATION_JSON).dispatch(status(), on(OK).call(pass()), on(MOVED_PERMANENTLY).call(pass()), on(NOT_FOUND).call(pass())), on(APPLICATION_XML).call(pass()), on(TEXT_PLAIN).call(pass())), on(CLIENT_ERROR).call(pass())) .join()); assertThat(exception.getMessage(), containsString("Unable to dispatch response: 201 - Created")); assertThat(exception.getMessage(), containsString("Content-Type")); assertThat(exception.getMessage(), containsString(APPLICATION_JSON_VALUE)); final UnexpectedResponseException cause = (UnexpectedResponseException) exception.getCause(); assertThat(cause, hasFeature("raw status code", UnexpectedResponseException::getRawStatusCode, is(201))); assertThat(cause, hasFeature("status text", UnexpectedResponseException::getStatusText, is("Created"))); assertThat(cause, hasFeature("response headers", UnexpectedResponseException::getResponseHeaders, is(notNullValue()))); assertThat(cause, hasFeature("response body", UnexpectedResponseException::getResponseBody, is(notNullValue()))); } }
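/*
 * Contrast sketch (not part of the original test): the simplest way to avoid the
 * UnexpectedResponseException cases exercised above is to register a catch-all route,
 * so every response has somewhere to go. It reuses the MockSetup helper and the static
 * imports of FailedDispatchTest; the URL and response body are illustrative assumptions.
 */
final class SuccessfulDispatchSketch {

    private final MockSetup setup = new MockSetup();
    private final Http unit = setup.getHttp();
    private final MockRestServiceServer server = setup.getServer();

    void dispatchWithCatchAll() {
        server.expect(requestTo("https://api.example.com"))
                .andRespond(withSuccess().body("{}").contentType(APPLICATION_JSON));

        unit.get("https://api.example.com")
                .dispatch(series(),
                        on(SUCCESSFUL).call(pass()),
                        // catch-all: any other series is passed through instead of failing dispatch
                        anySeries().call(pass()))
                .join();

        server.verify();
    }
}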
/* * Copyright 2011 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.bitcoin.core; import com.google.bitcoin.utils.ListenerRegistration; import com.google.bitcoin.utils.Threading; import com.google.common.base.Preconditions; import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.SettableFuture; import javax.annotation.Nullable; import java.io.Serializable; import java.math.BigInteger; import java.util.ListIterator; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.Executor; /** * <p>A TransactionConfidence object tracks data you can use to make a confidence decision about a transaction. * It also contains some pre-canned rules for common scenarios: if you aren't really sure what level of confidence * you need, these should prove useful. You can get a confidence object using {@link Transaction#getConfidence()}. * They cannot be constructed directly.</p> * * <p>Confidence in a transaction can come in multiple ways:</p> * * <ul> * <li>Because you created it yourself and only you have the necessary keys.</li> * <li>Receiving it from a fully validating peer you know is trustworthy, for instance, because it's run by yourself.</li> * <li>Receiving it from a peer on the network you randomly chose. If your network connection is not being * intercepted, you have a pretty good chance of connecting to a node that is following the rules.</li> * <li>Receiving it from multiple peers on the network. If your network connection is not being intercepted, * hearing about a transaction from multiple peers indicates the network has accepted the transaction and * thus miners likely have too (miners have the final say in whether a transaction becomes valid or not).</li> * <li>Seeing the transaction appear in a block on the main chain. Your confidence increases as the transaction * becomes further buried under work. Work can be measured either in blocks (roughly, units of time), or * amount of work done.</li> * </ul> * * <p>Alternatively, you may know that the transaction is "dead", that is, one or more of its inputs have * been double spent and will never confirm unless there is another re-org.</p> * * <p>TransactionConfidence is updated via the {@link com.google.bitcoin.core.TransactionConfidence#notifyWorkDone(Block)} * method to ensure the block depth and work done are up to date.</p> * To make a copy that won't be changed, use {@link com.google.bitcoin.core.TransactionConfidence#duplicate()}. */ public class TransactionConfidence implements Serializable { private static final long serialVersionUID = 4577920141400556444L; /** * The peers that have announced the transaction to us. Network nodes don't have stable identities, so we use * IP address as an approximation. It's obviously vulnerable to being gamed if we allow arbitrary people to connect * to us, so only peers we explicitly connected to should go here.
*/ private CopyOnWriteArrayList<PeerAddress> broadcastBy; /** The Transaction that this confidence object is associated with. */ private final Transaction transaction; // Lazily created listeners array. private transient CopyOnWriteArrayList<ListenerRegistration<Listener>> listeners; // The depth of the transaction on the best chain in blocks. An unconfirmed block has depth 0. private int depth; // The cumulative work done for the blocks that bury this transaction. private BigInteger workDone = BigInteger.ZERO; /** Describes the state of the transaction in general terms. Properties can be read to learn specifics. */ public enum ConfidenceType { /** If BUILDING, then the transaction is included in the best chain and your confidence in it is increasing. */ BUILDING(1), /** * If PENDING, then the transaction is unconfirmed and should be included shortly, as long as it is being * announced and is considered valid by the network. A pending transaction will be announced if the containing * wallet has been attached to a live {@link PeerGroup} using {@link PeerGroup#addWallet(Wallet)}. * You can estimate how likely the transaction is to be included by connecting to a bunch of nodes then measuring * how many announce it, using {@link com.google.bitcoin.core.TransactionConfidence#numBroadcastPeers()}. * Or if you saw it from a trusted peer, you can assume it's valid and will get mined sooner or later as well. */ PENDING(2), /** * If DEAD, then it means the transaction won't confirm unless there is another re-org, * because some other transaction is spending one of its inputs. Such transactions should be alerted to the user * so they can take action, eg, suspending shipment of goods if they are a merchant. * It can also mean that a coinbase transaction has been made dead from it being moved onto a side chain. */ DEAD(4), /** * If a transaction hasn't been broadcast yet, or there's no record of it, its confidence is UNKNOWN. */ UNKNOWN(0); private int value; ConfidenceType(int value) { this.value = value; } public int getValue() { return value; } } private ConfidenceType confidenceType = ConfidenceType.UNKNOWN; private int appearedAtChainHeight = -1; // The transaction that double spent this one, if any. private Transaction overridingTransaction; /** * Information about where the transaction was first seen (network, sent direct from peer, created by ourselves). * Useful for risk analyzing pending transactions. Probably not that useful after a tx is included in the chain, * unless re-org double spends start happening frequently. */ public enum Source { /** We don't know where the transaction came from. */ UNKNOWN, /** We got this transaction from a network peer. */ NETWORK, /** This transaction was created by our own wallet, so we know it's not a double spend. */ SELF } private Source source = Source.UNKNOWN; public TransactionConfidence(Transaction tx) { // Assume a default number of peers for our set. broadcastBy = new CopyOnWriteArrayList<PeerAddress>(); listeners = new CopyOnWriteArrayList<ListenerRegistration<Listener>>(); transaction = tx; } /** * <p>A confidence listener is informed when the level of {@link TransactionConfidence} is updated by something, like * for example a {@link Wallet}. You can add listeners to update your user interface or manage your order tracking * system when confidence levels pass a certain threshold. <b>Note that confidence can go down as well as up.</b> * For example, this can happen if somebody is doing a double-spend attack against you. 
Whilst it's unlikely, your * code should be able to handle that in order to be correct.</p> * * <p>During listener execution, it's safe to remove the current listener but not others.</p> */ public interface Listener { /** An enum that describes why a transaction confidence listener is being invoked (i.e. the class of change). */ public enum ChangeReason { /** * Occurs when the type returned by {@link com.google.bitcoin.core.TransactionConfidence#getConfidenceType()} * has changed. For example, if a PENDING transaction changes to BUILDING or DEAD, then this reason will * be given. It's a high level summary. */ TYPE, /** * Occurs when a transaction that is in the best known block chain gets buried by another block. If you're * waiting for a certain number of confirmations, this is the reason to watch out for. */ DEPTH, /** * Occurs when a pending transaction (not in the chain) was announced by another connected peers. By * watching the number of peers that announced a transaction go up, you can see whether it's being * accepted by the network or not. If all your peers announce, it's a pretty good bet the transaction * is considered relayable and has thus reached the miners. */ SEEN_PEERS, } public void onConfidenceChanged(Transaction tx, ChangeReason reason); } /** * <p>Adds an event listener that will be run when this confidence object is updated. The listener will be locked and * is likely to be invoked on a peer thread.</p> * * <p>Note that this is NOT called when every block arrives. Instead it is called when the transaction * transitions between confidence states, ie, from not being seen in the chain to being seen (not necessarily in * the best chain). If you want to know when the transaction gets buried under another block, consider using * a future from {@link #getDepthFuture(int)}.</p> */ public void addEventListener(Listener listener, Executor executor) { Preconditions.checkNotNull(listener); listeners.addIfAbsent(new ListenerRegistration<Listener>(listener, executor)); } /** * <p>Adds an event listener that will be run when this confidence object is updated. The listener will be locked and * is likely to be invoked on a peer thread.</p> * * <p>Note that this is NOT called when every block arrives. Instead it is called when the transaction * transitions between confidence states, ie, from not being seen in the chain to being seen (not necessarily in * the best chain). If you want to know when the transaction gets buried under another block, implement a * {@link BlockChainListener}, attach it to a {@link BlockChain} and then use the getters on the * confidence object to determine the new depth.</p> */ public void addEventListener(Listener listener) { addEventListener(listener, Threading.USER_THREAD); } public boolean removeEventListener(Listener listener) { Preconditions.checkNotNull(listener); return ListenerRegistration.removeFromList(listener, listeners); } /** * Returns the chain height at which the transaction appeared if confidence type is BUILDING. * @throws IllegalStateException if the confidence type is not BUILDING. */ public synchronized int getAppearedAtChainHeight() { if (getConfidenceType() != ConfidenceType.BUILDING) throw new IllegalStateException("Confidence type is " + getConfidenceType() + ", not BUILDING"); return appearedAtChainHeight; } /** * The chain height at which the transaction appeared, if it has been seen in the best chain. Automatically sets * the current type to {@link ConfidenceType#BUILDING} and depth to one. 
*/ public synchronized void setAppearedAtChainHeight(int appearedAtChainHeight) { if (appearedAtChainHeight < 0) throw new IllegalArgumentException("appearedAtChainHeight out of range"); this.appearedAtChainHeight = appearedAtChainHeight; this.depth = 1; setConfidenceType(ConfidenceType.BUILDING); } /** * Returns a general statement of the level of confidence you can have in this transaction. */ public synchronized ConfidenceType getConfidenceType() { return confidenceType; } /** * Called by other objects in the system, like a {@link Wallet}, when new information about the confidence of a * transaction becomes available. */ public synchronized void setConfidenceType(ConfidenceType confidenceType) { // Don't inform the event listeners if the confidence didn't really change. if (confidenceType == this.confidenceType) return; this.confidenceType = confidenceType; if (confidenceType == ConfidenceType.PENDING) { depth = 0; appearedAtChainHeight = -1; workDone = BigInteger.ZERO; } } /** * Called by a {@link Peer} when a transaction is pending and announced by a peer. The more peers announce the * transaction, the more peers have validated it (assuming your internet connection is not being intercepted). * If confidence is currently unknown, sets it to {@link ConfidenceType#PENDING}. Listeners will be * invoked in this case. * * @param address IP address of the peer, used as a proxy for identity. */ public synchronized boolean markBroadcastBy(PeerAddress address) { if (!broadcastBy.addIfAbsent(address)) return false; // Duplicate. if (getConfidenceType() == ConfidenceType.UNKNOWN) { this.confidenceType = ConfidenceType.PENDING; } return true; } /** * Returns how many peers have been passed to {@link TransactionConfidence#markBroadcastBy}. */ public int numBroadcastPeers() { return broadcastBy.size(); } /** * Returns a snapshot of {@link PeerAddress}es that announced the transaction. */ public ListIterator<PeerAddress> getBroadcastBy() { return broadcastBy.listIterator(); } /** Returns true if the given address has been seen via markBroadcastBy() */ public boolean wasBroadcastBy(PeerAddress address) { return broadcastBy.contains(address); } @Override public synchronized String toString() { StringBuilder builder = new StringBuilder(); int peers = numBroadcastPeers(); if (peers > 0) { builder.append("Seen by "); builder.append(peers); if (peers > 1) builder.append(" peers. "); else builder.append(" peer. "); } switch (getConfidenceType()) { case UNKNOWN: builder.append("Unknown confidence level."); break; case DEAD: builder.append("Dead: overridden by double spend and will not confirm."); break; case PENDING: builder.append("Pending/unconfirmed."); break; case BUILDING: builder.append(String.format("Appeared in best chain at height %d, depth %d, work done %s.", getAppearedAtChainHeight(), getDepthInBlocks(), getWorkDone())); break; } return builder.toString(); } /** * Called by the wallet when the tx appears on the best chain and a new block is added to the top. * Updates the internal counter that tracks how deeply buried the block is. * Work is the value of block.getWork(). */ public synchronized boolean notifyWorkDone(Block block) throws VerificationException { if (getConfidenceType() != ConfidenceType.BUILDING) return false; // Should this be an assert? this.depth++; this.workDone = this.workDone.add(block.getWork()); return true; } /** * <p>Depth in the chain is an approximation of how much time has elapsed since the transaction has been confirmed. 
* On average there is supposed to be a new block every 10 minutes, but the actual rate may vary. The reference * (Satoshi) implementation considers a transaction impractical to reverse after 6 blocks, but as of EOY 2011 network * security is high enough that often only one block is considered enough even for high value transactions. For low * value transactions like songs, or other cheap items, no blocks at all may be necessary.</p> * * <p>If the transaction appears in the top block, the depth is one. If it's anything else (pending, dead, unknown) * the depth is zero.</p> */ public synchronized int getDepthInBlocks() { return depth; } /* * Set the depth in blocks. Having one block confirmation is a depth of one. */ public synchronized void setDepthInBlocks(int depth) { this.depth = depth; } /** * Returns the estimated amount of work (number of hashes performed) on this transaction. Work done is a measure of * security that is related to depth in blocks, but more predictable: the network will always attempt to produce six * blocks per hour by adjusting the difficulty target. So to know how much real computation effort is needed to * reverse a transaction, counting blocks is not enough. If a transaction has not confirmed, the result is zero. * @return estimated number of hashes needed to reverse the transaction. */ public synchronized BigInteger getWorkDone() { return workDone; } public synchronized void setWorkDone(BigInteger workDone) { this.workDone = workDone; } /** * If this transaction has been overridden by a double spend (is dead), this call returns the overriding transaction. * Note that this call <b>can return null</b> if you have migrated an old wallet, as pre-Jan 2012 wallets did not * store this information. * * @return the transaction that double spent this one * @throws IllegalStateException if confidence type is not OVERRIDDEN_BY_DOUBLE_SPEND. */ public synchronized Transaction getOverridingTransaction() { if (getConfidenceType() != ConfidenceType.DEAD) throw new IllegalStateException("Confidence type is " + getConfidenceType() + ", not OVERRIDDEN_BY_DOUBLE_SPEND"); return overridingTransaction; } /** * Called when the transaction becomes newly dead, that is, we learn that one of its inputs has already been spent * in such a way that the double-spending transaction takes precedence over this one. It will not become valid now * unless there is a re-org. Automatically sets the confidence type to DEAD. */ public synchronized void setOverridingTransaction(@Nullable Transaction overridingTransaction) { this.overridingTransaction = overridingTransaction; setConfidenceType(ConfidenceType.DEAD); } /** Returns a copy of this object. Event listeners are not duplicated. */ public synchronized TransactionConfidence duplicate() { TransactionConfidence c = new TransactionConfidence(transaction); // There is no point in this sync block, it's just to help FindBugs. synchronized (c) { c.broadcastBy.addAll(broadcastBy); c.confidenceType = confidenceType; c.overridingTransaction = overridingTransaction; c.appearedAtChainHeight = appearedAtChainHeight; return c; } } /** * Call this after adjusting the confidence, for cases where listeners should be notified. This has to be done * explicitly rather than being done automatically because sometimes complex changes to transaction states can * result in a series of confidence changes that are not really useful to see separately. By invoking listeners * explicitly, more precise control is available. 
Note that this will run the listeners on the user code thread. */ public void queueListeners(final Listener.ChangeReason reason) { for (final ListenerRegistration<Listener> registration : listeners) { registration.executor.execute(new Runnable() { @Override public void run() { registration.listener.onConfidenceChanged(transaction, reason); } }); } } /** * The source of a transaction tries to identify where it came from originally. For instance, did we download it * from the peer to peer network, or make it ourselves, or receive it via Bluetooth, or import it from another app, * and so on. This information is useful for {@link com.google.bitcoin.wallet.CoinSelector} implementations to risk analyze * transactions and decide when to spend them. */ public synchronized Source getSource() { return source; } /** * The source of a transaction tries to identify where it came from originally. For instance, did we download it * from the peer to peer network, or make it ourselves, or receive it via Bluetooth, or import it from another app, * and so on. This information is useful for {@link com.google.bitcoin.wallet.CoinSelector} implementations to risk analyze * transactions and decide when to spend them. */ public synchronized void setSource(Source source) { this.source = source; } /** * Returns a future that completes when the transaction has been confirmed by "depth" blocks. For instance setting * depth to one will wait until it appears in a block on the best chain, and zero will wait until it has been seen * on the network. */ public synchronized ListenableFuture<Transaction> getDepthFuture(final int depth, Executor executor) { final SettableFuture<Transaction> result = SettableFuture.create(); if (getDepthInBlocks() >= depth) { result.set(transaction); } addEventListener(new Listener() { @Override public void onConfidenceChanged(Transaction tx, ChangeReason reason) { if (getDepthInBlocks() >= depth) { removeEventListener(this); result.set(transaction); } } }, executor); return result; } public synchronized ListenableFuture<Transaction> getDepthFuture(final int depth) { return getDepthFuture(depth, Threading.USER_THREAD); } }
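/*
 * Usage sketch (not part of the original sources): how calling code might consume the
 * TransactionConfidence API above. It assumes the same package as TransactionConfidence
 * and that the Transaction "tx" comes from elsewhere (e.g. a Wallet); the target depth
 * of 6 blocks is an illustrative choice, not a library default.
 */
class ConfidenceUsageSketch {
    static void awaitConfirmations(Transaction tx) throws Exception {
        TransactionConfidence confidence = tx.getConfidence();

        // React to every confidence change, for example to refresh a UI.
        confidence.addEventListener(new TransactionConfidence.Listener() {
            @Override
            public void onConfidenceChanged(Transaction changedTx, ChangeReason reason) {
                System.out.println("confidence changed (" + reason + "): " + changedTx.getConfidence());
            }
        });

        // Block until the transaction is buried under 6 blocks on the best chain.
        confidence.getDepthFuture(6).get();
        System.out.println("estimated work done: " + confidence.getWorkDone());
    }
}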
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.operator.scalar; import com.facebook.presto.metadata.FunctionKind; import com.facebook.presto.metadata.MetadataManager; import com.facebook.presto.metadata.Signature; import com.facebook.presto.operator.project.PageProcessor; import com.facebook.presto.spi.Page; import com.facebook.presto.spi.block.ArrayBlock; import com.facebook.presto.spi.block.Block; import com.facebook.presto.spi.block.BlockBuilder; import com.facebook.presto.spi.block.BlockBuilderStatus; import com.facebook.presto.spi.block.DictionaryBlock; import com.facebook.presto.spi.block.InterleavedBlock; import com.facebook.presto.spi.block.SliceArrayBlock; import com.facebook.presto.spi.type.MapType; import com.facebook.presto.spi.type.Type; import com.facebook.presto.sql.gen.ExpressionCompiler; import com.facebook.presto.sql.gen.PageFunctionCompiler; import com.facebook.presto.sql.relational.CallExpression; import com.facebook.presto.sql.relational.RowExpression; import com.google.common.collect.ImmutableList; import io.airlift.slice.Slice; import org.openjdk.jmh.annotations.Benchmark; import org.openjdk.jmh.annotations.BenchmarkMode; import org.openjdk.jmh.annotations.Fork; import org.openjdk.jmh.annotations.Measurement; import org.openjdk.jmh.annotations.Mode; import org.openjdk.jmh.annotations.OperationsPerInvocation; import org.openjdk.jmh.annotations.OutputTimeUnit; import org.openjdk.jmh.annotations.Param; import org.openjdk.jmh.annotations.Scope; import org.openjdk.jmh.annotations.Setup; import org.openjdk.jmh.annotations.State; import org.openjdk.jmh.annotations.Warmup; import org.openjdk.jmh.runner.Runner; import org.openjdk.jmh.runner.options.Options; import org.openjdk.jmh.runner.options.OptionsBuilder; import org.openjdk.jmh.runner.options.VerboseMode; import org.openjdk.jmh.runner.options.WarmupMode; import java.util.ArrayList; import java.util.List; import java.util.Optional; import java.util.concurrent.ThreadLocalRandom; import java.util.concurrent.TimeUnit; import static com.facebook.presto.spi.function.OperatorType.SUBSCRIPT; import static com.facebook.presto.spi.type.DoubleType.DOUBLE; import static com.facebook.presto.spi.type.VarcharType.createUnboundedVarcharType; import static com.facebook.presto.sql.relational.Expressions.constant; import static com.facebook.presto.sql.relational.Expressions.field; import static com.facebook.presto.testing.TestingConnectorSession.SESSION; import static com.facebook.presto.util.StructuralTestUtil.mapType; import static com.google.common.base.Verify.verify; import static io.airlift.slice.Slices.utf8Slice; @SuppressWarnings("MethodMayBeStatic") @State(Scope.Thread) @OutputTimeUnit(TimeUnit.NANOSECONDS) @Fork(2) @Warmup(iterations = 10, time = 500, timeUnit = TimeUnit.MILLISECONDS) @Measurement(iterations = 10, time = 500, timeUnit = TimeUnit.MILLISECONDS) @BenchmarkMode(Mode.AverageTime) public class BenchmarkMapSubscript { private static final int POSITIONS = 1024; @Benchmark 
@OperationsPerInvocation(POSITIONS) public List<Page> mapSubscript(BenchmarkData data) throws Throwable { return ImmutableList.copyOf(data.getPageProcessor().process(SESSION, data.getPage())); } @SuppressWarnings("FieldMayBeFinal") @State(Scope.Thread) public static class BenchmarkData { @Param({"fix-width", "var-width", "dictionary"}) private String name = "dictionary"; @Param({"1", "13"}) private int mapSize = 13; private Page page; private PageProcessor pageProcessor; @Setup public void setup() { MetadataManager metadata = MetadataManager.createTestMetadataManager(); ExpressionCompiler compiler = new ExpressionCompiler(metadata, new PageFunctionCompiler(metadata, 0)); List<String> keys; switch (mapSize) { case 1: keys = ImmutableList.of("do_not_use"); break; case 13: keys = ImmutableList.of("is_inverted", "device_model", "country", "carrier_id", "network_type", "os_version", "device_brand", "device_type", "interface", "device_os", "app_version", "device_type_class", "browser"); break; default: throw new UnsupportedOperationException(); } verify(keys.size() == mapSize); MapType mapType; Block valueBlock; switch (name) { case "fix-width": mapType = mapType(createUnboundedVarcharType(), DOUBLE); valueBlock = createFixWidthValueBlock(POSITIONS, mapSize); break; case "var-width": mapType = mapType(createUnboundedVarcharType(), createUnboundedVarcharType()); valueBlock = createVarWidthValueBlock(POSITIONS, mapSize); break; case "dictionary": mapType = mapType(createUnboundedVarcharType(), createUnboundedVarcharType()); valueBlock = createDictionaryValueBlock(POSITIONS, mapSize); break; default: throw new UnsupportedOperationException(); } Block keyBlock = createKeyBlock(POSITIONS, keys); Block block = createMapBlock(POSITIONS, keyBlock, valueBlock); ImmutableList.Builder<RowExpression> projectionsBuilder = ImmutableList.builder(); Signature signature = new Signature( "$operator$" + SUBSCRIPT.name(), FunctionKind.SCALAR, mapType.getValueType().getTypeSignature(), mapType.getTypeSignature(), mapType.getKeyType().getTypeSignature()); for (int i = 0; i < mapSize; i++) { projectionsBuilder.add(new CallExpression( signature, mapType.getValueType(), ImmutableList.of(field(0, mapType), constant(utf8Slice(keys.get(i)), createUnboundedVarcharType())))); } ImmutableList<RowExpression> projections = projectionsBuilder.build(); pageProcessor = compiler.compilePageProcessor(Optional.empty(), projections).get(); page = new Page(block); } public PageProcessor getPageProcessor() { return pageProcessor; } public Page getPage() { return page; } private static Block createMapBlock(int positionCount, Block keyBlock, Block valueBlock) { InterleavedBlock interleavedBlock = new InterleavedBlock(new Block[] {keyBlock, valueBlock}); int[] offsets = new int[positionCount + 1]; int mapSize = keyBlock.getPositionCount() / positionCount; for (int i = 0; i < offsets.length; i++) { offsets[i] = mapSize * 2 * i; } return new ArrayBlock(positionCount, new boolean[positionCount], offsets, interleavedBlock); } private static Block createKeyBlock(int positionCount, List<String> keys) { Block keyDictionaryBlock = createSliceArrayBlock(keys); int[] keyIds = new int[positionCount * keys.size()]; for (int i = 0; i < keyIds.length; i++) { keyIds[i] = i % keys.size(); } return new DictionaryBlock(keyDictionaryBlock, keyIds); } private static Block createFixWidthValueBlock(int positionCount, int mapSize) { BlockBuilder valueBlockBuilder = DOUBLE.createBlockBuilder(new BlockBuilderStatus(), positionCount * mapSize); for (int i = 0; i < 
positionCount * mapSize; i++) { DOUBLE.writeDouble(valueBlockBuilder, ThreadLocalRandom.current().nextDouble()); } return valueBlockBuilder.build(); } private static Block createVarWidthValueBlock(int positionCount, int mapSize) { Type valueType = createUnboundedVarcharType(); BlockBuilder valueBlockBuilder = valueType.createBlockBuilder(new BlockBuilderStatus(), positionCount * mapSize); for (int i = 0; i < positionCount * mapSize; i++) { int wordLength = ThreadLocalRandom.current().nextInt(5, 10); valueType.writeSlice(valueBlockBuilder, utf8Slice(randomString(wordLength))); } return valueBlockBuilder.build(); } private static Block createDictionaryValueBlock(int positionCount, int mapSize) { double distinctRatio = 0.82; int dictionarySize = (int) (positionCount * mapSize * distinctRatio); List<String> dictionaryStrings = new ArrayList<>(dictionarySize); for (int i = 0; i < dictionarySize; i++) { int wordLength = ThreadLocalRandom.current().nextInt(5, 10); dictionaryStrings.add(randomString(wordLength)); } Block dictionaryBlock = createSliceArrayBlock(dictionaryStrings); int[] keyIds = new int[positionCount * mapSize]; for (int i = 0; i < keyIds.length; i++) { keyIds[i] = ThreadLocalRandom.current().nextInt(0, dictionarySize); } return new DictionaryBlock(dictionaryBlock, keyIds); } private static String randomString(int length) { String symbols = "abcdefghijklmnopqrstuvwxyz"; char[] chars = new char[length]; for (int i = 0; i < length; i++) { chars[i] = symbols.charAt(ThreadLocalRandom.current().nextInt(symbols.length())); } return new String(chars); } private static Block createSliceArrayBlock(List<String> keys) { // last position is reserved for null Slice[] sliceArray = new Slice[keys.size() + 1]; for (int i = 0; i < keys.size(); i++) { sliceArray[i] = utf8Slice(keys.get(i)); } return new SliceArrayBlock(sliceArray.length, sliceArray); } } public static void main(String[] args) throws Throwable { // assure the benchmarks are valid before running BenchmarkData data = new BenchmarkData(); data.setup(); new BenchmarkMapSubscript().mapSubscript(data); Options options = new OptionsBuilder() .verbosity(VerboseMode.NORMAL) .warmupMode(WarmupMode.INDI) .include(".*" + BenchmarkMapSubscript.class.getSimpleName() + ".*") .build(); new Runner(options).run(); } }
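/*
 * Runner sketch (not part of the original benchmark): JMH also lets the caller pin the
 * @Param values of BenchmarkData from code via OptionsBuilder.param(), which is handy when
 * iterating on a single case. It assumes the same imports as BenchmarkMapSubscript; the
 * chosen values "dictionary" and "13" are illustrative.
 */
class BenchmarkMapSubscriptSingleCaseRunner {
    public static void main(String[] args) throws Exception {
        Options options = new OptionsBuilder()
                .include(".*" + BenchmarkMapSubscript.class.getSimpleName() + ".*")
                .param("name", "dictionary")  // dictionary-encoded value block
                .param("mapSize", "13")       // the 13-key map case
                .forks(1)
                .build();
        new Runner(options).run();
    }
}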
/* * Copyright (c) 2017 Michael D'Arcy and Brianne O'Niel. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.group13.androidsdk.mycards; import android.support.test.InstrumentationRegistry; import org.junit.After; import org.junit.Before; import org.junit.Test; import java.util.ArrayList; import java.util.Date; import java.util.List; import static org.junit.Assert.*; /** * Unit tests for MyCardsDBManager */ public class MyCardsDBManagerTest { private MyCardsDBManager dbm = null; private List<Card> sampleCardList1 = new ArrayList<>(); private List<NotificationRule> notificationRules = new ArrayList<>(); private static final String dbName = "mycardsdb2"; @Before public void setUp() throws Exception { InstrumentationRegistry.getTargetContext().deleteDatabase(dbName); dbm = MyCardsDBManager.getInstance(InstrumentationRegistry.getTargetContext(), dbName); sampleCardList1.clear(); sampleCardList1.add(new Card(1, "frontside", "backside", new Date(), new Date(), 2.2, 1, 0 )); sampleCardList1.add(new Card(2, "", "", new Date(), new Date(), 2.2, 2, 1)); sampleCardList1.add(new Card(3, "f3", "b3", new Date(0), new Date(0), -1, 0, 1)); sampleCardList1.add(new Card(700, "front4", "back4", new Date(), new Date(), 2.2, 1, 0)); sampleCardList1.get(1).addTag("mytag1"); sampleCardList1.get(1).addTag("mytag2"); sampleCardList1.get(3).addTag("mytag1"); notificationRules.add(new NotificationRule(1, new SimpleDatePattern(new Date(), new Date(), 0, 0) )); notificationRules.add(new NotificationRule(2, new SimpleDatePattern(new Date(10000), new Date(15000), 0, 0) )); notificationRules.add(new NotificationRule(3, new SimpleDatePattern(new Date(10000), new Date(15000), 10000, 5) )); notificationRules.add(new NotificationRule(700, new SimpleDatePattern(new Date(10000), new Date(), 0, 0) )); } @After public void tearDown() throws Exception { InstrumentationRegistry.getTargetContext().deleteDatabase(dbName); sampleCardList1.clear(); notificationRules.clear(); } @Test public void deleteCardById() throws Exception { for (Card card : sampleCardList1) { card.setId((int) dbm.upsertCard(card)); } for (Card card : sampleCardList1) { Card card2 = dbm.getCardById(card.getId()); assertEquals("Inserted and retrieved cards must be equal", card, card2); } dbm.deleteCardById(3); assertNull("deleted card must not be in database", dbm.getCardById(3)); for (Card card : sampleCardList1) { Card card2 = dbm.getCardById(card.getId()); if (card.getId() != 3) { assertEquals("Inserted and retrieved cards must be equal", card, card2); } } } @Test public void upsertCard() throws Exception { for (Card card : sampleCardList1) { card.setId((int) dbm.upsertCard(card)); } for (Card card : sampleCardList1) { Card card2 = dbm.getCardById(card.getId()); assertEquals("Inserted and retrieved cards must be equal", card, card2); } assertEquals("Card must be inserted at proper index", sampleCardList1.get(3), dbm.getCardById(700) ); } @Test public void getCardById() throws Exception { for (Card card : sampleCardList1) { card.setId((int) dbm.upsertCard(card)); } assertNull("getCardById() must be null if ID does not exist", dbm.getCardById(4)); Card card2 = dbm.getCardById(3); assertEquals("Inserted and retrieved cards must be equal", sampleCardList1.get(2), card2); } @Test public void getAllCards() throws Exception { for (Card card : sampleCardList1) { card.setId((int) dbm.upsertCard(card)); } assertNotNull("getAllCards() must not be null if there are cards", dbm.getAllCards()); assertEquals("getAllCards() must return all the cards in the database", 4, dbm.getAllCards().length ); InstrumentationRegistry.getTargetContext().deleteDatabase(dbName); dbm = MyCardsDBManager.getInstance(InstrumentationRegistry.getTargetContext(), dbName); assertEquals("getAllCards() must give an empty array if there are no cards", 0, dbm.getAllCards().length ); } @Test public void getCardsByTags() throws Exception { for (Card card : sampleCardList1) { card.setId((int) dbm.upsertCard(card)); } assertNotNull("getCardsByTags() must not be null if there are cards", dbm.getCardsByTags(new String[]{"nonexistentTag"}) ); assertEquals("getCardsByTags() must give an empty array if there are no cards", 0, dbm.getCardsByTags(new String[]{"nonexistentTag"}).length ); assertEquals("getCardsByTags() must return all the cards that have matching tags", 2, dbm.getCardsByTags(new String[]{"mytag1"}).length ); assertEquals("getCardsByTags() must return all the cards that have matching tags", 2, dbm.getCardsByTags(new String[]{"mytag1", "mytag2"}).length ); } @Test public void getCardsForReviewBefore() throws Exception { for (Card card : sampleCardList1) { card.setId((int) dbm.upsertCard(card)); } assertNotNull("getCardsForReviewBefore() must not be null if there are cards", dbm.getCardsForReviewBefore(new Date(), new String[]{"nonexistentTag"}) ); assertEquals("getCardsForReviewBefore() must give an empty array if there are no cards", 0, dbm.getCardsForReviewBefore(new Date(), new String[]{"nonexistentTag"}).length ); assertEquals("getCardsForReviewBefore() must return all the cards that have matching tags", 2, dbm.getCardsForReviewBefore(new Date(), new String[]{"mytag1"}).length ); assertEquals("getCardsForReviewBefore() must return all the cards that have matching tags", 2, dbm.getCardsForReviewBefore(new Date(), new String[]{"mytag1", "mytag2"}).length ); } @Test public void deleteNotificationRuleById() throws Exception { for (NotificationRule notificationRule : notificationRules) { notificationRule.setId((int) dbm.upsertNotificationRule(notificationRule)); } for (NotificationRule notificationRule : notificationRules) { NotificationRule notificationRule2 = dbm.getNotificationRuleById(notificationRule .getId()); assertEquals("Inserted and retrieved notificationRules must be equal", notificationRule, notificationRule2 ); } dbm.deleteNotificationRuleById(3); assertNull("deleted notificationRule must not be in database", dbm.getNotificationRuleById(3) ); for (NotificationRule notificationRule : notificationRules) { NotificationRule notificationRule2 = dbm.getNotificationRuleById(notificationRule .getId()); if (notificationRule.getId() != 3) { assertEquals("Inserted and retrieved notificationRules must be equal", notificationRule, notificationRule2 ); } } } @Test public void upsertNotificationRule() throws Exception { for (NotificationRule notificationRule : notificationRules) { notificationRule.setId((int) dbm.upsertNotificationRule(notificationRule)); } for (NotificationRule notificationRule : notificationRules) { NotificationRule notificationRule2 = dbm.getNotificationRuleById(notificationRule .getId()); assertEquals("Inserted and retrieved notificationRules must be equal", notificationRule, notificationRule2 ); } assertEquals("NotificationRule must be inserted at proper index", notificationRules.get(3), dbm.getNotificationRuleById(700) ); } @Test public void getNotificationRuleById() throws Exception { for (NotificationRule notificationRule : notificationRules) { notificationRule.setId((int) dbm.upsertNotificationRule(notificationRule)); } assertNull("getNotificationRuleById() must be null if ID does not exist", dbm.getNotificationRuleById(4) ); NotificationRule rule2 = dbm.getNotificationRuleById(3); assertEquals("Inserted and retrieved rules must be equal", notificationRules.get(2), rule2); assertEquals("Date must be the same", notificationRules.get(2).getDatePattern().getStartDate().getTime(), rule2.getDatePattern().getStartDate().getTime() ); } @Test public void getAllNotificationRules() throws Exception { for (NotificationRule notificationRule : notificationRules) { notificationRule.setId((int) dbm.upsertNotificationRule(notificationRule)); } assertNotNull("getAllNotificationRules() must not be null if there are rules", dbm.getAllNotificationRules() ); assertEquals("getAllNotificationRules() must return all the rules in the database", 4, dbm.getAllNotificationRules().length ); InstrumentationRegistry.getTargetContext().deleteDatabase(dbName); dbm = MyCardsDBManager.getInstance(InstrumentationRegistry.getTargetContext(), dbName); assertEquals("getAllNotificationRules() must give an empty array if there are no rules", 0, dbm.getAllNotificationRules().length ); } @Test public void getAllNotificationRulesBeforeDate() throws Exception { } @Test public void setDoNotDisturb() throws Exception { // Duplicate tests to compensate for initial database state dbm.setDoNotDisturb(true); assertTrue(dbm.getDoNotDisturb()); dbm.setDoNotDisturb(false); assertFalse(dbm.getDoNotDisturb()); dbm.setDoNotDisturb(true); assertTrue(dbm.getDoNotDisturb()); dbm.setDoNotDisturb(false); assertFalse(dbm.getDoNotDisturb()); } @Test public void getDoNotDisturb() throws Exception { // Duplicate tests to compensate for initial database state dbm.setDoNotDisturb(true); assertTrue(dbm.getDoNotDisturb()); dbm.setDoNotDisturb(false); assertFalse(dbm.getDoNotDisturb()); dbm.setDoNotDisturb(true); assertTrue(dbm.getDoNotDisturb()); dbm.setDoNotDisturb(false); assertFalse(dbm.getDoNotDisturb()); } }
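/* Illustrative usage sketch for the MyCardsDBManager API exercised by the tests above. This is a hypothetical fragment, not part of the project: the Context parameter, database name, card text and tag are assumptions, and the numeric Card constructor arguments simply mirror the values used in setUp() above. */
package com.group13.androidsdk.mycards;

import android.content.Context;

import java.util.Date;

class MyCardsDBManagerUsageSketch {
    static void reviewDueCards(Context context) {
        // Obtain the singleton manager backed by the named on-device database.
        MyCardsDBManager dbm = MyCardsDBManager.getInstance(context, "mycardsdb-example");
        // Create a card, tag it, and persist it; upsertCard returns the stored row ID.
        Card card = new Card(1, "front text", "back text", new Date(), new Date(), 2.2, 1, 0);
        card.addTag("example-tag");
        card.setId((int) dbm.upsertCard(card));
        // Fetch everything due for review before now that carries the tag, then persist changes.
        for (Card due : dbm.getCardsForReviewBefore(new Date(), new String[]{"example-tag"})) {
            dbm.upsertCard(due);
        }
    }
}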
package org.iotp.server.controller; import java.util.ArrayList; import java.util.List; import java.util.stream.Collectors; import org.iotp.infomgt.dao.asset.AssetSearchQuery; import org.iotp.infomgt.dao.exception.IncorrectParameterException; import org.iotp.infomgt.dao.model.ModelConstants; import org.iotp.infomgt.data.Asset; import org.iotp.infomgt.data.Customer; import org.iotp.infomgt.data.TenantAssetType; import org.iotp.infomgt.data.id.AssetId; import org.iotp.infomgt.data.id.CustomerId; import org.iotp.infomgt.data.id.TenantId; import org.iotp.infomgt.data.page.TextPageData; import org.iotp.infomgt.data.page.TextPageLink; import org.iotp.infomgt.data.security.AssetCredentials; import org.iotp.server.exception.IoTPException; import org.iotp.server.msghub.ThingsMetaKafkaTopics; import org.iotp.server.service.security.model.SecurityUser; import org.springframework.http.HttpStatus; import org.springframework.security.access.prepost.PreAuthorize; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.RequestBody; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.ResponseBody; import org.springframework.web.bind.annotation.ResponseStatus; import org.springframework.web.bind.annotation.RestController; import com.google.common.util.concurrent.ListenableFuture; import com.google.gson.JsonObject; @RestController @RequestMapping("/api") public class AssetController extends BaseController { @PreAuthorize("hasAnyAuthority('TENANT_ADMIN', 'CUSTOMER_USER')") @RequestMapping(value = "/asset/{assetId}", method = RequestMethod.GET) @ResponseBody public Asset getAssetById(@PathVariable("assetId") String strAssetId) throws IoTPException { checkParameter("assetId", strAssetId); try { AssetId assetId = new AssetId(toUUID(strAssetId)); return checkAssetId(assetId); } catch (Exception e) { throw handleException(e); } } @PreAuthorize("hasAuthority('TENANT_ADMIN')") @RequestMapping(value = "/asset", method = RequestMethod.POST) @ResponseBody public Asset saveAsset(@RequestBody Asset asset) throws IoTPException { try { asset.setTenantId(getCurrentUser().getTenantId()); return checkNotNull(assetService.saveAsset(asset)); } catch (Exception e) { throw handleException(e); } } @PreAuthorize("hasAuthority('TENANT_ADMIN')") @RequestMapping(value = "/asset/{assetId}", method = RequestMethod.DELETE) @ResponseStatus(value = HttpStatus.OK) public void deleteAsset(@PathVariable("assetId") String strAssetId) throws IoTPException { checkParameter("assetId", strAssetId); try { AssetId assetId = new AssetId(toUUID(strAssetId)); checkAssetId(assetId); assetService.deleteAsset(assetId); } catch (Exception e) { throw handleException(e); } } @PreAuthorize("hasAuthority('TENANT_ADMIN')") @RequestMapping(value = "/customer/{customerId}/asset/{assetId}", method = RequestMethod.POST) @ResponseBody public Asset assignAssetToCustomer(@PathVariable("customerId") String strCustomerId, @PathVariable("assetId") String strAssetId) throws IoTPException { checkParameter("customerId", strCustomerId); checkParameter("assetId", strAssetId); try { CustomerId customerId = new CustomerId(toUUID(strCustomerId)); checkCustomerId(customerId); AssetId assetId = new AssetId(toUUID(strAssetId)); checkAssetId(assetId); return checkNotNull(assetService.assignAssetToCustomer(assetId, customerId)); } catch (Exception e) { 
throw handleException(e); } } @PreAuthorize("hasAuthority('TENANT_ADMIN')") @RequestMapping(value = "/customer/asset/{assetId}", method = RequestMethod.DELETE) @ResponseBody public Asset unassignAssetFromCustomer(@PathVariable("assetId") String strAssetId) throws IoTPException { checkParameter("assetId", strAssetId); try { AssetId assetId = new AssetId(toUUID(strAssetId)); Asset asset = checkAssetId(assetId); if (asset.getCustomerId() == null || asset.getCustomerId().getId().equals(ModelConstants.NULL_UUID)) { throw new IncorrectParameterException("Asset isn't assigned to any customer!"); } return checkNotNull(assetService.unassignAssetFromCustomer(assetId)); } catch (Exception e) { throw handleException(e); } } @PreAuthorize("hasAuthority('TENANT_ADMIN')") @RequestMapping(value = "/customer/public/asset/{assetId}", method = RequestMethod.POST) @ResponseBody public Asset assignAssetToPublicCustomer(@PathVariable("assetId") String strAssetId) throws IoTPException { checkParameter("assetId", strAssetId); try { AssetId assetId = new AssetId(toUUID(strAssetId)); Asset asset = checkAssetId(assetId); Customer publicCustomer = customerService.findOrCreatePublicCustomer(asset.getTenantId()); return checkNotNull(assetService.assignAssetToCustomer(assetId, publicCustomer.getId())); } catch (Exception e) { throw handleException(e); } } @PreAuthorize("hasAnyAuthority('TENANT_ADMIN', 'CUSTOMER_USER')") @RequestMapping(value = "/asset/{assetId}/credentials", method = RequestMethod.GET) @ResponseBody public AssetCredentials getAssetCredentialsByAssetId(@PathVariable("assetId") String strAssetId) throws IoTPException { checkParameter("assetId", strAssetId); try { AssetId assetId = new AssetId(toUUID(strAssetId)); checkAssetId(assetId); return checkNotNull(assetCredentialsService.findAssetCredentialsByAssetId(assetId)); } catch (Exception e) { throw handleException(e); } } @PreAuthorize("hasAuthority('TENANT_ADMIN')") @RequestMapping(value = "/asset/credentials", method = RequestMethod.POST) @ResponseBody public AssetCredentials saveAssetCredentials(@RequestBody AssetCredentials assetCredentials) throws IoTPException { checkNotNull(assetCredentials); try { checkAssetId(assetCredentials.getAssetId()); AssetCredentials result = checkNotNull(assetCredentialsService.updateAssetCredentials(assetCredentials)); // actorService.onCredentialsUpdate(getCurrentUser().getTenantId(), // assetCredentials.getAssetId()); JsonObject json = new JsonObject(); json.addProperty(ThingsMetaKafkaTopics.TENANT_ID, getCurrentUser().getTenantId().toString()); json.addProperty(ThingsMetaKafkaTopics.ASSET_ID, assetCredentials.getAssetId().toString()); json.addProperty(ThingsMetaKafkaTopics.EVENT, ThingsMetaKafkaTopics.EVENT_CREDENTIALS_UPDATE); msgProducer.send(ThingsMetaKafkaTopics.METADATA_ASSET_TOPIC, assetCredentials.getAssetId().toString(), json.toString()); return result; } catch (Exception e) { throw handleException(e); } } @PreAuthorize("hasAuthority('TENANT_ADMIN')") @RequestMapping(value = "/tenant/assets", params = { "limit" }, method = RequestMethod.GET) @ResponseBody public TextPageData<Asset> getTenantAssets(@RequestParam int limit, @RequestParam(required = false) String type, @RequestParam(required = false) String textSearch, @RequestParam(required = false) String idOffset, @RequestParam(required = false) String textOffset) throws IoTPException { try { TenantId tenantId = getCurrentUser().getTenantId(); TextPageLink pageLink = createPageLink(limit, textSearch, idOffset, textOffset); if (type != null && 
type.trim().length() > 0) { return checkNotNull(assetService.findAssetsByTenantIdAndType(tenantId, type, pageLink)); } else { return checkNotNull(assetService.findAssetsByTenantId(tenantId, pageLink)); } } catch (Exception e) { throw handleException(e); } } @PreAuthorize("hasAuthority('TENANT_ADMIN')") @RequestMapping(value = "/tenant/assets", params = { "assetName" }, method = RequestMethod.GET) @ResponseBody public Asset getTenantAsset(@RequestParam String assetName) throws IoTPException { try { TenantId tenantId = getCurrentUser().getTenantId(); return checkNotNull(assetService.findAssetByTenantIdAndName(tenantId, assetName)); } catch (Exception e) { throw handleException(e); } } @PreAuthorize("hasAnyAuthority('TENANT_ADMIN', 'CUSTOMER_USER')") @RequestMapping(value = "/customer/{customerId}/assets", params = { "limit" }, method = RequestMethod.GET) @ResponseBody public TextPageData<Asset> getCustomerAssets(@PathVariable("customerId") String strCustomerId, @RequestParam int limit, @RequestParam(required = false) String type, @RequestParam(required = false) String textSearch, @RequestParam(required = false) String idOffset, @RequestParam(required = false) String textOffset) throws IoTPException { checkParameter("customerId", strCustomerId); try { TenantId tenantId = getCurrentUser().getTenantId(); CustomerId customerId = new CustomerId(toUUID(strCustomerId)); checkCustomerId(customerId); TextPageLink pageLink = createPageLink(limit, textSearch, idOffset, textOffset); if (type != null && type.trim().length() > 0) { return checkNotNull( assetService.findAssetsByTenantIdAndCustomerIdAndType(tenantId, customerId, type, pageLink)); } else { return checkNotNull(assetService.findAssetsByTenantIdAndCustomerId(tenantId, customerId, pageLink)); } } catch (Exception e) { throw handleException(e); } } @PreAuthorize("hasAnyAuthority('TENANT_ADMIN', 'CUSTOMER_USER')") @RequestMapping(value = "/assets", params = { "assetIds" }, method = RequestMethod.GET) @ResponseBody public List<Asset> getAssetsByIds(@RequestParam("assetIds") String[] strAssetIds) throws IoTPException { checkArrayParameter("assetIds", strAssetIds); try { SecurityUser user = getCurrentUser(); TenantId tenantId = user.getTenantId(); CustomerId customerId = user.getCustomerId(); List<AssetId> assetIds = new ArrayList<>(); for (String strAssetId : strAssetIds) { assetIds.add(new AssetId(toUUID(strAssetId))); } ListenableFuture<List<Asset>> assets; if (customerId == null || customerId.isNullUid()) { assets = assetService.findAssetsByTenantIdAndIdsAsync(tenantId, assetIds); } else { assets = assetService.findAssetsByTenantIdCustomerIdAndIdsAsync(tenantId, customerId, assetIds); } return checkNotNull(assets.get()); } catch (Exception e) { throw handleException(e); } } @PreAuthorize("hasAnyAuthority('TENANT_ADMIN', 'CUSTOMER_USER')") @RequestMapping(value = "/assets", method = RequestMethod.POST) @ResponseBody public List<Asset> findByQuery(@RequestBody AssetSearchQuery query) throws IoTPException { checkNotNull(query); checkNotNull(query.getParameters()); checkNotNull(query.getAssetTypes()); checkEntityId(query.getParameters().getEntityId()); try { List<Asset> assets = checkNotNull(assetService.findAssetsByQuery(query).get()); assets = assets.stream().filter(asset -> { try { checkAsset(asset); return true; } catch (IoTPException e) { return false; } }).collect(Collectors.toList()); return assets; } catch (Exception e) { throw handleException(e); } } @PreAuthorize("hasAnyAuthority('TENANT_ADMIN', 'CUSTOMER_USER')") @RequestMapping(value = 
"/asset/types", method = RequestMethod.GET) @ResponseBody public List<TenantAssetType> getAssetTypes() throws IoTPException { try { SecurityUser user = getCurrentUser(); TenantId tenantId = user.getTenantId(); ListenableFuture<List<TenantAssetType>> assetTypes = assetService.findAssetTypesByTenantId(tenantId); return checkNotNull(assetTypes.get()); } catch (Exception e) { throw handleException(e); } } }
/* * Copyright (c) 2021 SQLines * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.sqlines.studio.view.mainwindow; import com.sqlines.studio.view.mainwindow.event.RecentFileEvent; import de.jangassen.MenuToolkit; import org.jetbrains.annotations.NotNull; import javafx.collections.ObservableList; import javafx.event.ActionEvent; import javafx.event.EventHandler; import javafx.scene.control.CheckMenuItem; import javafx.scene.control.Menu; import javafx.scene.control.MenuItem; import javafx.scene.control.SeparatorMenuItem; import javafx.scene.input.KeyCode; import javafx.scene.input.KeyCodeCombination; import javafx.scene.input.KeyCombination; /** * Drop down menus at the top of the application window. * <p> * Fires {@link RecentFileEvent} when the recent file item * in the Open Recent menu is clicked. * <p> * Contains the following menus: * <li>Main * <li>File * <li>Edit * <li>View * <li>Tools * <li>Help * * @apiNote Initially, all menu items are active. */ class MenuBar extends javafx.scene.control.MenuBar { // Main tab menu items private final MenuItem aboutMenuItem = new MenuItem(); private final MenuItem preferencesMenuItem = new MenuItem(); // File tab menu items private final MenuItem newTabMenuItem = new MenuItem(); private final MenuItem closeTabMenuItem = new MenuItem(); private final MenuItem nextTabMenuItem = new MenuItem(); private final MenuItem prevTabMenuItem = new MenuItem(); private final MenuItem openFileMenuItem = new MenuItem(); private final MenuItem saveFileMenuItem = new MenuItem(); private final MenuItem saveFileAsMenuItem = new MenuItem(); private final MenuItem clearRecentMenuItem = new MenuItem(); private final Menu openRecentMenu = new Menu(); // Edit tab menu items private final MenuItem undoMenuItem = new MenuItem(); private final MenuItem redoMenuItem = new MenuItem(); private final MenuItem selectAllMenuItem = new MenuItem(); private final MenuItem cutMenuItem = new MenuItem(); private final MenuItem copyMenuItem = new MenuItem(); private final MenuItem pasteMenuItem = new MenuItem(); // View tab menu items private final MenuItem zoomInMenuItem = new MenuItem(); private final MenuItem zoomOutMenuItem = new MenuItem(); private final CheckMenuItem statusBarMenuItem = new CheckMenuItem(); private final CheckMenuItem targetFieldMenuItem = new CheckMenuItem(); private final CheckMenuItem wrappingMenuItem = new CheckMenuItem(); private final CheckMenuItem highlighterMenuItem = new CheckMenuItem(); private final CheckMenuItem lineNumbersMenuItem = new CheckMenuItem(); // Tools tab menu items private final MenuItem runMenuItem = new MenuItem(); // Help tab menu items private final MenuItem onlineHelpMenuItem = new MenuItem(); private final MenuItem openSiteMenuItem = new MenuItem(); private EventHandler<RecentFileEvent> recentFileEventHandler; public MenuBar() { if (System.getProperty("os.name").toLowerCase().startsWith("mac")) { useSystemMenuBarProperty().set(true); MenuToolkit toolkit = MenuToolkit.toolkit(); Menu defaultMenu = 
toolkit.createDefaultApplicationMenu("SQLines Studio"); toolkit.setApplicationMenu(makeMainMenu(defaultMenu)); toolkit.setMenuBar(this); } makeFileMenu(); makeEditMenu(); makeViewMenu(); makeToolsMenu(); makeHelpMenu(); } /** * Adds new recent file path to the Open Recent menu. * * @param filePath file path to add * * @throws IllegalArgumentException if file path is empty */ public void addRecentFile(@NotNull String filePath) { if (filePath.isEmpty()) { throw new IllegalArgumentException("File path is empty"); } MenuItem newFile = new MenuItem(filePath); newFile.setOnAction(event -> { RecentFileEvent recentFileEvent = new RecentFileEvent(filePath); fireEvent(recentFileEvent); if (recentFileEventHandler != null) { recentFileEventHandler.handle(recentFileEvent); } }); openRecentMenu.getItems().add(0, newFile); } /** * Deletes all recent files paths in the Open Recent menu. */ public void clearRecentFiles() { ObservableList<MenuItem> items = openRecentMenu.getItems(); items.removeIf(item -> items.size() != 2); } /** * Moves the specified recent file path in the Open Recent menu to the specified position. * * @param filePath file path to move * @param moveTo index to move the file path to * * @throws IndexOutOfBoundsException if moveTo is out of range * (moveTo < 0 || moveTo >= the number of recent file paths) * @throws IllegalArgumentException if no such recent file path exists */ public void moveRecentFile(@NotNull String filePath, int moveTo) { ObservableList<MenuItem> items = openRecentMenu.getItems(); if (moveTo < 0 || moveTo >= items.size()) { int endInd = (items.size() == 0) ? 0 : items.size() - 1; String errorMsg = "Index is out of range: (0:" + endInd + ") expected, " + moveTo + " provided"; throw new IndexOutOfBoundsException(errorMsg); } MenuItem item = null; for (MenuItem menuItem : items) { if (menuItem.getText().equals(filePath)) { item = menuItem; break; } } if (item == null) { throw new IllegalArgumentException("No such recent file exists: " + filePath); } items.remove(item); items.add(moveTo, item); } /** * Defines the selection state of the Show Status Bar menu item. * * @param isSelected makes the menu item selected if true, unselected otherwise */ public void setStatusBarSelected(boolean isSelected) { statusBarMenuItem.setSelected(isSelected); } /** * Defines the selection state of the Always Show Target Field menu item. * * @param isSelected makes the menu item selected if true, unselected otherwise */ public void setTargetFieldSelected(boolean isSelected) { targetFieldMenuItem.setSelected(isSelected); } /** * Defines the selection state of the Wrap Lines To Editor Width menu item * in the Editor menu. * * @param isSelected makes the menu item selected if true, unselected otherwise */ public void setWrappingSelected(boolean isSelected) { wrappingMenuItem.setSelected(isSelected); } /** * Defines the selection state of the Highlighter menu item in the Editor menu. * * @param isSelected makes the menu item selected if true, unselected otherwise */ public void setHighlighterSelected(boolean isSelected) { highlighterMenuItem.setSelected(isSelected); } /** * Defines the selection state of the Line Numbers menu item in the Editor menu. * * @param isSelected makes the menu item selected if true, unselected otherwise */ public void setLineNumbersSelected(boolean isSelected) { lineNumbersMenuItem.setSelected(isSelected); } /** * Defines the state of the Close Tab menu item. 
* * @param isEnabled makes the menu item enabled if true, disabled otherwise */ public void setCloseTabState(boolean isEnabled) { closeTabMenuItem.setDisable(!isEnabled); } /** * Defines the state of the Next Tab menu item. * * @param isEnabled makes the menu item enabled if true, disabled otherwise */ public void setNextTabState(boolean isEnabled) { nextTabMenuItem.setDisable(!isEnabled); } /** * Defines the state of the Previous Tab menu item. * * @param isEnabled makes the menu item enabled if true, disabled otherwise */ public void setPrevTabState(boolean isEnabled) { prevTabMenuItem.setDisable(!isEnabled); } /** * Defines the state of the Open Recent menu. * * @param isEnabled makes the menu enabled if true, disabled otherwise */ public void setOpenRecentState(boolean isEnabled) { openRecentMenu.setDisable(!isEnabled); } /** * Defines the state of the Undo menu item. * * @param isEnabled makes the menu item enabled if true, disabled otherwise */ public void setUndoState(boolean isEnabled) { undoMenuItem.setDisable(!isEnabled); } /** * Defines the state of the Redo menu item. * * @param isEnabled makes the menu item enabled if true, disabled otherwise */ public void setRedoState(boolean isEnabled) { redoMenuItem.setDisable(!isEnabled); } /** * Sets the action which is invoked when the About menu item is clicked. * * @param action the action to register */ public void setOnAboutAction(@NotNull EventHandler<ActionEvent> action) { aboutMenuItem.setOnAction(action); } /** * Sets the action which is invoked when the Preferences menu item is clicked. * * @param action the action to register */ public void setOnPreferencesAction(@NotNull EventHandler<ActionEvent> action) { preferencesMenuItem.setOnAction(action); } /** * Sets the action which is invoked when the New Tab menu item is clicked. * * @param action the action to register */ public void setOnNewTabAction(@NotNull EventHandler<ActionEvent> action) { newTabMenuItem.setOnAction(action); } /** * Sets the action which is invoked when the Close Tab menu item is clicked. * * @param action the action to register */ public void setOnCloseTabAction(@NotNull EventHandler<ActionEvent> action) { closeTabMenuItem.setOnAction(action); } /** * Sets the action which is invoked when the Next Tab menu item is clicked. * * @param action the action to register */ public void setOnNextTabAction(@NotNull EventHandler<ActionEvent> action) { nextTabMenuItem.setOnAction(action); } /** * Sets the action which is invoked when the Previous Tab menu item is clicked. * * @param action the action to register */ public void setOnPrevTabAction(@NotNull EventHandler<ActionEvent> action) { prevTabMenuItem.setOnAction(action); } /** * Sets the action which is invoked when the Open File menu item is clicked. * * @param action the action to register */ public void setOnOpenFileAction(@NotNull EventHandler<ActionEvent> action) { openFileMenuItem.setOnAction(action); } /** * Sets the action which is invoked when * the Recent File menu item in the Open Recent menu is clicked. * * @param action the action to register */ public void setOnOpenRecentAction(@NotNull EventHandler<RecentFileEvent> action) { recentFileEventHandler = action; } /** * Sets the action which is invoked when the Clear menu item * in the Open Recent menu is clicked. 
* * @param action the action to register */ public void setOnClearRecentAction(@NotNull EventHandler<ActionEvent> action) { clearRecentMenuItem.setOnAction(action); } /** * Sets the action which is invoked when the Save File menu item is clicked. * * @param action the action to register */ public void setOnSaveFileAction(@NotNull EventHandler<ActionEvent> action) { saveFileMenuItem.setOnAction(action); } /** * Sets the action which is invoked when the Save File as menu item is clicked. * * @param action the action to register */ public void setOnSaveAsAction(@NotNull EventHandler<ActionEvent> action) { saveFileAsMenuItem.setOnAction(action); } /** * Sets the action which is invoked when the Undo menu item is clicked. * * @param action the action to register */ public void setOnUndoAction(@NotNull EventHandler<ActionEvent> action) { undoMenuItem.setOnAction(action); } /** * Sets the action which is invoked when the Redo menu item is clicked. * * @param action the action to register */ public void setOnRedoAction(@NotNull EventHandler<ActionEvent> action) { redoMenuItem.setOnAction(action); } /** * Sets the action which is invoked when the Select All menu item is clicked. * * @param action the action to register */ public void setOnSelectAllAction(@NotNull EventHandler<ActionEvent> action) { selectAllMenuItem.setOnAction(action); } /** * Sets the action which is invoked when the Cut menu item is clicked. * * @param action the action to register */ public void setOnCutAction(@NotNull EventHandler<ActionEvent> action) { cutMenuItem.setOnAction(action); } /** * Sets the action which is invoked when the Copy menu item is clicked. * * @param action the action to register */ public void setOnCopyAction(@NotNull EventHandler<ActionEvent> action) { copyMenuItem.setOnAction(action); } /** * Sets the action which is invoked when the Paste menu item is clicked. * * @param action the action to register */ public void setOnPasteAction(@NotNull EventHandler<ActionEvent> action) { pasteMenuItem.setOnAction(action); } /** * Sets the action which is invoked when the Zoom In menu item is clicked. * * @param action the action to register */ public void setOnZoomInAction(@NotNull EventHandler<ActionEvent> action) { zoomInMenuItem.setOnAction(action); } /** * Sets the action which is invoked when the Zoom Out menu item is clicked. * * @param action the action to register */ public void setOnZoomOutAction(@NotNull EventHandler<ActionEvent> action) { zoomOutMenuItem.setOnAction(action); } /** * Sets the action which is invoked when the Show Status Bar * check menu item in the Editor menu is clicked. * * @param action the action to register */ public void setOnStatusBarAction(@NotNull EventHandler<ActionEvent> action) { statusBarMenuItem.setOnAction(action); } /** * Sets the action which is invoked when the Always Show Target Field * check menu item in the Editor menu check menu item is clicked. * * @param action the action to register */ public void setOnTargetFieldAction(@NotNull EventHandler<ActionEvent> action) { targetFieldMenuItem.setOnAction(action); } /** * Sets the action which is invoked when the Wrap Lines To Editor Width * check menu item in the Editor menu is clicked. * * @param action the action to register */ public void setOnWrappingAction(@NotNull EventHandler<ActionEvent> action) { wrappingMenuItem.setOnAction(action); } /** * Sets the action which is invoked when * the Highlighter check menu item in the Editor menu is clicked. 
* * @param action the action to register */ public void setOnHighlighterAction(@NotNull EventHandler<ActionEvent> action) { highlighterMenuItem.setOnAction(action); } /** * Sets the action which is invoked when * the Line Numbers check menu item in the Editor menu is clicked. * * @param action the action to register */ public void setOnLineNumbersAction(@NotNull EventHandler<ActionEvent> action) { lineNumbersMenuItem.setOnAction(action); } /** * Sets the action which is invoked when the Run Conversion menu item is clicked. * * @param action the action to register */ public void setOnRunAction(@NotNull EventHandler<ActionEvent> action) { runMenuItem.setOnAction(action); } /** * Sets the action which is invoked when the Open Online Help menu item is clicked. * * @param action the action to register */ public void setOnOnlineHelpAction(@NotNull EventHandler<ActionEvent> action) { onlineHelpMenuItem.setOnAction(action); } /** * Sets the action which is invoked when the Open Official Site menu item is clicked. * * @param action the action to register */ public void setOnOpenSiteAction(@NotNull EventHandler<ActionEvent> action) { openSiteMenuItem.setOnAction(action); } private @NotNull Menu makeMainMenu(@NotNull Menu defaultMenu) { aboutMenuItem.setText("About SQLines Studio..."); preferencesMenuItem.setText("Preferences..."); defaultMenu.getItems().set(0, aboutMenuItem); defaultMenu.getItems().set(1, preferencesMenuItem); defaultMenu.getItems().add(2, new SeparatorMenuItem()); return defaultMenu; } private void makeFileMenu() { newTabMenuItem.setText("New Tab"); KeyCombination newTab = new KeyCodeCombination(KeyCode.T, KeyCombination.SHORTCUT_DOWN); newTabMenuItem.setAccelerator(newTab); closeTabMenuItem.setText("Close Tab"); KeyCombination closeTab = new KeyCodeCombination(KeyCode.W, KeyCombination.SHORTCUT_DOWN); closeTabMenuItem.setAccelerator(closeTab); nextTabMenuItem.setText("Next Tab"); KeyCombination nextTab = new KeyCodeCombination(KeyCode.RIGHT, KeyCombination.SHORTCUT_DOWN); nextTabMenuItem.setAccelerator(nextTab); prevTabMenuItem.setText("Previous Tab"); KeyCombination prevTab = new KeyCodeCombination(KeyCode.LEFT, KeyCombination.SHORTCUT_DOWN); prevTabMenuItem.setAccelerator(prevTab); openFileMenuItem.setText("Open File..."); KeyCombination openFile = new KeyCodeCombination(KeyCode.O, KeyCombination.SHORTCUT_DOWN); openFileMenuItem.setAccelerator(openFile); clearRecentMenuItem.setText("Clear"); openRecentMenu.setText("Open Recent"); openRecentMenu.getItems().add(new SeparatorMenuItem()); openRecentMenu.getItems().add(clearRecentMenuItem); saveFileMenuItem.setText("Save File"); KeyCombination saveFile = new KeyCodeCombination(KeyCode.S, KeyCombination.SHORTCUT_DOWN); saveFileMenuItem.setAccelerator(saveFile); saveFileAsMenuItem.setText("Save File As..."); KeyCombination saveFileAs = new KeyCodeCombination(KeyCode.S, KeyCombination.SHORTCUT_DOWN, KeyCombination.SHIFT_DOWN); saveFileAsMenuItem.setAccelerator(saveFileAs); Menu fileMenu = new Menu("File"); fileMenu.getItems().addAll(newTabMenuItem, closeTabMenuItem, nextTabMenuItem, prevTabMenuItem); fileMenu.getItems().add(new SeparatorMenuItem()); fileMenu.getItems().addAll(openFileMenuItem, openRecentMenu, saveFileMenuItem, saveFileAsMenuItem); getMenus().add(fileMenu); } private void makeEditMenu() { undoMenuItem.setText("Undo"); redoMenuItem.setText("Redo"); selectAllMenuItem.setText("Select All"); cutMenuItem.setText("Cut"); copyMenuItem.setText("Copy"); pasteMenuItem.setText("Paste"); Menu editMenu = new Menu("Edit"); 
editMenu.getItems().addAll(undoMenuItem, redoMenuItem); editMenu.getItems().add(new SeparatorMenuItem()); editMenu.getItems().addAll(selectAllMenuItem, cutMenuItem, copyMenuItem, pasteMenuItem); getMenus().add(editMenu); } private void makeViewMenu() { zoomInMenuItem.setText("Zoom In"); KeyCombination zoomIn = new KeyCodeCombination(KeyCode.A, KeyCombination.SHIFT_DOWN, KeyCombination.SHORTCUT_DOWN); zoomInMenuItem.setAccelerator(zoomIn); zoomOutMenuItem.setText("Zoom Out"); KeyCombination zoomOut = new KeyCodeCombination(KeyCode.E, KeyCombination.SHIFT_DOWN, KeyCombination.SHORTCUT_DOWN); zoomOutMenuItem.setAccelerator(zoomOut); statusBarMenuItem.setText("Show Status Bar"); targetFieldMenuItem.setText("Always Show Target Field"); wrappingMenuItem.setText("Wrap Lines To Editor Width"); highlighterMenuItem.setText("Highlighter"); lineNumbersMenuItem.setText("Line Numbers"); Menu editorMenu = new Menu("Editor"); editorMenu.getItems().addAll(wrappingMenuItem, highlighterMenuItem, lineNumbersMenuItem); Menu viewMenu = new Menu("View"); viewMenu.getItems().addAll(zoomInMenuItem, zoomOutMenuItem); viewMenu.getItems().add(new SeparatorMenuItem()); viewMenu.getItems().addAll(statusBarMenuItem, targetFieldMenuItem); viewMenu.getItems().add(new SeparatorMenuItem()); viewMenu.getItems().add(editorMenu); getMenus().add(viewMenu); } private void makeToolsMenu() { runMenuItem.setText("Run Conversion"); KeyCombination run = new KeyCodeCombination(KeyCode.R, KeyCombination.SHORTCUT_DOWN); runMenuItem.setAccelerator(run); Menu toolsMenu = new Menu("Tools"); if (System.getProperty("os.name").toLowerCase().startsWith("win")) { preferencesMenuItem.setText("Settings"); toolsMenu.getItems().add(preferencesMenuItem); toolsMenu.getItems().add(new SeparatorMenuItem()); } toolsMenu.getItems().add(runMenuItem); getMenus().add(toolsMenu); } private void makeHelpMenu() { onlineHelpMenuItem.setText("Online Help..."); openSiteMenuItem.setText("Official Site..."); Menu helpMenu = new Menu("Help"); helpMenu.getItems().addAll(onlineHelpMenuItem, openSiteMenuItem); getMenus().add(helpMenu); } }
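/* Minimal wiring sketch for the MenuBar above. Hypothetical fragment: it assumes it is executed inside a running JavaFX application and lives in the same package (the class is package-private); the recent-file path is made up. Only methods declared above are used. */
package com.sqlines.studio.view.mainwindow;

class MenuBarWiringSketch {
    static MenuBar buildMenuBar() {
        MenuBar menuBar = new MenuBar();
        // Populate the Open Recent menu and react when one of its items is clicked.
        menuBar.addRecentFile("/home/user/queries/orders.sql"); // made-up path
        menuBar.setOnOpenRecentAction(event -> System.out.println("Recent file requested"));
        menuBar.setOnClearRecentAction(event -> menuBar.clearRecentFiles());
        // With a single tab open, tab navigation and closing are disabled.
        menuBar.setCloseTabState(false);
        menuBar.setNextTabState(false);
        menuBar.setPrevTabState(false);
        // Reflect persisted view settings in the check menu items.
        menuBar.setStatusBarSelected(true);
        menuBar.setLineNumbersSelected(true);
        menuBar.setOnRunAction(event -> System.out.println("Run conversion requested"));
        return menuBar;
    }
}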
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.msiiplab.recsys.implicit; import java.util.ArrayList; import java.util.Collection; import org.apache.mahout.cf.taste.common.Refreshable; import org.apache.mahout.cf.taste.common.TasteException; import org.apache.mahout.cf.taste.impl.common.FastByIDMap; import org.apache.mahout.cf.taste.impl.common.FastIDSet; import org.apache.mahout.cf.taste.impl.common.LongPrimitiveIterator; import org.apache.mahout.cf.taste.impl.common.RefreshHelper; import org.apache.mahout.cf.taste.impl.similarity.AbstractItemSimilarity; import org.apache.mahout.cf.taste.model.DataModel; import org.apache.mahout.cf.taste.model.Preference; import org.apache.mahout.cf.taste.model.PreferenceArray; import org.apache.mahout.cf.taste.similarity.PreferenceInferrer; import org.apache.mahout.cf.taste.similarity.UserSimilarity; import com.msiiplab.recsys.lfm.AspectModelRecommender; /** * <p> * An implementation of a "similarity" based on the <a href= * "http://en.wikipedia.org/wiki/Jaccard_index#Tanimoto_coefficient_.28extended_Jaccard_coefficient.29" * > Tanimoto coefficient</a>, or extended <a * href="http://en.wikipedia.org/wiki/Jaccard_index">Jaccard coefficient</a>. * </p> * * <p> * This is intended for "binary" data sets where a user either expresses a * generic "yes" preference for an item or has no preference. The actual * preference values do not matter here, only their presence or absence. * </p> * * <p> * The value returned is in [0,1]. * </p> * * <p> * Modified by Heda Wang, by adding LFM weight to items. 
* </p> */ public final class TanimotoLFMCoefficientSimilarity extends AbstractItemSimilarity implements UserSimilarity { private FastByIDMap<Double> mUserPrefEntropy; private FastByIDMap<Double> mItemPrefEntropy; private AspectModelRecommender mAspectModelRecommender; private final int mFactor; private void refreshAspectModel() throws TasteException { mAspectModelRecommender = new AspectModelRecommender(getDataModel(), mFactor); mUserPrefEntropy = new FastByIDMap<Double>(); mItemPrefEntropy = new FastByIDMap<Double>(); int numOfLatentFactor = mAspectModelRecommender.getNumOfLatentFactor(); ArrayList<FastByIDMap<Double>> userConditional = mAspectModelRecommender.getUserConditional(); ArrayList<FastByIDMap<Double>> itemConditional = mAspectModelRecommender.getItemConditional(); ArrayList<Double> latentPrior = mAspectModelRecommender.getLatentPrior(); double[] distribution = new double[numOfLatentFactor]; for (LongPrimitiveIterator it_user = getDataModel().getUserIDs(); it_user.hasNext();) { long userID = it_user.nextLong(); for (int i = 0; i < distribution.length; i++) { distribution[i] = userConditional.get(i).get(userID) * latentPrior.get(i); } double entropy = getEntropy(distribution); mUserPrefEntropy.put(userID, entropy); } for (LongPrimitiveIterator it_item = getDataModel().getItemIDs(); it_item.hasNext();) { long itemID = it_item.nextLong(); for (int i = 0; i < distribution.length; i++) { distribution[i] = itemConditional.get(i).get(itemID) * latentPrior.get(i); } double entropy = getEntropy(distribution); mItemPrefEntropy.put(itemID, entropy); } } private double getEntropy(double[] distribution) { double sum = 0; double entropy = 0; synchronized (distribution) { for (int i = 0; i < distribution.length; i++) { sum += distribution[i]; } for (int i = 0; i < distribution.length; i++) { double p = distribution[i] / sum; if (p != 0) { entropy += - p * Math.log(p); } else { p = Double.MIN_NORMAL; entropy += - p * Math.log(p); } } } return entropy; } public TanimotoLFMCoefficientSimilarity(DataModel dataModel, int factor) { super(dataModel); mFactor = factor; try { refreshAspectModel(); } catch (TasteException e) { e.printStackTrace(); } } /** * @throws UnsupportedOperationException */ @Override public void setPreferenceInferrer(PreferenceInferrer inferrer) { throw new UnsupportedOperationException(); } @Override public double userSimilarity(long userID1, long userID2) throws TasteException { DataModel dataModel = getDataModel(); FastIDSet xPrefs = dataModel.getItemIDsFromUser(userID1); FastIDSet yPrefs = dataModel.getItemIDsFromUser(userID2); int xPrefsSize = xPrefs.size(); int yPrefsSize = yPrefs.size(); if (xPrefsSize == 0 && yPrefsSize == 0) { return Double.NaN; } if (xPrefsSize == 0 || yPrefsSize == 0) { return 0.0; } double intersection = 0.0; double union = 0.0; for (LongPrimitiveIterator it_item = xPrefs.iterator(); it_item.hasNext();) { long itemID = (long) it_item.nextLong(); double weight = mItemPrefEntropy.get(itemID); if (yPrefs.contains(itemID)) { intersection += weight; union -= weight; } union += weight; } for (LongPrimitiveIterator it_item = yPrefs.iterator(); it_item.hasNext();) { long itemID = (long) it_item.nextLong(); double weight = mItemPrefEntropy.get(itemID); union += weight; } return intersection / union; } @Override public double itemSimilarity(long itemID1, long itemID2) throws TasteException { FastIDSet preferring1 = toUserFastIDSet(getDataModel().getPreferencesForItem(itemID1)); return doItemSimilarity(itemID1, itemID2, preferring1); } private FastIDSet 
toUserFastIDSet(PreferenceArray array) { FastIDSet fastIDSet = new FastIDSet(); for (Preference preference : array) { fastIDSet.add(preference.getUserID()); } return fastIDSet; } @Override public double[] itemSimilarities(long itemID1, long[] itemID2s) throws TasteException { FastIDSet preferring1 = toUserFastIDSet(getDataModel().getPreferencesForItem(itemID1)); int length = itemID2s.length; double[] result = new double[length]; for (int i = 0; i < length; i++) { result[i] = doItemSimilarity(itemID1, itemID2s[i], preferring1); } return result; } private double doItemSimilarity(long itemID1, long itemID2, FastIDSet preferring1) throws TasteException { double intersection = 0.0; double union = 0.0; for (Preference pref : getDataModel().getPreferencesForItem(itemID2)) { long userID = pref.getUserID(); double weight = mUserPrefEntropy.get(userID); if (preferring1.contains(userID)) { intersection += weight; union -= weight; } union += weight; } for (LongPrimitiveIterator it_user = preferring1.iterator(); it_user.hasNext();) { long userID = (long) it_user.nextLong(); double weight = mUserPrefEntropy.get(userID); union += weight; } if (intersection == 0) { return Double.NaN; } return intersection / union; } @Override public void refresh(Collection<Refreshable> alreadyRefreshed) { alreadyRefreshed = RefreshHelper.buildRefreshed(alreadyRefreshed); RefreshHelper.maybeRefresh(alreadyRefreshed, getDataModel()); try { refreshAspectModel(); } catch (TasteException e) { e.printStackTrace(); } } @Override public String toString() { return "TanimotoCoefficientSimilarity[dataModel:" + getDataModel() + ']'; } }
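/* Self-contained sketch of the entropy-weighted Tanimoto computation performed by userSimilarity and doItemSimilarity above. The IDs and weights below are made-up illustrative values; in the class above the weights come from the aspect-model entropies. */
import java.util.Map;
import java.util.Set;

public class WeightedTanimotoSketch {
    static double weightedTanimoto(Set<Long> xPrefs, Set<Long> yPrefs, Map<Long, Double> weights) {
        double intersection = 0.0;
        double union = 0.0;
        for (long id : xPrefs) {
            double w = weights.get(id);
            if (yPrefs.contains(id)) {
                intersection += w;
                union -= w; // shared IDs cancel here and are counted once in the y loop
            }
            union += w;
        }
        for (long id : yPrefs) {
            union += weights.get(id);
        }
        return intersection / union;
    }

    public static void main(String[] args) {
        Set<Long> x = Set.of(1L, 2L, 3L);
        Set<Long> y = Set.of(2L, 3L, 4L);
        Map<Long, Double> w = Map.of(1L, 0.5, 2L, 1.0, 3L, 2.0, 4L, 0.5);
        // intersection = 1.0 + 2.0 = 3.0, union = 0.5 + 1.0 + 2.0 + 0.5 = 4.0 -> prints 0.75
        System.out.println(weightedTanimoto(x, y, w));
    }
}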
/******************************************************************************* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. *******************************************************************************/ package org.apache.ofbiz.birt; import java.io.File; import java.io.IOException; import java.io.OutputStream; import java.io.StringWriter; import java.sql.SQLException; import java.util.HashMap; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.logging.Level; import javax.servlet.ServletContext; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.servlet.http.HttpSession; import org.apache.ofbiz.base.util.Debug; import org.apache.ofbiz.base.util.GeneralException; import org.apache.ofbiz.base.util.UtilGenerics; import org.apache.ofbiz.base.util.UtilMisc; import org.apache.ofbiz.base.util.UtilProperties; import org.apache.ofbiz.base.util.UtilValidate; import org.apache.ofbiz.base.util.string.FlexibleStringExpander; import org.apache.ofbiz.birt.flexible.BirtUtil; import org.apache.ofbiz.entity.Delegator; import org.apache.ofbiz.entity.GenericValue; import org.apache.ofbiz.entity.condition.EntityCondition; import org.apache.ofbiz.entity.util.EntityQuery; import org.apache.ofbiz.service.GenericServiceException; import org.apache.ofbiz.service.LocalDispatcher; import org.apache.ofbiz.service.ServiceUtil; import org.apache.ofbiz.webapp.WebAppUtil; import org.eclipse.birt.report.engine.api.EXCELRenderOption; import org.eclipse.birt.report.engine.api.EngineConfig; import org.eclipse.birt.report.engine.api.EngineException; import org.eclipse.birt.report.engine.api.HTMLRenderOption; import org.eclipse.birt.report.engine.api.HTMLServerImageHandler; import org.eclipse.birt.report.engine.api.IPDFRenderOption; import org.eclipse.birt.report.engine.api.IReportEngine; import org.eclipse.birt.report.engine.api.IReportRunnable; import org.eclipse.birt.report.engine.api.IRunAndRenderTask; import org.eclipse.birt.report.engine.api.PDFRenderOption; import org.eclipse.birt.report.engine.api.RenderOption; public final class BirtWorker { public final static String module = BirtWorker.class.getName(); private final static String BIRT_PARAMETERS = "birtParameters"; private final static String BIRT_LOCALE = "birtLocale"; private final static String BIRT_IMAGE_DIRECTORY = "birtImageDirectory"; private final static String BIRT_CONTENT_TYPE = "birtContentType"; private final static String BIRT_OUTPUT_FILE_NAME = "birtOutputFileName"; private static final String resourceError = "BirtErrorUiLabels"; private final static HTMLServerImageHandler imageHandler = new HTMLServerImageHandler(); private BirtWorker() {} public static final Map<Integer, Level> levelIntMap = new HashMap<>(); static { 
levelIntMap.put(Debug.ERROR, Level.SEVERE); levelIntMap.put(Debug.TIMING, Level.FINE); levelIntMap.put(Debug.INFO, Level.INFO); levelIntMap.put(Debug.IMPORTANT, Level.INFO); levelIntMap.put(Debug.WARNING, Level.WARNING); levelIntMap.put(Debug.ERROR, Level.SEVERE); levelIntMap.put(Debug.FATAL, Level.ALL); levelIntMap.put(Debug.ALWAYS, Level.ALL); } /** * export report * @param design * @param context * @param contentType * @param output * @throws EngineException * @throws GeneralException * @throws SQLException */ public static void exportReport(IReportRunnable design, Map<String, ? extends Object> context, String contentType, OutputStream output) throws EngineException, GeneralException, SQLException { Locale birtLocale = (Locale) context.get(BIRT_LOCALE); String birtImageDirectory = (String) context.get(BIRT_IMAGE_DIRECTORY); if (contentType == null) { contentType = "text/html"; } else { contentType = contentType.toLowerCase(); } if (birtImageDirectory == null) { birtImageDirectory = "/"; } Debug.logInfo("Get report engine", module); IReportEngine engine = BirtFactory.getReportEngine(); IRunAndRenderTask task = engine.createRunAndRenderTask(design); if (birtLocale != null) { Debug.logInfo("Set BIRT locale:" + birtLocale, module); task.setLocale(birtLocale); } // set parameters if exists Map<String, Object> parameters = UtilGenerics.cast(context.get(BirtWorker.getBirtParameters())); if (parameters != null) { //Debug.logInfo("Set BIRT parameters:" + parameters, module); task.setParameterValues(parameters); } // set output options if (!BirtUtil.isSupportedMimeType(contentType)) { throw new GeneralException("Unknown content type : " + contentType); } RenderOption options = new RenderOption(); options.setOutputFormat(BirtUtil.getMimeTypeOutputFormat(contentType)); //specific process for mimetype if ("text/html".equals(contentType)) { // HTML HTMLRenderOption htmlOptions = new HTMLRenderOption(options); htmlOptions.setImageDirectory(birtImageDirectory); htmlOptions.setBaseImageURL(birtImageDirectory); options.setImageHandler(imageHandler); } else if ("application/pdf".equals(contentType)) { // PDF PDFRenderOption pdfOptions = new PDFRenderOption(options); pdfOptions.setOption(IPDFRenderOption.PAGE_OVERFLOW, Boolean.TRUE); } else if ("application/vnd.ms-excel".equals(contentType)) { // MS Excel new EXCELRenderOption(options); } options.setOutputStream(output); task.setRenderOption(options); // run report if (Debug.infoOn()) { Debug.logInfo("BIRT's locale is: " + task.getLocale(), module); Debug.logInfo("Run report's task", module); } task.run(); task.close(); } /** * set web context objects * @param appContext * @param request * @param response */ public static void setWebContextObjects(Map<String, Object> appContext, HttpServletRequest request, HttpServletResponse response) throws GeneralException { HttpSession session = request.getSession(); ServletContext servletContext = session.getServletContext(); if (appContext == null || servletContext == null) { throw new GeneralException("The context reporting is empty, check your configuration"); } // initialize the delegator appContext.put("delegator", WebAppUtil.getDelegator(servletContext)); // initialize security appContext.put("security", WebAppUtil.getSecurity(servletContext)); // initialize the services dispatcher appContext.put("dispatcher", WebAppUtil.getDispatcher(servletContext)); } public static String getBirtParameters () { return BIRT_PARAMETERS; } public static String getBirtLocale () { return BIRT_LOCALE; } public static String 
getBirtImageDirectory () { return BIRT_IMAGE_DIRECTORY; } public static String getBirtContentType () { return BIRT_CONTENT_TYPE; } public static String getBirtOutputFileName () { return BIRT_OUTPUT_FILE_NAME; } //TODO documentation public static String recordReportContent(Delegator delegator, LocalDispatcher dispatcher, Map<String, Object> context) throws GeneralException { Locale locale = (Locale) context.get("locale"); String description = (String) context.get("description"); String reportName = (String) context.get("reportName"); String writeFilters = (String) context.get("writeFilters"); GenericValue userLogin = (GenericValue) context.get("userLogin"); String entityViewName = (String) context.get("entityViewName"); String serviceName = (String) context.get("serviceName"); String masterContentId = (String) context.get("masterContentId"); String dataResourceId = delegator.getNextSeqId("DataResource"); String contentId = delegator.getNextSeqId("Content"); context.put("contentId", contentId); if (UtilValidate.isEmpty(serviceName) && UtilValidate.isEmpty(entityViewName)) { throw new GenericServiceException("Service and entity name cannot be both empty"); } String modelType = null; String modelElementName = null; String workflowType = null; if (UtilValidate.isEmpty(serviceName)) { modelElementName = entityViewName; workflowType = "Entity"; } else { modelElementName = serviceName; workflowType = "Service"; } //resolve the path location to store the RptDesign file, check if the file already exists under this name and increment index name if needed List<GenericValue> listRptDesigns = null; EntityCondition entityConditionRpt = EntityCondition.makeCondition("contentTypeId", "RPTDESIGN"); String templatePathLocation = BirtUtil.resolveTemplatePathLocation(); File templatePathLocationDir = new File(templatePathLocation); if (!templatePathLocationDir.exists()) { boolean created = templatePathLocationDir.mkdirs(); if (!created) { new GeneralException(UtilProperties.getMessage(resourceError, "BirtErrorCannotLocateReportFolder", locale)); } } int i = 0; String templateFileLocation = null; EntityCondition ecl = null; do { StringBuffer rptDesignNameSb = new StringBuffer(templatePathLocation); rptDesignNameSb.append(BirtUtil.encodeReportName(reportName)); rptDesignNameSb.append("_").append(i); rptDesignNameSb.append(".rptdesign"); templateFileLocation = rptDesignNameSb.toString(); EntityCondition entityConditionOnName = EntityCondition.makeCondition("drObjectInfo", templateFileLocation); ecl = EntityCondition.makeCondition(UtilMisc.toList(entityConditionRpt, entityConditionOnName)); i++; } while (EntityQuery.use(delegator).from("ContentDataResourceView").where(ecl).queryCount() > 0); //resolve the initial form structure from master content Map<String, Object> resultElectronicText = dispatcher.runSync("getElectronicText", UtilMisc.toMap("contentId", masterContentId, "locale", locale, "userLogin", userLogin)); if (ServiceUtil.isError(resultElectronicText)) { new GeneralException(ServiceUtil.getErrorMessage(resultElectronicText)); } String reportForm = (String) resultElectronicText.get("textData"); if (!reportForm.startsWith("<?xml")) { StringBuffer xmlHeaderForm = new StringBuffer("<?xml version=\"1.0\" encoding=\"UTF-8\"?>"); xmlHeaderForm.append("<forms xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:noNamespaceSchemaLocation=\"http://ofbiz.apache.org/dtds/widget-form.xsd\">"); xmlHeaderForm.append(reportForm); xmlHeaderForm.append("</forms>"); reportForm = xmlHeaderForm.toString(); } 
FlexibleStringExpander reportFormExpd = FlexibleStringExpander.getInstance(reportForm); reportForm = reportFormExpd.expandString(context); //create the content and DataResource structure dispatcher.runSync("createDataResource", UtilMisc.toMap("dataResourceId", dataResourceId, "dataResourceTypeId", "ELECTRONIC_TEXT", "dataTemplateTypeId", "FORM_COMBINED", "userLogin", userLogin)); dispatcher.runSync("createElectronicTextForm", UtilMisc.toMap("dataResourceId", dataResourceId, "textData", reportForm, "userLogin", userLogin)); dispatcher.runSync("createContent", UtilMisc.toMap("contentId", contentId, "contentTypeId", "FLEXIBLE_REPORT", "dataResourceId", dataResourceId, "statusId", "CTNT_IN_PROGRESS", "contentName", reportName, "description", description, "userLogin", userLogin)); String dataResourceIdRpt = delegator.getNextSeqId("DataResource"); String contentIdRpt = delegator.getNextSeqId("Content"); String rptDesignName = BirtUtil.encodeReportName(reportName); if (!rptDesignName.endsWith(".rptdesign")) { rptDesignName = rptDesignName.concat(".rptdesign"); } dispatcher.runSync("createDataResource", UtilMisc.toMap("dataResourceId", dataResourceIdRpt, "dataResourceTypeId", "LOCAL_FILE", "mimeTypeId", "text/rptdesign", "dataResourceName", rptDesignName, "objectInfo", templateFileLocation, "userLogin", userLogin)); dispatcher.runSync("createContent", UtilMisc.toMap("contentId", contentIdRpt, "contentTypeId", "RPTDESIGN", "dataResourceId", dataResourceIdRpt, "statusId", "CTNT_PUBLISHED", "contentName", reportName, "description", description + " (.rptDesign file)", "userLogin", userLogin)); dispatcher.runSync("createContentAssoc", UtilMisc.toMap("contentId", masterContentId, "contentIdTo", contentId, "contentAssocTypeId", "SUB_CONTENT", "userLogin", userLogin)); dispatcher.runSync("createContentAssoc", UtilMisc.toMap("contentId", contentId, "contentIdTo", contentIdRpt, "contentAssocTypeId", "SUB_CONTENT", "userLogin", userLogin)); dispatcher.runSync("createContentAttribute", UtilMisc.toMap("contentId", contentId, "attrName", workflowType, "attrValue", modelElementName, "userLogin", userLogin)); return contentId; } /** * initialize the engine's log configuration with the lowest log level enabled in debug.properties * @param config */ public static void setLogConfig(EngineConfig config) { String ofbizHome = System.getProperty("ofbiz.home"); int lowerLevel = 0; //resolve the lowest log level enabled in debug.properties; ideally this would be wired into the log4j configuration directly for (int i = 1; i < 7; i++) { if (Debug.isOn(i)) { lowerLevel = i; break; } } config.setLogConfig(UtilProperties.getPropertyValue("debug", "log4j.appender.css.dir", ofbizHome + "/runtime/logs/"), levelIntMap.get(lowerLevel)); } }
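/*
 * Usage sketch (not part of the original source): a minimal caller for the BirtWorker.exportReport
 * API above, rendering a report design to PDF. The design path "hello.rptdesign", the output file
 * "hello.pdf", and the package of BirtWorker/BirtFactory are assumptions for illustration only.
 */
import java.io.FileOutputStream;
import java.io.OutputStream;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;

import org.apache.ofbiz.birt.BirtFactory; // package assumed; adjust to where BirtFactory lives
import org.apache.ofbiz.birt.BirtWorker;  // package assumed; adjust to where BirtWorker lives
import org.eclipse.birt.report.engine.api.IReportEngine;
import org.eclipse.birt.report.engine.api.IReportRunnable;

public class ExportReportSketch {
    public static void main(String[] args) throws Exception {
        // Open a report design through the same factory the worker uses.
        IReportEngine engine = BirtFactory.getReportEngine();
        IReportRunnable design = engine.openReportDesign("hello.rptdesign"); // placeholder path

        // Context keys come from the accessors exposed by BirtWorker above.
        Map<String, Object> context = new HashMap<>();
        context.put(BirtWorker.getBirtLocale(), Locale.US);
        context.put(BirtWorker.getBirtParameters(), new HashMap<String, Object>());

        // Render to PDF; exportReport falls back to text/html when contentType is null.
        try (OutputStream out = new FileOutputStream("hello.pdf")) {
            BirtWorker.exportReport(design, context, "application/pdf", out);
        }
    }
}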
package org.jabref.logic.util; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStreamReader; import java.net.HttpURLConnection; import java.net.URL; import java.util.ArrayList; import java.util.List; import java.util.Objects; import java.util.Optional; import java.util.regex.Matcher; import java.util.regex.Pattern; import kong.unirest.json.JSONArray; import kong.unirest.json.JSONObject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Represents the Application Version with the major and minor number, the full Version String and if it's a developer version */ public class Version { public static final String JABREF_DOWNLOAD_URL = "https://downloads.jabref.org"; private static final Logger LOGGER = LoggerFactory.getLogger(Version.class); private static final Version UNKNOWN_VERSION = new Version(); private final static Pattern VERSION_PATTERN = Pattern.compile("(?<major>\\d+)(\\.(?<minor>\\d+))?(\\.(?<patch>\\d+))?(?<stage>-alpha|-beta)?(?<dev>-?dev)?.*"); private final static Pattern CI_SUFFIX_PATTERN = Pattern.compile("-ci\\.\\d+"); private static final String JABREF_GITHUB_RELEASES = "https://api.github.com/repos/JabRef/JabRef/releases"; private String fullVersion = BuildInfo.UNKNOWN_VERSION; private int major = -1; private int minor = -1; private int patch = -1; private DevelopmentStage developmentStage = DevelopmentStage.UNKNOWN; private boolean isDevelopmentVersion; /** * Dummy constructor to create a local object (and {@link Version#UNKNOWN_VERSION}) */ private Version() { } /** * @param version must be in form of following pattern: {@code (\d+)(\.(\d+))?(\.(\d+))?(-alpha|-beta)?(-?dev)?} (e.g., 3.3; 3.4-dev) * @return the parsed version or {@link Version#UNKNOWN_VERSION} if an error occurred */ public static Version parse(String version) { if ((version == null) || "".equals(version) || version.equals(BuildInfo.UNKNOWN_VERSION) || "${version}".equals(version)) { return UNKNOWN_VERSION; } Version parsedVersion = new Version(); // remove "-ci.1" suffix Matcher ciSuffixMatcher = CI_SUFFIX_PATTERN.matcher(version); version = ciSuffixMatcher.replaceAll(""); parsedVersion.fullVersion = version; Matcher matcher = VERSION_PATTERN.matcher(version); if (matcher.find()) { try { parsedVersion.major = Integer.parseInt(matcher.group("major")); String minorString = matcher.group("minor"); parsedVersion.minor = minorString == null ? 0 : Integer.parseInt(minorString); String patchString = matcher.group("patch"); parsedVersion.patch = patchString == null ? 0 : Integer.parseInt(patchString); String versionStageString = matcher.group("stage"); parsedVersion.developmentStage = versionStageString == null ? 
DevelopmentStage.STABLE : DevelopmentStage.parse(versionStageString); parsedVersion.isDevelopmentVersion = matcher.group("dev") != null; } catch (NumberFormatException e) { LOGGER.warn("Invalid version string used: " + version, e); return UNKNOWN_VERSION; } catch (IllegalArgumentException e) { LOGGER.warn("Invalid version pattern is used", e); return UNKNOWN_VERSION; } } else { LOGGER.warn("Version could not be recognized by the pattern"); return UNKNOWN_VERSION; } return parsedVersion; } /** * Grabs all the available releases from the GitHub repository */ public static List<Version> getAllAvailableVersions() throws IOException { HttpURLConnection connection = (HttpURLConnection) new URL(JABREF_GITHUB_RELEASES).openConnection(); connection.setRequestProperty("Accept-Charset", "UTF-8"); try (BufferedReader rd = new BufferedReader(new InputStreamReader(connection.getInputStream()))) { JSONArray objects = new JSONArray(rd.readLine()); List<Version> versions = new ArrayList<>(objects.length()); for (int i = 0; i < objects.length(); i++) { JSONObject jsonObject = objects.getJSONObject(i); Version version = Version.parse(jsonObject.getString("tag_name").replaceFirst("v", "")); versions.add(version); } connection.disconnect(); return versions; } } /** * @return true if this version is newer than the passed one */ public boolean isNewerThan(Version otherVersion) { Objects.requireNonNull(otherVersion); if (Objects.equals(this, otherVersion)) { return false; } else if (this.getFullVersion().equals(BuildInfo.UNKNOWN_VERSION)) { return false; } else if (otherVersion.getFullVersion().equals(BuildInfo.UNKNOWN_VERSION)) { return false; } // compare the majors if (this.getMajor() > otherVersion.getMajor()) { return true; } else if (this.getMajor() == otherVersion.getMajor()) { // if the majors are equal compare the minors if (this.getMinor() > otherVersion.getMinor()) { return true; } else if (this.getMinor() == otherVersion.getMinor()) { // if the minors are equal compare the patch numbers if (this.getPatch() > otherVersion.getPatch()) { return true; } else if (this.getPatch() == otherVersion.getPatch()) { // if the patch numbers are equal compare the development stages if (this.developmentStage.isMoreStableThan(otherVersion.developmentStage)) { return true; } else if (this.developmentStage == otherVersion.developmentStage) { // if the stage is equal check if this version is in development and the other is not return !this.isDevelopmentVersion && otherVersion.isDevelopmentVersion; } } } } return false; } /** * Checks if this version should be updated to one of the given ones. * Ignoring the other Version if this one is Stable and the other one is not. * * @return The version this one should be updated to, or an empty Optional */ public Optional<Version> shouldBeUpdatedTo(List<Version> availableVersions) { Optional<Version> newerVersion = Optional.empty(); for (Version version : availableVersions) { if (this.shouldBeUpdatedTo(version) && (!newerVersion.isPresent() || version.isNewerThan(newerVersion.get()))) { newerVersion = Optional.of(version); } } return newerVersion; } /** * Checks if this version should be updated to the given one. * Ignoring the other Version if this one is Stable and the other one is not. 
* * @return True if this version should be updated to the given one */ public boolean shouldBeUpdatedTo(Version otherVersion) { // ignoring the other version if it is not stable, except if this version itself is not stable if (developmentStage == Version.DevelopmentStage.STABLE && otherVersion.developmentStage != Version.DevelopmentStage.STABLE) { return false; } // check if the other version is newer than given one return otherVersion.isNewerThan(this); } public String getFullVersion() { return fullVersion; } public int getMajor() { return major; } public int getMinor() { return minor; } public int getPatch() { return patch; } public boolean isDevelopmentVersion() { return isDevelopmentVersion; } /** * @return The link to the changelog on GitHub to this specific version (https://github.com/JabRef/jabref/blob/vX.X/CHANGELOG.md) */ public String getChangelogUrl() { if (isDevelopmentVersion) { return "https://github.com/JabRef/jabref/blob/master/CHANGELOG.md#unreleased"; } else { StringBuilder changelogLink = new StringBuilder() .append("https://github.com/JabRef/jabref/blob/v") .append(this.getMajor()) .append(".") .append(this.getMinor()); if (this.getPatch() != 0) { changelogLink .append(".") .append(this.getPatch()); } changelogLink .append(this.developmentStage.stage) .append("/CHANGELOG.md"); return changelogLink.toString(); } } @Override public boolean equals(Object other) { if (this == other) { return true; } if (!(other instanceof Version)) { return false; } // till all the information are stripped from the fullversion this should suffice return this.getFullVersion().equals(((Version) other).getFullVersion()); } @Override public int hashCode() { return getFullVersion().hashCode(); } @Override public String toString() { return this.getFullVersion(); } public enum DevelopmentStage { UNKNOWN("", 0), ALPHA("-alpha", 1), BETA("-beta", 2), STABLE("", 3); /** * describes how stable this stage is, the higher the better */ private final int stability; private final String stage; DevelopmentStage(String stage, int stability) { this.stage = stage; this.stability = stability; } public static DevelopmentStage parse(String stage) { if (stage == null) { LOGGER.warn("The stage cannot be null"); return UNKNOWN; } else if (stage.equals(STABLE.stage)) { return STABLE; } else if (stage.equals(ALPHA.stage)) { return ALPHA; } else if (stage.equals(BETA.stage)) { return BETA; } LOGGER.warn("Unknown development stage: {}", stage); return UNKNOWN; } /** * @return true if this stage is more stable than the {@code otherStage} */ public boolean isMoreStableThan(DevelopmentStage otherStage) { return this.stability > otherStage.stability; } } }
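/*
 * Usage sketch (not part of the original source): parsing version strings and deciding whether an
 * update should be suggested, using the Version API above. The version numbers are arbitrary examples.
 */
import java.util.List;
import java.util.Optional;

import org.jabref.logic.util.Version;

public class VersionCheckSketch {
    public static void main(String[] args) {
        Version installed = Version.parse("5.1");         // parsed as 5.1.0, STABLE
        Version preRelease = Version.parse("5.2-alpha");  // newer, but not a stable release
        Version stable = Version.parse("5.3");

        System.out.println(preRelease.isNewerThan(installed));       // true
        // A stable installation ignores alpha/beta candidates:
        System.out.println(installed.shouldBeUpdatedTo(preRelease)); // false

        // From several available versions, pick the newest acceptable one, if any.
        Optional<Version> target = installed.shouldBeUpdatedTo(List.of(preRelease, stable));
        System.out.println(target.map(Version::getFullVersion).orElse("none")); // 5.3
    }
}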
/* * Copyright 2013 Guidewire Software, Inc. */ package gw.internal.gosu.ir.compiler.bytecode; import gw.internal.gosu.ir.compiler.bytecode.expression.IRArithmeticExpressionCompiler; import gw.internal.gosu.ir.compiler.bytecode.expression.IRArrayLengthExpressionCompiler; import gw.internal.gosu.ir.compiler.bytecode.expression.IRArrayLoadExpressionCompiler; import gw.internal.gosu.ir.compiler.bytecode.expression.IRBooleanLiteralCompiler; import gw.internal.gosu.ir.compiler.bytecode.expression.IRCastExpressionCompiler; import gw.internal.gosu.ir.compiler.bytecode.expression.IRCharacterLiteralCompiler; import gw.internal.gosu.ir.compiler.bytecode.expression.IRClassLiteralCompiler; import gw.internal.gosu.ir.compiler.bytecode.expression.IRCompositeExpressionCompiler; import gw.internal.gosu.ir.compiler.bytecode.expression.IRConditionalAndExpressionCompiler; import gw.internal.gosu.ir.compiler.bytecode.expression.IRConditionalOrExpressionCompiler; import gw.internal.gosu.ir.compiler.bytecode.expression.IREqualityExpressionCompiler; import gw.internal.gosu.ir.compiler.bytecode.expression.IRFieldGetExpressionCompiler; import gw.internal.gosu.ir.compiler.bytecode.expression.IRIdentifierCompiler; import gw.internal.gosu.ir.compiler.bytecode.expression.IRInstanceOfExpressionCompiler; import gw.internal.gosu.ir.compiler.bytecode.expression.IRMethodCallExpressionCompiler; import gw.internal.gosu.ir.compiler.bytecode.expression.IRNegationExpressionCompiler; import gw.internal.gosu.ir.compiler.bytecode.expression.IRNewArrayExpressionCompiler; import gw.internal.gosu.ir.compiler.bytecode.expression.IRNewExpressionCompiler; import gw.internal.gosu.ir.compiler.bytecode.expression.IRNewMultiDimensionalArrayExpressionCompiler; import gw.internal.gosu.ir.compiler.bytecode.expression.IRNotExpressionCompiler; import gw.internal.gosu.ir.compiler.bytecode.expression.IRNullLiteralCompiler; import gw.internal.gosu.ir.compiler.bytecode.expression.IRNumericLiteralCompiler; import gw.internal.gosu.ir.compiler.bytecode.expression.IRPrimitiveTypeConversionCompiler; import gw.internal.gosu.ir.compiler.bytecode.expression.IRRelationalExpressionCompiler; import gw.internal.gosu.ir.compiler.bytecode.expression.IRStringLiteralExpressionCompiler; import gw.internal.gosu.ir.compiler.bytecode.expression.IRTernaryExpressionCompiler; import gw.internal.gosu.ir.compiler.bytecode.statement.IRArrayStoreStatementCompiler; import gw.internal.gosu.ir.compiler.bytecode.statement.IRAssignmentStatementCompiler; import gw.internal.gosu.ir.compiler.bytecode.statement.IRBreakStatementCompiler; import gw.internal.gosu.ir.compiler.bytecode.statement.IRContinueStatementCompiler; import gw.internal.gosu.ir.compiler.bytecode.statement.IRDoWhileStatementCompiler; import gw.internal.gosu.ir.compiler.bytecode.statement.IREvalStatementCompiler; import gw.internal.gosu.ir.compiler.bytecode.statement.IRFieldSetStatementCompiler; import gw.internal.gosu.ir.compiler.bytecode.statement.IRForEachStatementCompiler; import gw.internal.gosu.ir.compiler.bytecode.statement.IRIfStatementCompiler; import gw.internal.gosu.ir.compiler.bytecode.statement.IRMethodCallStatementCompiler; import gw.internal.gosu.ir.compiler.bytecode.statement.IRMonitorLockAcquireCompiler; import gw.internal.gosu.ir.compiler.bytecode.statement.IRMonitorLockReleaseCompiler; import gw.internal.gosu.ir.compiler.bytecode.statement.IRNewStatementCompiler; import gw.internal.gosu.ir.compiler.bytecode.statement.IRReturnStatementCompiler; import 
gw.internal.gosu.ir.compiler.bytecode.statement.IRStatementListCompiler; import gw.internal.gosu.ir.compiler.bytecode.statement.IRSwitchStatementCompiler; import gw.internal.gosu.ir.compiler.bytecode.statement.IRSyntheticStatementCompiler; import gw.internal.gosu.ir.compiler.bytecode.statement.IRThrowStatementCompiler; import gw.internal.gosu.ir.compiler.bytecode.statement.IRTryCatchFinallyStatementCompiler; import gw.internal.gosu.ir.compiler.bytecode.statement.IRWhileStatementCompiler; import gw.lang.ir.IRElement; import gw.lang.ir.IRExpression; import gw.lang.ir.IRStatement; import gw.lang.ir.expression.IRArithmeticExpression; import gw.lang.ir.expression.IRArrayLengthExpression; import gw.lang.ir.expression.IRArrayLoadExpression; import gw.lang.ir.expression.IRBooleanLiteral; import gw.lang.ir.expression.IRCastExpression; import gw.lang.ir.expression.IRCharacterLiteral; import gw.lang.ir.expression.IRClassLiteral; import gw.lang.ir.expression.IRCompositeExpression; import gw.lang.ir.expression.IRConditionalAndExpression; import gw.lang.ir.expression.IRConditionalOrExpression; import gw.lang.ir.expression.IREqualityExpression; import gw.lang.ir.expression.IRFieldGetExpression; import gw.lang.ir.expression.IRIdentifier; import gw.lang.ir.expression.IRInstanceOfExpression; import gw.lang.ir.expression.IRMethodCallExpression; import gw.lang.ir.expression.IRNegationExpression; import gw.lang.ir.expression.IRNewArrayExpression; import gw.lang.ir.expression.IRNewExpression; import gw.lang.ir.expression.IRNewMultiDimensionalArrayExpression; import gw.lang.ir.expression.IRNoOpExpression; import gw.lang.ir.expression.IRNotExpression; import gw.lang.ir.expression.IRNullLiteral; import gw.lang.ir.expression.IRNumericLiteral; import gw.lang.ir.expression.IRPrimitiveTypeConversion; import gw.lang.ir.expression.IRRelationalExpression; import gw.lang.ir.expression.IRStringLiteralExpression; import gw.lang.ir.expression.IRTernaryExpression; import gw.lang.ir.statement.IRArrayStoreStatement; import gw.lang.ir.statement.IRAssignmentStatement; import gw.lang.ir.statement.IRBreakStatement; import gw.lang.ir.statement.IRContinueStatement; import gw.lang.ir.statement.IRDoWhileStatement; import gw.lang.ir.statement.IREvalStatement; import gw.lang.ir.statement.IRFieldSetStatement; import gw.lang.ir.statement.IRForEachStatement; import gw.lang.ir.statement.IRIfStatement; import gw.lang.ir.statement.IRMethodCallStatement; import gw.lang.ir.statement.IRMonitorLockAcquireStatement; import gw.lang.ir.statement.IRMonitorLockReleaseStatement; import gw.lang.ir.statement.IRNewStatement; import gw.lang.ir.statement.IRNoOpStatement; import gw.lang.ir.statement.IRReturnStatement; import gw.lang.ir.statement.IRStatementList; import gw.lang.ir.statement.IRSwitchStatement; import gw.lang.ir.statement.IRSyntheticStatement; import gw.lang.ir.statement.IRThrowStatement; import gw.lang.ir.statement.IRTryCatchFinallyStatement; import gw.lang.ir.statement.IRWhileStatement; public class IRBytecodeCompiler { public static void compileIRElement(IRElement element, IRBytecodeContext context) { if (element instanceof IRStatement) { compileIRStatement((IRStatement) element, context); } else { compileIRExpression((IRExpression) element, context); } } public static void compileIRStatement(IRStatement statement, IRBytecodeContext context) { if (statement == null) { return; } context.setLineNumber( statement.getLineNumber() ); if (statement instanceof IRAssignmentStatement) { IRAssignmentStatementCompiler.compile((IRAssignmentStatement) 
statement, context); } else if (statement instanceof IRFieldSetStatement) { IRFieldSetStatementCompiler.compile((IRFieldSetStatement) statement, context); } else if (statement instanceof IRIfStatement) { IRIfStatementCompiler.compile((IRIfStatement) statement, context); } else if (statement instanceof IRMethodCallStatement) { IRMethodCallStatementCompiler.compile((IRMethodCallStatement) statement, context); } else if (statement instanceof IRNewStatement ) { IRNewStatementCompiler.compile( (IRNewStatement)statement, context ); } else if (statement instanceof IRNoOpStatement) { // Do nothing } else if (statement instanceof IRReturnStatement) { IRReturnStatementCompiler.compile((IRReturnStatement) statement, context); } else if (statement instanceof IRStatementList) { IRStatementListCompiler.compile((IRStatementList) statement, context); } else if (statement instanceof IRArrayStoreStatement) { IRArrayStoreStatementCompiler.compile((IRArrayStoreStatement) statement, context); } else if (statement instanceof IRThrowStatement) { IRThrowStatementCompiler.compile((IRThrowStatement) statement, context); } else if (statement instanceof IRForEachStatement ) { IRForEachStatementCompiler.compile((IRForEachStatement) statement, context); } else if (statement instanceof IRWhileStatement ) { IRWhileStatementCompiler.compile((IRWhileStatement) statement, context); } else if (statement instanceof IRDoWhileStatement ) { IRDoWhileStatementCompiler.compile((IRDoWhileStatement) statement, context); } else if (statement instanceof IRBreakStatement ) { IRBreakStatementCompiler.compile((IRBreakStatement) statement, context); } else if (statement instanceof IRContinueStatement) { IRContinueStatementCompiler.compile((IRContinueStatement) statement, context); } else if (statement instanceof IRTryCatchFinallyStatement) { IRTryCatchFinallyStatementCompiler.compile((IRTryCatchFinallyStatement) statement, context); } else if (statement instanceof IRMonitorLockAcquireStatement ) { IRMonitorLockAcquireCompiler.compile((IRMonitorLockAcquireStatement) statement, context); } else if (statement instanceof IRMonitorLockReleaseStatement ) { IRMonitorLockReleaseCompiler.compile((IRMonitorLockReleaseStatement) statement, context); } else if (statement instanceof IRSyntheticStatement) { IRSyntheticStatementCompiler.compile((IRSyntheticStatement) statement, context); } else if (statement instanceof IRSwitchStatement) { IRSwitchStatementCompiler.compile((IRSwitchStatement) statement, context); } else if (statement instanceof IREvalStatement ) { IREvalStatementCompiler.compile((IREvalStatement) statement, context); } else { throw new IllegalArgumentException("Unrecognized statement of type " + statement.getClass()); } } public static void compileIRExpression(IRExpression expression, IRBytecodeContext context) { if (expression == null) { return; } int previousLineNumber = context.setLineNumber( expression.getLineNumber() ); try { if (expression instanceof IRArithmeticExpression) { IRArithmeticExpressionCompiler.compile((IRArithmeticExpression) expression, context); } else if (expression instanceof IRArrayLoadExpression) { IRArrayLoadExpressionCompiler.compile((IRArrayLoadExpression) expression, context); } else if (expression instanceof IRBooleanLiteral) { IRBooleanLiteralCompiler.compile((IRBooleanLiteral) expression, context); } else if (expression instanceof IRCompositeExpression) { IRCompositeExpressionCompiler.compile((IRCompositeExpression) expression, context); } else if (expression instanceof IREqualityExpression) { 
IREqualityExpressionCompiler.compile((IREqualityExpression) expression, context); } else if (expression instanceof IRFieldGetExpression) { IRFieldGetExpressionCompiler.compile((IRFieldGetExpression) expression, context); } else if (expression instanceof IRIdentifier) { IRIdentifierCompiler.compile((IRIdentifier) expression, context); } else if (expression instanceof IRMethodCallExpression) { IRMethodCallExpressionCompiler.compile((IRMethodCallExpression) expression, context); } else if (expression instanceof IRNullLiteral) { IRNullLiteralCompiler.compile((IRNullLiteral) expression, context); } else if (expression instanceof IRPrimitiveTypeConversion) { IRPrimitiveTypeConversionCompiler.compile((IRPrimitiveTypeConversion) expression, context); } else if (expression instanceof IRTernaryExpression) { IRTernaryExpressionCompiler.compile((IRTernaryExpression) expression, context); } else if (expression instanceof IRNumericLiteral) { IRNumericLiteralCompiler.compile((IRNumericLiteral) expression, context); } else if (expression instanceof IRCharacterLiteral ) { IRCharacterLiteralCompiler.compile((IRCharacterLiteral) expression, context); } else if (expression instanceof IRStringLiteralExpression) { IRStringLiteralExpressionCompiler.compile((IRStringLiteralExpression) expression, context); } else if (expression instanceof IRNewArrayExpression) { IRNewArrayExpressionCompiler.compile((IRNewArrayExpression) expression, context); } else if (expression instanceof IRArrayLengthExpression) { IRArrayLengthExpressionCompiler.compile((IRArrayLengthExpression) expression, context); } else if (expression instanceof IRCastExpression) { IRCastExpressionCompiler.compile((IRCastExpression) expression, context); } else if (expression instanceof IRNewExpression) { IRNewExpressionCompiler.compile((IRNewExpression) expression, context); } else if (expression instanceof IRRelationalExpression) { IRRelationalExpressionCompiler.compile((IRRelationalExpression) expression, context); } else if (expression instanceof IRClassLiteral) { IRClassLiteralCompiler.compile((IRClassLiteral) expression, context); } else if (expression instanceof IRNegationExpression) { IRNegationExpressionCompiler.compile((IRNegationExpression) expression, context); } else if (expression instanceof IRNotExpression) { IRNotExpressionCompiler.compile((IRNotExpression) expression, context); } else if (expression instanceof IRConditionalAndExpression) { IRConditionalAndExpressionCompiler.compile((IRConditionalAndExpression) expression, context); } else if (expression instanceof IRConditionalOrExpression) { IRConditionalOrExpressionCompiler.compile((IRConditionalOrExpression) expression, context); } else if (expression instanceof IRInstanceOfExpression) { IRInstanceOfExpressionCompiler.compile((IRInstanceOfExpression) expression, context); } else if (expression instanceof IRNewMultiDimensionalArrayExpression) { IRNewMultiDimensionalArrayExpressionCompiler.compile((IRNewMultiDimensionalArrayExpression) expression, context); } else if (expression instanceof IRNoOpExpression) { // Do nothing } else { throw new IllegalArgumentException("Unrecognized expression of type " + expression.getClass()); } } finally { context.setLineNumber( previousLineNumber ); } } }
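/*
 * Illustration sketch (not part of the original source): a standalone, self-contained example of the
 * instanceof-based dispatch pattern that IRBytecodeCompiler uses above, where one static entry point
 * routes each concrete IR node type to its dedicated compiler. The node types (Lit, Add) and the
 * DispatchSketch class are hypothetical and exist only to show the pattern; they are not Gosu APIs.
 */
public class DispatchSketch {
    interface Node { }
    static final class Lit implements Node { final int value; Lit(int value) { this.value = value; } }
    static final class Add implements Node { final Node left, right; Add(Node left, Node right) { this.left = left; this.right = right; } }

    // Single entry point that inspects the concrete type and delegates, mirroring
    // compileIRStatement/compileIRExpression, including the null check and the
    // IllegalArgumentException fallback for unrecognized node types.
    static void compile(Node n, StringBuilder out) {
        if (n == null) {
            return;
        } else if (n instanceof Lit) {
            out.append(((Lit) n).value);
        } else if (n instanceof Add) {
            Add add = (Add) n;
            compile(add.left, out);
            out.append(" + ");
            compile(add.right, out);
        } else {
            throw new IllegalArgumentException("Unrecognized node of type " + n.getClass());
        }
    }

    public static void main(String[] args) {
        StringBuilder out = new StringBuilder();
        compile(new Add(new Lit(1), new Lit(2)), out);
        System.out.println(out); // prints: 1 + 2
    }
}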
/* * Copyright (C) 2014 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package com.example.android.leanback; import android.content.Intent; import android.os.Bundle; import android.os.Handler; import android.util.Log; import android.view.View; import androidx.core.content.res.ResourcesCompat; import androidx.leanback.widget.ArrayObjectAdapter; import androidx.leanback.widget.HeaderItem; import androidx.leanback.widget.ListRow; import androidx.leanback.widget.ListRowPresenter; import androidx.leanback.widget.OnItemViewClickedListener; import androidx.leanback.widget.Presenter; import androidx.leanback.widget.Row; import androidx.leanback.widget.RowPresenter; import java.util.Random; public class BrowseAnimationFragment extends androidx.leanback.app.BrowseFragment { private static final String TAG = "leanback.BrowseAnimationFragment"; private static final int NUM_ROWS = 10; private ArrayObjectAdapter mRowsAdapter; private static Random sRand = new Random(); static class Item { final String mText; final OnItemViewClickedListener mAction; Item(String text, OnItemViewClickedListener action) { mText = text; mAction = action; } @Override public String toString() { return mText; } } @Override public void onCreate(Bundle savedInstanceState) { Log.i(TAG, "onCreate"); super.onCreate(savedInstanceState); setBadgeDrawable(ResourcesCompat.getDrawable(getActivity().getResources(), R.drawable.ic_title, getActivity().getTheme())); setTitle("Leanback Sample App"); setHeadersState(HEADERS_ENABLED); setOnSearchClickedListener(new View.OnClickListener() { @Override public void onClick(View view) { Intent intent = new Intent(getActivity(), SearchActivity.class); startActivity(intent); } }); setupRows(); setOnItemViewClickedListener(new ItemViewClickedListener()); } private void setupRows() { ListRowPresenter lrp = new ListRowPresenter(); mRowsAdapter = new ArrayObjectAdapter(lrp); for (int i = 0; i < NUM_ROWS; ++i) { mRowsAdapter.add( createRandomRow(new HeaderItem(i, "Row " + i))); } setAdapter(mRowsAdapter); } Item createRandomItem() { switch (sRand.nextInt(15)) { default: case 0: return new Item("Remove Item before", new OnItemViewClickedListener() { @Override public void onItemClicked(Presenter.ViewHolder itemViewHolder, Object item, RowPresenter.ViewHolder rowViewHolder, Row row) { ArrayObjectAdapter adapter = ((ArrayObjectAdapter) ((ListRow) row) .getAdapter()); int index = adapter.indexOf(item); if (index >= 0) { if (index > 0) index--; adapter.removeItems(index, 1); } } }); case 1: return new Item("Remove Item after", new OnItemViewClickedListener() { @Override public void onItemClicked(Presenter.ViewHolder itemViewHolder, Object item, RowPresenter.ViewHolder rowViewHolder, Row row) { ArrayObjectAdapter adapter = ((ArrayObjectAdapter) ((ListRow) row) .getAdapter()); int index = adapter.indexOf(item); if (index >= 0) { if (index < adapter.size() - 1) index++; adapter.removeItems(index, 1); } } }); case 2: return new Item("Remove Item", new OnItemViewClickedListener() { @Override public 
void onItemClicked(Presenter.ViewHolder itemViewHolder, Object item, RowPresenter.ViewHolder rowViewHolder, Row row) { ArrayObjectAdapter adapter = ((ArrayObjectAdapter) ((ListRow) row) .getAdapter()); int index = adapter.indexOf(item); if (index >= 0) { adapter.removeItems(index, 1); } } }); case 3: return new Item("Remove all Items", new OnItemViewClickedListener() { @Override public void onItemClicked(Presenter.ViewHolder itemViewHolder, Object item, RowPresenter.ViewHolder rowViewHolder, Row row) { ArrayObjectAdapter adapter = ((ArrayObjectAdapter) ((ListRow) row) .getAdapter()); adapter.clear(); } }); case 4: return new Item("add item before", new OnItemViewClickedListener() { @Override public void onItemClicked(Presenter.ViewHolder itemViewHolder, Object item, RowPresenter.ViewHolder rowViewHolder, Row row) { ArrayObjectAdapter adapter = ((ArrayObjectAdapter) ((ListRow) row) .getAdapter()); int index = adapter.indexOf(item); if (index >= 0) { adapter.add(index, createRandomItem()); } } }); case 5: return new Item("add item after", new OnItemViewClickedListener() { @Override public void onItemClicked(Presenter.ViewHolder itemViewHolder, Object item, RowPresenter.ViewHolder rowViewHolder, Row row) { ArrayObjectAdapter adapter = ((ArrayObjectAdapter) ((ListRow) row) .getAdapter()); int index = adapter.indexOf(item); if (index >= 0) { adapter.add(index + 1, createRandomItem()); } } }); case 6: return new Item("add random items before", new OnItemViewClickedListener() { @Override public void onItemClicked(Presenter.ViewHolder itemViewHolder, Object item, RowPresenter.ViewHolder rowViewHolder, Row row) { ArrayObjectAdapter adapter = ((ArrayObjectAdapter) ((ListRow) row) .getAdapter()); int index = adapter.indexOf(item); if (index >= 0) { int count = sRand.nextInt(4) + 1; for (int i = 0; i < count; i++) { adapter.add(index + i, createRandomItem()); } } } }); case 7: return new Item("add random items after", new OnItemViewClickedListener() { @Override public void onItemClicked(Presenter.ViewHolder itemViewHolder, Object item, RowPresenter.ViewHolder rowViewHolder, Row row) { ArrayObjectAdapter adapter = ((ArrayObjectAdapter) ((ListRow) row) .getAdapter()); int index = adapter.indexOf(item); if (index >= 0) { int count = sRand.nextInt(4) + 1; for (int i = 0; i < count; i++) { adapter.add(index + 1 + i, createRandomItem()); } } } }); case 8: return new Item("add row before", new OnItemViewClickedListener() { @Override public void onItemClicked(Presenter.ViewHolder itemViewHolder, Object item, RowPresenter.ViewHolder rowViewHolder, Row row) { int index = mRowsAdapter.indexOf(row); if (index >= 0) { int headerId = sRand.nextInt(); mRowsAdapter.add(index, createRandomRow(new HeaderItem( headerId, "Row " + headerId))); } } }); case 9: return new Item("add row after", new OnItemViewClickedListener() { @Override public void onItemClicked(Presenter.ViewHolder itemViewHolder, Object item, RowPresenter.ViewHolder rowViewHolder, Row row) { int index = mRowsAdapter.indexOf(row); if (index >= 0) { int headerId = sRand.nextInt(); mRowsAdapter.add( index + 1, createRandomRow(new HeaderItem( headerId, "Row " + headerId))); } } }); case 10: return new Item("delete row", new OnItemViewClickedListener() { @Override public void onItemClicked(Presenter.ViewHolder itemViewHolder, Object item, RowPresenter.ViewHolder rowViewHolder, Row row) { mRowsAdapter.remove(row); } }); case 11: return new Item("delete row before", new OnItemViewClickedListener() { @Override public void onItemClicked(Presenter.ViewHolder 
itemViewHolder, Object item, RowPresenter.ViewHolder rowViewHolder, Row row) { int index = mRowsAdapter.indexOf(row); if (index > 0) { mRowsAdapter.removeItems(index - 1, 1); } } }); case 12: return new Item("delete row after", new OnItemViewClickedListener() { @Override public void onItemClicked(Presenter.ViewHolder itemViewHolder, Object item, RowPresenter.ViewHolder rowViewHolder, Row row) { int index = mRowsAdapter.indexOf(row); if (index < mRowsAdapter.size() - 1) { mRowsAdapter.removeItems(index + 1, 1); } } }); case 13: return new Item("Replace Item before", new OnItemViewClickedListener() { @Override public void onItemClicked(Presenter.ViewHolder itemViewHolder, Object item, RowPresenter.ViewHolder rowViewHolder, Row row) { ArrayObjectAdapter adapter = ((ArrayObjectAdapter) ((ListRow) row) .getAdapter()); int index = adapter.indexOf(item); if (index >= 0) { if (index > 0) index--; adapter.replace(index, createRandomItem()); } } }); case 14: return new Item("Remove all then re-add", new OnItemViewClickedListener() { @Override public void onItemClicked(Presenter.ViewHolder itemViewHolder, Object item, RowPresenter.ViewHolder rowViewHolder, Row row) { final ArrayObjectAdapter adapter = ((ArrayObjectAdapter) ((ListRow) row) .getAdapter()); adapter.clear(); new Handler().postDelayed(new Runnable() { @Override public void run() { adapter.add(0, createRandomItem()); } }, 1000); } }); } } ListRow createRandomRow(HeaderItem header) { ArrayObjectAdapter listRowAdapter = new ArrayObjectAdapter( new StringPresenter()); for (int i = 0; i < 8; i++) { listRowAdapter.add(createRandomItem()); } return new ListRow(header, listRowAdapter); } private static final class ItemViewClickedListener implements OnItemViewClickedListener { @Override public void onItemClicked(Presenter.ViewHolder itemViewHolder, Object item, RowPresenter.ViewHolder rowViewHolder, Row row) { ((Item) item).mAction.onItemClicked(itemViewHolder, item, rowViewHolder, row); } } }
/* * Copyright 2016 Google Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package com.google.cloud.pubsub.spi.v1; import com.google.api.gax.core.PagedListResponse; import com.google.api.gax.grpc.ApiCallable; import com.google.api.gax.protobuf.PathTemplate; import com.google.iam.v1.GetIamPolicyRequest; import com.google.iam.v1.Policy; import com.google.iam.v1.SetIamPolicyRequest; import com.google.iam.v1.TestIamPermissionsRequest; import com.google.iam.v1.TestIamPermissionsResponse; import com.google.protobuf.Empty; import com.google.pubsub.v1.DeleteTopicRequest; import com.google.pubsub.v1.GetTopicRequest; import com.google.pubsub.v1.ListTopicSubscriptionsRequest; import com.google.pubsub.v1.ListTopicSubscriptionsResponse; import com.google.pubsub.v1.ListTopicsRequest; import com.google.pubsub.v1.ListTopicsResponse; import com.google.pubsub.v1.PublishRequest; import com.google.pubsub.v1.PublishResponse; import com.google.pubsub.v1.PubsubMessage; import com.google.pubsub.v1.Topic; import io.grpc.ManagedChannel; import java.io.Closeable; import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.concurrent.ScheduledExecutorService; // AUTO-GENERATED DOCUMENTATION AND SERVICE /** * Service Description: The service that an application uses to manipulate topics, and to send * messages to a topic. * * <p>This class provides the ability to make remote calls to the backing service through method * calls that map to API methods. Sample code to get started: * * <pre> * <code> * try (PublisherApi publisherApi = PublisherApi.create()) { * String formattedName = PublisherApi.formatTopicName("[PROJECT]", "[TOPIC]"); * Topic response = publisherApi.createTopic(formattedName); * } * </code> * </pre> * * <p>Note: close() needs to be called on the publisherApi object to clean up resources such as * threads. In the example above, try-with-resources is used, which automatically calls close(). * * <p>The surface of this class includes several types of Java methods for each of the API's * methods: * * <ol> * <li> A "flattened" method. With this type of method, the fields of the request type have been * converted into function parameters. It may be the case that not all fields are available as * parameters, and not every API method will have a flattened method entry point. * <li> A "request object" method. This type of method only takes one parameter, a request object, * which must be constructed before the call. Not every API method will have a request object * method. * <li> A "callable" method. This type of method takes no parameters and returns an immutable * ApiCallable object, which can be used to initiate calls to the service. * </ol> * * <p>See the individual methods for example code. * * <p>Many parameters require resource names to be formatted in a particular way. 
To assist with * these names, this class includes a format method for each type of name, and additionally a parse * method to extract the individual identifiers contained within names that are returned. * * <p>This class can be customized by passing in a custom instance of PublisherSettings to create(). * For example: * * <pre> * <code> * PublisherSettings publisherSettings = PublisherSettings.defaultBuilder() * .provideChannelWith(myCredentials) * .build(); * PublisherApi publisherApi = PublisherApi.create(publisherSettings); * </code> * </pre> */ @javax.annotation.Generated("by GAPIC") public class PublisherApi implements AutoCloseable { private final PublisherSettings settings; private final ManagedChannel channel; private final ScheduledExecutorService executor; private final List<AutoCloseable> closeables = new ArrayList<>(); private final ApiCallable<Topic, Topic> createTopicCallable; private final ApiCallable<PublishRequest, PublishResponse> publishCallable; private final ApiCallable<GetTopicRequest, Topic> getTopicCallable; private final ApiCallable<ListTopicsRequest, ListTopicsResponse> listTopicsCallable; private final ApiCallable< ListTopicsRequest, PagedListResponse<ListTopicsRequest, ListTopicsResponse, Topic>> listTopicsPagedCallable; private final ApiCallable<ListTopicSubscriptionsRequest, ListTopicSubscriptionsResponse> listTopicSubscriptionsCallable; private final ApiCallable< ListTopicSubscriptionsRequest, PagedListResponse<ListTopicSubscriptionsRequest, ListTopicSubscriptionsResponse, String>> listTopicSubscriptionsPagedCallable; private final ApiCallable<DeleteTopicRequest, Empty> deleteTopicCallable; private final ApiCallable<SetIamPolicyRequest, Policy> setIamPolicyCallable; private final ApiCallable<GetIamPolicyRequest, Policy> getIamPolicyCallable; private final ApiCallable<TestIamPermissionsRequest, TestIamPermissionsResponse> testIamPermissionsCallable; private static final PathTemplate PROJECT_PATH_TEMPLATE = PathTemplate.createWithoutUrlEncoding("projects/{project}"); private static final PathTemplate TOPIC_PATH_TEMPLATE = PathTemplate.createWithoutUrlEncoding("projects/{project}/topics/{topic}"); /** Formats a string containing the fully-qualified path to represent a project resource. */ public static final String formatProjectName(String project) { return PROJECT_PATH_TEMPLATE.instantiate("project", project); } /** Formats a string containing the fully-qualified path to represent a topic resource. */ public static final String formatTopicName(String project, String topic) { return TOPIC_PATH_TEMPLATE.instantiate( "project", project, "topic", topic); } /** Parses the project from the given fully-qualified path which represents a project resource. */ public static final String parseProjectFromProjectName(String projectName) { return PROJECT_PATH_TEMPLATE.parse(projectName).get("project"); } /** Parses the project from the given fully-qualified path which represents a topic resource. */ public static final String parseProjectFromTopicName(String topicName) { return TOPIC_PATH_TEMPLATE.parse(topicName).get("project"); } /** Parses the topic from the given fully-qualified path which represents a topic resource. */ public static final String parseTopicFromTopicName(String topicName) { return TOPIC_PATH_TEMPLATE.parse(topicName).get("topic"); } /** Constructs an instance of PublisherApi with default settings. 
*/ public static final PublisherApi create() throws IOException { return create(PublisherSettings.defaultBuilder().build()); } /** * Constructs an instance of PublisherApi, using the given settings. The channels are created * based on the settings passed in, or defaults for any settings that are not set. */ public static final PublisherApi create(PublisherSettings settings) throws IOException { return new PublisherApi(settings); } /** * Constructs an instance of PublisherApi, using the given settings. This is protected so that it * easy to make a subclass, but otherwise, the static factory methods should be preferred. */ protected PublisherApi(PublisherSettings settings) throws IOException { this.settings = settings; this.executor = settings.getExecutorProvider().getOrBuildExecutor(); this.channel = settings.getChannelProvider().getOrBuildChannel(this.executor); this.createTopicCallable = ApiCallable.create(settings.createTopicSettings(), this.channel, this.executor); this.publishCallable = ApiCallable.create(settings.publishSettings(), this.channel, this.executor); if (settings.publishSettings().getBundlerFactory() != null) { closeables.add(settings.publishSettings().getBundlerFactory()); } this.getTopicCallable = ApiCallable.create(settings.getTopicSettings(), this.channel, this.executor); this.listTopicsCallable = ApiCallable.create(settings.listTopicsSettings(), this.channel, this.executor); this.listTopicsPagedCallable = ApiCallable.createPagedVariant(settings.listTopicsSettings(), this.channel, this.executor); this.listTopicSubscriptionsCallable = ApiCallable.create(settings.listTopicSubscriptionsSettings(), this.channel, this.executor); this.listTopicSubscriptionsPagedCallable = ApiCallable.createPagedVariant( settings.listTopicSubscriptionsSettings(), this.channel, this.executor); this.deleteTopicCallable = ApiCallable.create(settings.deleteTopicSettings(), this.channel, this.executor); this.setIamPolicyCallable = ApiCallable.create(settings.setIamPolicySettings(), this.channel, this.executor); this.getIamPolicyCallable = ApiCallable.create(settings.getIamPolicySettings(), this.channel, this.executor); this.testIamPermissionsCallable = ApiCallable.create(settings.testIamPermissionsSettings(), this.channel, this.executor); if (settings.getChannelProvider().shouldAutoClose()) { closeables.add( new Closeable() { @Override public void close() throws IOException { channel.shutdown(); } }); } if (settings.getExecutorProvider().shouldAutoClose()) { closeables.add( new Closeable() { @Override public void close() throws IOException { executor.shutdown(); } }); } } public final PublisherSettings getSettings() { return settings; } // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Creates the given topic with the given name. * * <p>Sample code: * * <pre><code> * try (PublisherApi publisherApi = PublisherApi.create()) { * String formattedName = PublisherApi.formatTopicName("[PROJECT]", "[TOPIC]"); * Topic response = publisherApi.createTopic(formattedName); * } * </code></pre> * * @param name The name of the topic. It must have the format * `"projects/{project}/topics/{topic}"`. `{topic}` must start with a letter, and contain only * letters (`[A-Za-z]`), numbers (`[0-9]`), dashes (`-`), underscores (`_`), periods (`.`), * tildes (`~`), plus (`+`) or percent signs (`%`). It must be between 3 and 255 characters in * length, and it must not start with `"goog"`. 
* @throws com.google.api.gax.grpc.ApiException if the remote call fails */ public final Topic createTopic(String name) { TOPIC_PATH_TEMPLATE.validate(name, "createTopic"); Topic request = Topic.newBuilder().setName(name).build(); return createTopic(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Creates the given topic with the given name. * * <p>Sample code: * * <pre><code> * try (PublisherApi publisherApi = PublisherApi.create()) { * String formattedName = PublisherApi.formatTopicName("[PROJECT]", "[TOPIC]"); * Topic request = Topic.newBuilder() * .setName(formattedName) * .build(); * Topic response = publisherApi.createTopic(request); * } * </code></pre> * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.grpc.ApiException if the remote call fails */ private final Topic createTopic(Topic request) { return createTopicCallable().call(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Creates the given topic with the given name. * * <p>Sample code: * * <pre><code> * try (PublisherApi publisherApi = PublisherApi.create()) { * String formattedName = PublisherApi.formatTopicName("[PROJECT]", "[TOPIC]"); * Topic request = Topic.newBuilder() * .setName(formattedName) * .build(); * ListenableFuture&lt;Topic&gt; future = publisherApi.createTopicCallable().futureCall(request); * // Do something * Topic response = future.get(); * } * </code></pre> */ public final ApiCallable<Topic, Topic> createTopicCallable() { return createTopicCallable; } // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Adds one or more messages to the topic. Returns `NOT_FOUND` if the topic does not exist. The * message payload must not be empty; it must contain either a non-empty data field, or at least * one attribute. * * <p>Sample code: * * <pre><code> * try (PublisherApi publisherApi = PublisherApi.create()) { * String formattedTopic = PublisherApi.formatTopicName("[PROJECT]", "[TOPIC]"); * ByteString data = ByteString.copyFromUtf8(""); * PubsubMessage messagesElement = PubsubMessage.newBuilder() * .setData(data) * .build(); * List&lt;PubsubMessage&gt; messages = Arrays.asList(messagesElement); * PublishResponse response = publisherApi.publish(formattedTopic, messages); * } * </code></pre> * * @param topic The messages in the request will be published on this topic. * @param messages The messages to publish. * @throws com.google.api.gax.grpc.ApiException if the remote call fails */ public final PublishResponse publish(String topic, List<PubsubMessage> messages) { TOPIC_PATH_TEMPLATE.validate(topic, "publish"); PublishRequest request = PublishRequest.newBuilder().setTopic(topic).addAllMessages(messages).build(); return publish(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Adds one or more messages to the topic. Returns `NOT_FOUND` if the topic does not exist. The * message payload must not be empty; it must contain either a non-empty data field, or at least * one attribute. 
* * <p>Sample code: * * <pre><code> * try (PublisherApi publisherApi = PublisherApi.create()) { * String formattedTopic = PublisherApi.formatTopicName("[PROJECT]", "[TOPIC]"); * ByteString data = ByteString.copyFromUtf8(""); * PubsubMessage messagesElement = PubsubMessage.newBuilder() * .setData(data) * .build(); * List&lt;PubsubMessage&gt; messages = Arrays.asList(messagesElement); * PublishRequest request = PublishRequest.newBuilder() * .setTopic(formattedTopic) * .addAllMessages(messages) * .build(); * PublishResponse response = publisherApi.publish(request); * } * </code></pre> * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.grpc.ApiException if the remote call fails */ public final PublishResponse publish(PublishRequest request) { return publishCallable().call(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Adds one or more messages to the topic. Returns `NOT_FOUND` if the topic does not exist. The * message payload must not be empty; it must contain either a non-empty data field, or at least * one attribute. * * <p>Sample code: * * <pre><code> * try (PublisherApi publisherApi = PublisherApi.create()) { * String formattedTopic = PublisherApi.formatTopicName("[PROJECT]", "[TOPIC]"); * ByteString data = ByteString.copyFromUtf8(""); * PubsubMessage messagesElement = PubsubMessage.newBuilder() * .setData(data) * .build(); * List&lt;PubsubMessage&gt; messages = Arrays.asList(messagesElement); * PublishRequest request = PublishRequest.newBuilder() * .setTopic(formattedTopic) * .addAllMessages(messages) * .build(); * ListenableFuture&lt;PublishResponse&gt; future = publisherApi.publishCallable().futureCall(request); * // Do something * PublishResponse response = future.get(); * } * </code></pre> */ public final ApiCallable<PublishRequest, PublishResponse> publishCallable() { return publishCallable; } // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Gets the configuration of a topic. * * <p>Sample code: * * <pre><code> * try (PublisherApi publisherApi = PublisherApi.create()) { * String formattedTopic = PublisherApi.formatTopicName("[PROJECT]", "[TOPIC]"); * Topic response = publisherApi.getTopic(formattedTopic); * } * </code></pre> * * @param topic The name of the topic to get. * @throws com.google.api.gax.grpc.ApiException if the remote call fails */ public final Topic getTopic(String topic) { TOPIC_PATH_TEMPLATE.validate(topic, "getTopic"); GetTopicRequest request = GetTopicRequest.newBuilder().setTopic(topic).build(); return getTopic(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Gets the configuration of a topic. * * <p>Sample code: * * <pre><code> * try (PublisherApi publisherApi = PublisherApi.create()) { * String formattedTopic = PublisherApi.formatTopicName("[PROJECT]", "[TOPIC]"); * GetTopicRequest request = GetTopicRequest.newBuilder() * .setTopic(formattedTopic) * .build(); * Topic response = publisherApi.getTopic(request); * } * </code></pre> * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.grpc.ApiException if the remote call fails */ private final Topic getTopic(GetTopicRequest request) { return getTopicCallable().call(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Gets the configuration of a topic. 
* * <p>Sample code: * * <pre><code> * try (PublisherApi publisherApi = PublisherApi.create()) { * String formattedTopic = PublisherApi.formatTopicName("[PROJECT]", "[TOPIC]"); * GetTopicRequest request = GetTopicRequest.newBuilder() * .setTopic(formattedTopic) * .build(); * ListenableFuture&lt;Topic&gt; future = publisherApi.getTopicCallable().futureCall(request); * // Do something * Topic response = future.get(); * } * </code></pre> */ public final ApiCallable<GetTopicRequest, Topic> getTopicCallable() { return getTopicCallable; } // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Lists matching topics. * * <p>Sample code: * * <pre><code> * try (PublisherApi publisherApi = PublisherApi.create()) { * String formattedProject = PublisherApi.formatProjectName("[PROJECT]"); * for (Topic element : publisherApi.listTopics(formattedProject).iterateAllElements()) { * // doThingsWith(element); * } * } * </code></pre> * * @param project The name of the cloud project that topics belong to. * @throws com.google.api.gax.grpc.ApiException if the remote call fails */ public final PagedListResponse<ListTopicsRequest, ListTopicsResponse, Topic> listTopics( String project) { PROJECT_PATH_TEMPLATE.validate(project, "listTopics"); ListTopicsRequest request = ListTopicsRequest.newBuilder().setProject(project).build(); return listTopics(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Lists matching topics. * * <p>Sample code: * * <pre><code> * try (PublisherApi publisherApi = PublisherApi.create()) { * String formattedProject = PublisherApi.formatProjectName("[PROJECT]"); * ListTopicsRequest request = ListTopicsRequest.newBuilder() * .setProject(formattedProject) * .build(); * for (Topic element : publisherApi.listTopics(request).iterateAllElements()) { * // doThingsWith(element); * } * } * </code></pre> * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.grpc.ApiException if the remote call fails */ public final PagedListResponse<ListTopicsRequest, ListTopicsResponse, Topic> listTopics( ListTopicsRequest request) { return listTopicsPagedCallable().call(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Lists matching topics. * * <p>Sample code: * * <pre><code> * try (PublisherApi publisherApi = PublisherApi.create()) { * String formattedProject = PublisherApi.formatProjectName("[PROJECT]"); * ListTopicsRequest request = ListTopicsRequest.newBuilder() * .setProject(formattedProject) * .build(); * ListenableFuture&lt;PagedListResponse&lt;ListTopicsRequest,ListTopicsResponse,Topic&gt;&gt; future = publisherApi.listTopicsPagedCallable().futureCall(request); * // Do something * for (Topic element : future.get().iterateAllElements()) { * // doThingsWith(element); * } * } * </code></pre> */ public final ApiCallable< ListTopicsRequest, PagedListResponse<ListTopicsRequest, ListTopicsResponse, Topic>> listTopicsPagedCallable() { return listTopicsPagedCallable; } // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Lists matching topics. 
* * <p>Sample code: * * <pre><code> * try (PublisherApi publisherApi = PublisherApi.create()) { * String formattedProject = PublisherApi.formatProjectName("[PROJECT]"); * ListTopicsRequest request = ListTopicsRequest.newBuilder() * .setProject(formattedProject) * .build(); * while (true) { * ListTopicsResponse response = publisherApi.listTopicsCallable().call(request); * for (Topic element : response.getTopicsList()) { * // doThingsWith(element); * } * String nextPageToken = response.getNextPageToken(); * if (!Strings.isNullOrEmpty(nextPageToken)) { * request = request.toBuilder().setPageToken(nextPageToken).build(); * } else { * break; * } * } * } * </code></pre> */ public final ApiCallable<ListTopicsRequest, ListTopicsResponse> listTopicsCallable() { return listTopicsCallable; } // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Lists the name of the subscriptions for this topic. * * <p>Sample code: * * <pre><code> * try (PublisherApi publisherApi = PublisherApi.create()) { * String formattedTopic = PublisherApi.formatTopicName("[PROJECT]", "[TOPIC]"); * for (String element : publisherApi.listTopicSubscriptions(formattedTopic).iterateAllElements()) { * // doThingsWith(element); * } * } * </code></pre> * * @param topic The name of the topic that subscriptions are attached to. * @throws com.google.api.gax.grpc.ApiException if the remote call fails */ public final PagedListResponse< ListTopicSubscriptionsRequest, ListTopicSubscriptionsResponse, String> listTopicSubscriptions(String topic) { TOPIC_PATH_TEMPLATE.validate(topic, "listTopicSubscriptions"); ListTopicSubscriptionsRequest request = ListTopicSubscriptionsRequest.newBuilder().setTopic(topic).build(); return listTopicSubscriptions(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Lists the name of the subscriptions for this topic. * * <p>Sample code: * * <pre><code> * try (PublisherApi publisherApi = PublisherApi.create()) { * String formattedTopic = PublisherApi.formatTopicName("[PROJECT]", "[TOPIC]"); * ListTopicSubscriptionsRequest request = ListTopicSubscriptionsRequest.newBuilder() * .setTopic(formattedTopic) * .build(); * for (String element : publisherApi.listTopicSubscriptions(request).iterateAllElements()) { * // doThingsWith(element); * } * } * </code></pre> * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.grpc.ApiException if the remote call fails */ public final PagedListResponse< ListTopicSubscriptionsRequest, ListTopicSubscriptionsResponse, String> listTopicSubscriptions(ListTopicSubscriptionsRequest request) { return listTopicSubscriptionsPagedCallable().call(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Lists the name of the subscriptions for this topic. 
* * <p>Sample code: * * <pre><code> * try (PublisherApi publisherApi = PublisherApi.create()) { * String formattedTopic = PublisherApi.formatTopicName("[PROJECT]", "[TOPIC]"); * ListTopicSubscriptionsRequest request = ListTopicSubscriptionsRequest.newBuilder() * .setTopic(formattedTopic) * .build(); * ListenableFuture&lt;PagedListResponse&lt;ListTopicSubscriptionsRequest,ListTopicSubscriptionsResponse,String&gt;&gt; future = publisherApi.listTopicSubscriptionsPagedCallable().futureCall(request); * // Do something * for (String element : future.get().iterateAllElements()) { * // doThingsWith(element); * } * } * </code></pre> */ public final ApiCallable< ListTopicSubscriptionsRequest, PagedListResponse<ListTopicSubscriptionsRequest, ListTopicSubscriptionsResponse, String>> listTopicSubscriptionsPagedCallable() { return listTopicSubscriptionsPagedCallable; } // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Lists the name of the subscriptions for this topic. * * <p>Sample code: * * <pre><code> * try (PublisherApi publisherApi = PublisherApi.create()) { * String formattedTopic = PublisherApi.formatTopicName("[PROJECT]", "[TOPIC]"); * ListTopicSubscriptionsRequest request = ListTopicSubscriptionsRequest.newBuilder() * .setTopic(formattedTopic) * .build(); * while (true) { * ListTopicSubscriptionsResponse response = publisherApi.listTopicSubscriptionsCallable().call(request); * for (String element : response.getSubscriptionsList()) { * // doThingsWith(element); * } * String nextPageToken = response.getNextPageToken(); * if (!Strings.isNullOrEmpty(nextPageToken)) { * request = request.toBuilder().setPageToken(nextPageToken).build(); * } else { * break; * } * } * } * </code></pre> */ public final ApiCallable<ListTopicSubscriptionsRequest, ListTopicSubscriptionsResponse> listTopicSubscriptionsCallable() { return listTopicSubscriptionsCallable; } // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Deletes the topic with the given name. Returns `NOT_FOUND` if the topic does not exist. After a * topic is deleted, a new topic may be created with the same name; this is an entirely new topic * with none of the old configuration or subscriptions. Existing subscriptions to this topic are * not deleted, but their `topic` field is set to `_deleted-topic_`. * * <p>Sample code: * * <pre><code> * try (PublisherApi publisherApi = PublisherApi.create()) { * String formattedTopic = PublisherApi.formatTopicName("[PROJECT]", "[TOPIC]"); * publisherApi.deleteTopic(formattedTopic); * } * </code></pre> * * @param topic Name of the topic to delete. * @throws com.google.api.gax.grpc.ApiException if the remote call fails */ public final void deleteTopic(String topic) { TOPIC_PATH_TEMPLATE.validate(topic, "deleteTopic"); DeleteTopicRequest request = DeleteTopicRequest.newBuilder().setTopic(topic).build(); deleteTopic(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Deletes the topic with the given name. Returns `NOT_FOUND` if the topic does not exist. After a * topic is deleted, a new topic may be created with the same name; this is an entirely new topic * with none of the old configuration or subscriptions. Existing subscriptions to this topic are * not deleted, but their `topic` field is set to `_deleted-topic_`. 
* * <p>Sample code: * * <pre><code> * try (PublisherApi publisherApi = PublisherApi.create()) { * String formattedTopic = PublisherApi.formatTopicName("[PROJECT]", "[TOPIC]"); * DeleteTopicRequest request = DeleteTopicRequest.newBuilder() * .setTopic(formattedTopic) * .build(); * publisherApi.deleteTopic(request); * } * </code></pre> * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.grpc.ApiException if the remote call fails */ private final void deleteTopic(DeleteTopicRequest request) { deleteTopicCallable().call(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Deletes the topic with the given name. Returns `NOT_FOUND` if the topic does not exist. After a * topic is deleted, a new topic may be created with the same name; this is an entirely new topic * with none of the old configuration or subscriptions. Existing subscriptions to this topic are * not deleted, but their `topic` field is set to `_deleted-topic_`. * * <p>Sample code: * * <pre><code> * try (PublisherApi publisherApi = PublisherApi.create()) { * String formattedTopic = PublisherApi.formatTopicName("[PROJECT]", "[TOPIC]"); * DeleteTopicRequest request = DeleteTopicRequest.newBuilder() * .setTopic(formattedTopic) * .build(); * ListenableFuture&lt;Void&gt; future = publisherApi.deleteTopicCallable().futureCall(request); * // Do something * future.get(); * } * </code></pre> */ public final ApiCallable<DeleteTopicRequest, Empty> deleteTopicCallable() { return deleteTopicCallable; } // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Sets the access control policy on the specified resource. Replaces any existing policy. * * <p>Sample code: * * <pre><code> * try (PublisherApi publisherApi = PublisherApi.create()) { * String formattedResource = PublisherApi.formatTopicName("[PROJECT]", "[TOPIC]"); * Policy policy = Policy.newBuilder().build(); * Policy response = publisherApi.setIamPolicy(formattedResource, policy); * } * </code></pre> * * @param resource REQUIRED: The resource for which policy is being specified. Resource is usually * specified as a path, such as, projects/{project}/zones/{zone}/disks/{disk}. * @param policy REQUIRED: The complete policy to be applied to the 'resource'. The size of the * policy is limited to a few 10s of KB. An empty policy is in general a valid policy but * certain services (like Projects) might reject them. * @throws com.google.api.gax.grpc.ApiException if the remote call fails */ public final Policy setIamPolicy(String resource, Policy policy) { TOPIC_PATH_TEMPLATE.validate(resource, "setIamPolicy"); SetIamPolicyRequest request = SetIamPolicyRequest.newBuilder().setResource(resource).setPolicy(policy).build(); return setIamPolicy(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Sets the access control policy on the specified resource. Replaces any existing policy. * * <p>Sample code: * * <pre><code> * try (PublisherApi publisherApi = PublisherApi.create()) { * String formattedResource = PublisherApi.formatTopicName("[PROJECT]", "[TOPIC]"); * Policy policy = Policy.newBuilder().build(); * SetIamPolicyRequest request = SetIamPolicyRequest.newBuilder() * .setResource(formattedResource) * .setPolicy(policy) * .build(); * Policy response = publisherApi.setIamPolicy(request); * } * </code></pre> * * @param request The request object containing all of the parameters for the API call. 
* @throws com.google.api.gax.grpc.ApiException if the remote call fails */ public final Policy setIamPolicy(SetIamPolicyRequest request) { return setIamPolicyCallable().call(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Sets the access control policy on the specified resource. Replaces any existing policy. * * <p>Sample code: * * <pre><code> * try (PublisherApi publisherApi = PublisherApi.create()) { * String formattedResource = PublisherApi.formatTopicName("[PROJECT]", "[TOPIC]"); * Policy policy = Policy.newBuilder().build(); * SetIamPolicyRequest request = SetIamPolicyRequest.newBuilder() * .setResource(formattedResource) * .setPolicy(policy) * .build(); * ListenableFuture&lt;Policy&gt; future = publisherApi.setIamPolicyCallable().futureCall(request); * // Do something * Policy response = future.get(); * } * </code></pre> */ public final ApiCallable<SetIamPolicyRequest, Policy> setIamPolicyCallable() { return setIamPolicyCallable; } // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Gets the access control policy for a resource. Is empty if the policy or the resource does not * exist. * * <p>Sample code: * * <pre><code> * try (PublisherApi publisherApi = PublisherApi.create()) { * String formattedResource = PublisherApi.formatTopicName("[PROJECT]", "[TOPIC]"); * Policy response = publisherApi.getIamPolicy(formattedResource); * } * </code></pre> * * @param resource REQUIRED: The resource for which policy is being requested. Resource is usually * specified as a path, such as, projects/{project}. * @throws com.google.api.gax.grpc.ApiException if the remote call fails */ public final Policy getIamPolicy(String resource) { TOPIC_PATH_TEMPLATE.validate(resource, "getIamPolicy"); GetIamPolicyRequest request = GetIamPolicyRequest.newBuilder().setResource(resource).build(); return getIamPolicy(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Gets the access control policy for a resource. Is empty if the policy or the resource does not * exist. * * <p>Sample code: * * <pre><code> * try (PublisherApi publisherApi = PublisherApi.create()) { * String formattedResource = PublisherApi.formatTopicName("[PROJECT]", "[TOPIC]"); * GetIamPolicyRequest request = GetIamPolicyRequest.newBuilder() * .setResource(formattedResource) * .build(); * Policy response = publisherApi.getIamPolicy(request); * } * </code></pre> * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.grpc.ApiException if the remote call fails */ private final Policy getIamPolicy(GetIamPolicyRequest request) { return getIamPolicyCallable().call(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Gets the access control policy for a resource. Is empty if the policy or the resource does not * exist. * * <p>Sample code: * * <pre><code> * try (PublisherApi publisherApi = PublisherApi.create()) { * String formattedResource = PublisherApi.formatTopicName("[PROJECT]", "[TOPIC]"); * GetIamPolicyRequest request = GetIamPolicyRequest.newBuilder() * .setResource(formattedResource) * .build(); * ListenableFuture&lt;Policy&gt; future = publisherApi.getIamPolicyCallable().futureCall(request); * // Do something * Policy response = future.get(); * } * </code></pre> */ public final ApiCallable<GetIamPolicyRequest, Policy> getIamPolicyCallable() { return getIamPolicyCallable; } // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Returns permissions that a caller has on the specified resource. 
* * <p>Sample code: * * <pre><code> * try (PublisherApi publisherApi = PublisherApi.create()) { * String formattedResource = PublisherApi.formatTopicName("[PROJECT]", "[TOPIC]"); * List&lt;String&gt; permissions = new ArrayList&lt;&gt;(); * TestIamPermissionsResponse response = publisherApi.testIamPermissions(formattedResource, permissions); * } * </code></pre> * * @param resource REQUIRED: The resource for which policy detail is being requested. Resource is * usually specified as a path, such as, projects/{project}. * @param permissions The set of permissions to check for the 'resource'. Permissions with * wildcards (such as '&ast;' or 'storage.&ast;') are not allowed. * @throws com.google.api.gax.grpc.ApiException if the remote call fails */ public final TestIamPermissionsResponse testIamPermissions( String resource, List<String> permissions) { TOPIC_PATH_TEMPLATE.validate(resource, "testIamPermissions"); TestIamPermissionsRequest request = TestIamPermissionsRequest.newBuilder() .setResource(resource) .addAllPermissions(permissions) .build(); return testIamPermissions(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Returns permissions that a caller has on the specified resource. * * <p>Sample code: * * <pre><code> * try (PublisherApi publisherApi = PublisherApi.create()) { * String formattedResource = PublisherApi.formatTopicName("[PROJECT]", "[TOPIC]"); * List&lt;String&gt; permissions = new ArrayList&lt;&gt;(); * TestIamPermissionsRequest request = TestIamPermissionsRequest.newBuilder() * .setResource(formattedResource) * .addAllPermissions(permissions) * .build(); * TestIamPermissionsResponse response = publisherApi.testIamPermissions(request); * } * </code></pre> * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.grpc.ApiException if the remote call fails */ public final TestIamPermissionsResponse testIamPermissions(TestIamPermissionsRequest request) { return testIamPermissionsCallable().call(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Returns permissions that a caller has on the specified resource. * * <p>Sample code: * * <pre><code> * try (PublisherApi publisherApi = PublisherApi.create()) { * String formattedResource = PublisherApi.formatTopicName("[PROJECT]", "[TOPIC]"); * List&lt;String&gt; permissions = new ArrayList&lt;&gt;(); * TestIamPermissionsRequest request = TestIamPermissionsRequest.newBuilder() * .setResource(formattedResource) * .addAllPermissions(permissions) * .build(); * ListenableFuture&lt;TestIamPermissionsResponse&gt; future = publisherApi.testIamPermissionsCallable().futureCall(request); * // Do something * TestIamPermissionsResponse response = future.get(); * } * </code></pre> */ public final ApiCallable<TestIamPermissionsRequest, TestIamPermissionsResponse> testIamPermissionsCallable() { return testIamPermissionsCallable; } /** * Initiates an orderly shutdown in which preexisting calls continue but new calls are immediately * cancelled. */ @Override public final void close() throws Exception { for (AutoCloseable closeable : closeables) { closeable.close(); } } }
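/*
 * A minimal usage sketch for the generated PublisherApi surface above. It exercises only methods
 * documented in this class (listTopicSubscriptions, getIamPolicy, testIamPermissions, deleteTopic);
 * the project and topic names and the sketch class itself are illustrative assumptions, not part of
 * the generated sources. PublisherApi is assumed to be on the classpath (same package or imported).
 */
import com.google.iam.v1.Policy;
import com.google.iam.v1.TestIamPermissionsResponse;

import java.util.Arrays;
import java.util.List;

public class PublisherApiUsageSketch {
  public static void main(String[] args) throws Exception {
    // try-with-resources ensures close() shuts down the underlying channels when done
    try (PublisherApi publisherApi = PublisherApi.create()) {
      String topic = PublisherApi.formatTopicName("my-project", "my-topic");

      // Paged method: iterateAllElements() hides the page-token loop shown in the javadoc above
      for (String subscription : publisherApi.listTopicSubscriptions(topic).iterateAllElements()) {
        System.out.println("subscription: " + subscription);
      }

      // The IAM helpers operate on the same formatted resource name
      Policy policy = publisherApi.getIamPolicy(topic);
      List<String> permissions = Arrays.asList("pubsub.topics.publish");
      TestIamPermissionsResponse allowed = publisherApi.testIamPermissions(topic, permissions);
      System.out.println("bindings: " + policy.getBindingsCount()
          + ", granted permissions: " + allowed.getPermissionsCount());

      publisherApi.deleteTopic(topic);
    }
  }
}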
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/** A class to create and recreate UUIDs.
 * http://www.opengroup.org/onlinepubs/009629399/apdxa.htm
 */

package uuid.jena_original ;

import java.util.Locale ;

import org.apache.jena.atlas.lib.BitsLong ;
import org.slf4j.LoggerFactory ;

// TO DO
// + Comments and renaming.
// ? Move to/from string code here (string <=> pair of longs).
//   OK but unparse code makes explicit what goes where in the structures
//   parse/unparseV4 is the generic code.

// UUID and factory

/** <a href="https://tools.ietf.org/html/rfc4122">RFC 4122</a> UUID. */
public abstract class JenaUUID {
    // Variants: we only support RFC 4122, which is variant 2.
    // The code also assumes the variant is stored in 2 bits.

    //static final int Var_NCS = 0 ;
    static final int Var_Std = 2 ;          // Two bits
    //static final int Var_DCE = 2 ;        // Same as above
    //static final int Var_MS_GUID = 6 ;    // Reserved. 3 bits
    //static final int Var_Reserved = 7 ;   // Reserved for future definition. 3 bits.

    private final long mostSignificantBits;
    private final long leastSignificantBits;

    public int getVersion() { return _getVersion(mostSignificantBits, leastSignificantBits); }

    private int _getVersion(long mostSigBits, long leastSigBits) {
        return (int)BitsLong.unpack(mostSigBits, 12, 16);
    }

    public int getVariant() { return _getVariant(mostSignificantBits, leastSignificantBits); }

    private int _getVariant(long mostSigBits, long leastSigBits) {
        // This could be sensitive to the variant encoding.
        // https://tools.ietf.org/html/rfc4122#page-6
        // if ( true )
        //     return (int)BitsLong.unpack(leastSigBits, 62, 64);
        // Variable length decoding.
        int b0 = (int)BitsLong.unpack(leastSigBits, 63, 64);
        if ( b0 == 0 )
            // Bit pattern 0xx
            return 0;
        int b1 = (int)BitsLong.unpack(leastSigBits, 62, 63);
        if ( b1 == 0 )
            // Bit pattern 10x - the normal UUID variant.
return 2; int b2 = (int)BitsLong.unpack(leastSigBits, 61, 62); if ( b2 == 0 ) // 110 return 0x4; else // Bit pattern 1111 return 0x7; } public long getMostSignificantBits() { return mostSignificantBits; } public long getLeastSignificantBits() { return leastSignificantBits; } protected JenaUUID(long mostSigBits, long leastSigBits) { checkArgs(mostSigBits, leastSigBits); this.mostSignificantBits = mostSigBits; this.leastSignificantBits = leastSigBits; } private void checkArgs(long mostSigBits, long leastSigBits) { int _variant = _getVariant(mostSigBits, leastSigBits); int _version = _getVersion(mostSigBits, leastSigBits); if ( _variant == 0 && _version == 0 ) { if ( mostSigBits == 0 && leastSigBits == 0 ) return; String msg = String.format("Version = 0 : Expected: most and least significant to be 0: Got: %016x %016x", mostSigBits, leastSigBits); throw new IllegalArgumentException(msg); } if ( _variant == getImplVariant() && _version == getImplVersion() ) return; String msg = String.format("Version/variant error: Expected: (%d,%d); Got (%d,%d)", getImplVersion(), getImplVariant(), _version, _variant); throw new IllegalArgumentException(msg); } protected abstract int getImplVersion(); protected int getImplVariant() { return JenaUUID.Var_Std ; } /** Format as a string - no URI scheme **/ public String asString() { return toString() ; } /** Format as a URI - that is uuid:ABCD */ public String asURI() { return "uuid:" + toString() ; } /** Format as a URN - that is urn:uuid:ABCD */ public String asURN() { return "urn:uuid:" + toString() ; } /** Return a {@link java.util.UUID} for this Jena-generated UUID */ public java.util.UUID asUUID() { return new java.util.UUID(getMostSignificantBits(), getLeastSignificantBits()) ; } @Override public String toString() { return toString(this) ; } // Time low - which includes the incremental count. @Override public int hashCode() { return (int) BitsLong.unpack(getMostSignificantBits(), 32, 64) ; } @Override public boolean equals(Object other) { if ( this == other ) return true ; if ( other == null ) return false ; if ( ! ( other instanceof JenaUUID ) ) return false ; JenaUUID x = (JenaUUID)other ; return this.getMostSignificantBits() == x.getMostSignificantBits() && this.getLeastSignificantBits() == x.getLeastSignificantBits() ; } // ---------------------------------------------------- // Factory static UUIDFactory factory = new UUID_V1_Gen() ; public static void setFactory(UUIDFactory factory) { JenaUUID.factory = factory ; } public static UUIDFactory getFactory() { return factory ; } /** Create a UUID */ public static JenaUUID generate() { return factory.generate() ; } public static void reset() { factory.reset() ; } /** The nil UUID */ public static JenaUUID nil() { return UUID_nil.getNil() ; } public static String strNil() { return UUID_nil.getNilString() ; } public boolean isNil() { return this.equals(nil()) ; } // Or this == UUID_nil.nil because it's a singleton. 
/** Recreate a UUID from string */ public static JenaUUID parse(String s) { if ( s.equals(strNil()) ) return nil() ; // Canonical: this works in conjunction with .equals s = s.toLowerCase(Locale.ENGLISH) ; if ( s.startsWith("urn:") ) s = s.substring(4) ; if ( s.startsWith("uuid:") ) s = s.substring(5) ; if ( s.length() != 36 ) throw new UUIDFormatException("UUID string is not 36 chars long: it's " + s.length() + " [" + s + "]") ; if ( s.charAt(8) != '-' || s.charAt(13) != '-' || s.charAt(18) != '-' || s.charAt(23) != '-' ) throw new UUIDFormatException("String does not have dashes in the right places: " + s) ; // The UUID broken up into parts. // 00000000-0000-0000-0000-000000000000 // ^ ^ ^ ^ ^ // Byte: 0 4 6 8 10 // Char: 0 9 14 19 24 including hyphens int x = (int)BitsLong.unpack(s, 19, 23) ; int variant = (x >>> 14) ; int version = (int)BitsLong.unpack(s, 14, 15) ; if ( variant == Var_Std ) { switch (version) { case UUID_V1.UUID_ImplVersion: return UUID_V1_Gen.parse$(s) ; case UUID_V4.UUID_ImplVersion: return UUID_V4_Gen.parse$(s) ; } LoggerFactory.getLogger(JenaUUID.class).warn(s + " : Unsupported version: " + version) ; throw new UnsupportedOperationException("String specifies unsupported UUID version: " + version) ; } throw new UnsupportedOperationException("String specifies unsupported UUID variant: " + variant) ; } public static String toString(JenaUUID uuid) { return toString(uuid.getMostSignificantBits(), uuid.getLeastSignificantBits()) ; } /** Format using two longs - assumed valid for an UUID of some kind */ public static String toString(long mostSignificantBits, long leastSignificantBits) { StringBuffer sb = new StringBuffer(36) ; JenaUUID.toHex(sb, BitsLong.unpack(mostSignificantBits, 32, 64), 4) ; sb.append('-') ; JenaUUID.toHex(sb, BitsLong.unpack(mostSignificantBits, 16, 32), 2) ; sb.append('-') ; JenaUUID.toHex(sb, BitsLong.unpack(mostSignificantBits, 0, 16), 2) ; sb.append('-') ; JenaUUID.toHex(sb, BitsLong.unpack(leastSignificantBits, 48, 64), 2) ; sb.append('-') ; JenaUUID.toHex(sb, BitsLong.unpack(leastSignificantBits, 0, 48), 6) ; return sb.toString() ; } // ---------------------------------------------------- // Worker functions static void toHex(StringBuffer sBuff, long value, int lenBytes) { // Insert in high-low order, by nibble for (int i = 2 * lenBytes - 1; i >= 0; i--) { int shift = 4 * i ; int x = (int)(value >>> shift & 0xF) ; sBuff.append(Character.forDigit(x, 16)) ; } } static public class UUIDFormatException extends RuntimeException { public UUIDFormatException() { super() ; } public UUIDFormatException(String msg) { super(msg) ; } } }
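/*
 * A small round-trip sketch for the JenaUUID API above: generate a UUID with the default factory,
 * render it in the supported string forms, and re-parse it. The sketch class is illustrative and
 * not part of the Jena sources; it only calls methods declared in JenaUUID above.
 */
package uuid.jena_original ;

public class JenaUUIDRoundTripSketch {
    public static void main(String[] args) {
        JenaUUID uuid = JenaUUID.generate() ;                  // default factory is UUID_V1_Gen
        System.out.println("plain : " + uuid.asString()) ;     // xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
        System.out.println("uri   : " + uuid.asURI()) ;        // uuid:...
        System.out.println("urn   : " + uuid.asURN()) ;        // urn:uuid:...

        // parse() accepts the bare form as well as the uuid: and urn:uuid: prefixes
        JenaUUID again = JenaUUID.parse(uuid.asURN()) ;
        System.out.println("round-trip equal  : " + uuid.equals(again)) ;
        System.out.println("as java.util.UUID : " + uuid.asUUID()) ;

        // Malformed input raises the unchecked UUIDFormatException declared above
        try {
            JenaUUID.parse("not-a-uuid") ;
        } catch (JenaUUID.UUIDFormatException e) {
            System.out.println("rejected: " + e.getMessage()) ;
        }

        System.out.println("nil is nil: " + JenaUUID.nil().isNil()) ;
    }
}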
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.uima.collection.impl.cpm;

import org.apache.uima.UIMAFramework;
import org.apache.uima.collection.CollectionProcessingEngine;
import org.apache.uima.collection.impl.cpm.utils.DescriptorMakeUtil;
import org.apache.uima.collection.impl.cpm.utils.FunctionErrorStore;
import org.apache.uima.collection.impl.cpm.utils.TestStatusCallbackListener;
import org.apache.uima.collection.impl.metadata.cpe.CpeDescriptorFactory;
import org.apache.uima.collection.metadata.CpeDescription;
import org.apache.uima.collection.metadata.CpeIntegratedCasProcessor;
import org.apache.uima.test.junit_extension.JUnitExtension;
import org.junit.Assert;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

/**
 * This test aims to check that the CPM implements the ProcessingUnitThreadCount setting in the
 * correct manner. That means that no matter which configuration and document number is chosen,
 * every document should be processed exactly one time.
 *
 */
public class CpmProcessingTest {
  private static final String separator = System.getProperties().getProperty("file.separator");

  /**
   * @see junit.framework.TestCase#setUp()
   */
  @BeforeEach
  public void setUp() throws Exception {
    // disable schema validation -- this test uses descriptors
    // that don't validate, for some reason
    UIMAFramework.getXMLParser().enableSchemaValidation(false);
  }

  @AfterEach
  public void tearDown() throws Exception {
    FunctionErrorStore.resetCount();
  }

  /**
   * Create a single processor which has to work on only one document
   *
   * @throws Exception
   *           -
   */
  @Test
  public void testCasConsumerProcessingSingleThreadSingleDocument() throws Exception {
    // process only a single document and a single thread
    int documentCount = 1;
    int threadCount = 1;

    // setup CPM to process one document
    CollectionProcessingEngine cpe = setupCpm(documentCount, threadCount);

    // create and register a status callback listener
    TestStatusCallbackListener listener = new TestStatusCallbackListener();
    cpe.addStatusCallbackListener(listener);

    // run CPM
    cpe.process();

    // wait until CPM has finished
    while (!listener.isFinished()) {
      Thread.sleep(5);
    }

    // check if CasConsumer was called
    Assert.assertEquals("StatusCallbackListener", documentCount,
            listener.getEntityProcessCompleteCount());
    Assert.assertEquals("CasConsumer process Count", documentCount,
            FunctionErrorStore.getCasConsumerProcessCount());
    Assert.assertEquals("Annotator process count", documentCount,
            FunctionErrorStore.getAnnotatorProcessCount());
    Assert.assertEquals("Collection reader getNext count", documentCount,
            FunctionErrorStore.getCollectionReaderGetNextCount());
    Assert.assertEquals("number of annotators", threadCount,
            FunctionErrorStore.getAnnotatorCount());
  }

  /**
   * Create a single processor which has to process multiple documents
   *
   * @throws Exception
   *           -
   */
  @Test
  public void testCasConsumerProcessingSingleThreadMultipleDocuments() throws Exception {
    // process 100 documents and a single thread
    int documentCount = 100;
    int threadCount = 1;

    // setup CPM to process 100 documents
    CollectionProcessingEngine cpe = setupCpm(documentCount, threadCount);

    // create and register a status callback listener
    TestStatusCallbackListener listener = new TestStatusCallbackListener();
    cpe.addStatusCallbackListener(listener);

    // run CPM
    cpe.process();

    // wait until CPM has finished
    while (!listener.isFinished()) {
      Thread.sleep(5);
    }

    // check if CasConsumer was called
    Assert.assertEquals("StatusCallbackListener", documentCount,
            listener.getEntityProcessCompleteCount());
    Assert.assertEquals("CasConsumer process Count", documentCount,
            FunctionErrorStore.getCasConsumerProcessCount());
    Assert.assertEquals("Annotator process count", documentCount,
            FunctionErrorStore.getAnnotatorProcessCount());
    Assert.assertEquals("Collection reader getNext count", documentCount,
            FunctionErrorStore.getCollectionReaderGetNextCount());
    Assert.assertEquals("number of annotators", threadCount,
            FunctionErrorStore.getAnnotatorCount());
  }

  /**
   * Create multiple processors which have to process only one single document!
   *
   * @throws Exception
   *           -
   */
  @Test
  public void testCasConsumerProcessingMultipleThreadsSingleDocument() throws Exception {
    // process only a single document and multiple threads
    int documentCount = 1;
    int threadCount = 5;

    // setup CPM to process one document
    CollectionProcessingEngine cpe = setupCpm(documentCount, threadCount);

    // create and register a status callback listener
    TestStatusCallbackListener listener = new TestStatusCallbackListener();
    cpe.addStatusCallbackListener(listener);

    // run CPM
    cpe.process();

    // wait until CPM has finished
    while (!listener.isFinished()) {
      Thread.sleep(5);
    }

    // check if CasConsumer was called
    Assert.assertEquals("StatusCallbackListener", documentCount,
            listener.getEntityProcessCompleteCount());
    Assert.assertEquals("CasConsumer process Count", documentCount,
            FunctionErrorStore.getCasConsumerProcessCount());
    Assert.assertEquals("Annotator process count", documentCount,
            FunctionErrorStore.getAnnotatorProcessCount());
    Assert.assertEquals("Collection reader getNext count", documentCount,
            FunctionErrorStore.getCollectionReaderGetNextCount());
    Assert.assertEquals("number of annotators", threadCount,
            FunctionErrorStore.getAnnotatorCount());
  }

  /**
   * Create multiple processors which have to process multiple documents
   *
   * @throws Exception
   *           -
   */
  @Test
  public void testCasConsumerProcessingMultipleThreadsMultipleDocuments() throws Exception {
    // process 100 documents and multiple threads
    int documentCount = 100;
    int threadCount = 5;

    // setup CPM to process 100 documents
    CollectionProcessingEngine cpe = setupCpm(documentCount, threadCount);

    // create and register a status callback listener
    TestStatusCallbackListener listener = new TestStatusCallbackListener();
    cpe.addStatusCallbackListener(listener);

    // run CPM
    cpe.process();

    // wait until CPM has finished
    while (!listener.isFinished()) {
      Thread.sleep(5);
    }

    // check if CasConsumer was called
    Assert.assertEquals("StatusCallbackListener", documentCount,
            listener.getEntityProcessCompleteCount());
    Assert.assertEquals("CasConsumer process Count", documentCount,
            FunctionErrorStore.getCasConsumerProcessCount());
    Assert.assertEquals("Annotator process count", documentCount,
            FunctionErrorStore.getAnnotatorProcessCount());
    Assert.assertEquals("Collection reader getNext count", documentCount,
            FunctionErrorStore.getCollectionReaderGetNextCount());
    Assert.assertEquals("number of annotators", threadCount,
            FunctionErrorStore.getAnnotatorCount());
  }

  /**
   * setup the CPM with base functionality.
* * @param documentCount * how many documents should be processed * @param threadCount * how many threads are used by the cpm * * @return CollectionProcessingEngine - initialized cpe */ private CollectionProcessingEngine setupCpm(int documentCount, int threadCount) throws Exception { CpeDescription cpeDesc = null; CollectionProcessingEngine cpe = null; try { String colReaderBase = JUnitExtension .getFile("CpmTests" + separator + "ErrorTestCollectionReader.xml").getAbsolutePath(); String taeBase = JUnitExtension.getFile("CpmTests" + separator + "ErrorTestAnnotator.xml") .getAbsolutePath(); String casConsumerBase = JUnitExtension .getFile("CpmTests" + separator + "ErrorTestCasConsumer.xml").getAbsolutePath(); // created needed descriptors String colReaderDesc = DescriptorMakeUtil.makeCollectionReader(colReaderBase, documentCount); String taeDesc = DescriptorMakeUtil.makeAnalysisEngine(taeBase); String casConsumerDesc = DescriptorMakeUtil.makeCasConsumer(casConsumerBase); // create cpm descriptor cpeDesc = CpeDescriptorFactory.produceDescriptor(); cpeDesc.setInputQueueSize(2); cpeDesc.setOutputQueueSize(2); cpeDesc.setProcessingUnitThreadCount(threadCount); // add tae CpeIntegratedCasProcessor integratedProcessor = CpeDescriptorFactory .produceCasProcessor("ErrorTestAnnotator"); integratedProcessor.setDescriptor(taeDesc); cpeDesc.addCasProcessor(integratedProcessor); // add casConsumer CpeIntegratedCasProcessor casConsumer = CpeDescriptorFactory .produceCasProcessor("ErrorTest CasConsumer"); casConsumer.setDescriptor(casConsumerDesc); cpeDesc.addCasProcessor(casConsumer); // add collectionReader cpeDesc.addCollectionReader(colReaderDesc); // produce cpe cpe = UIMAFramework.produceCollectionProcessingEngine(cpeDesc, null, null); } catch (Exception e) { e.printStackTrace(); } return cpe; } }
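/*
 * The tests above busy-wait on listener.isFinished() with no upper bound, so a hung CPM would block
 * the build forever. A bounded variant of that wait loop could look like the sketch below; the
 * helper class, method name and timeout handling are illustrative assumptions, not part of the
 * UIMA test code.
 */
package org.apache.uima.collection.impl.cpm;

import org.apache.uima.collection.impl.cpm.utils.TestStatusCallbackListener;

public class CpmWaitSketch {

  /** Poll the callback listener until the CPM reports completion or the timeout expires. */
  static void waitForCpmToFinish(TestStatusCallbackListener listener, long timeoutMillis)
          throws InterruptedException {
    long deadline = System.currentTimeMillis() + timeoutMillis;
    while (!listener.isFinished()) {
      if (System.currentTimeMillis() > deadline) {
        throw new IllegalStateException("CPM did not finish within " + timeoutMillis + " ms");
      }
      Thread.sleep(5); // same polling interval the tests above use
    }
  }
}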
package org.keycloak.models.sessions.infinispan.compat; import org.keycloak.models.ClientModel; import org.keycloak.models.ClientSessionModel; import org.keycloak.models.KeycloakSession; import org.keycloak.models.ModelDuplicateException; import org.keycloak.models.RealmModel; import org.keycloak.models.UserModel; import org.keycloak.models.UserSessionModel; import org.keycloak.models.UserSessionProvider; import org.keycloak.models.UsernameLoginFailureModel; import org.keycloak.models.session.UserSessionPersisterProvider; import org.keycloak.models.sessions.infinispan.compat.entities.ClientSessionEntity; import org.keycloak.models.sessions.infinispan.compat.entities.UserSessionEntity; import org.keycloak.models.sessions.infinispan.compat.entities.UsernameLoginFailureEntity; import org.keycloak.models.sessions.infinispan.compat.entities.UsernameLoginFailureKey; import org.keycloak.models.utils.KeycloakModelUtils; import org.keycloak.models.utils.RealmInfoUtil; import org.keycloak.common.util.Time; import java.util.Collections; import java.util.Comparator; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; /** * @author <a href="mailto:sthorger@redhat.com">Stian Thorgersen</a> */ public class MemUserSessionProvider implements UserSessionProvider { private final KeycloakSession session; private final ConcurrentHashMap<String, UserSessionEntity> userSessions; private final ConcurrentHashMap<String, String> userSessionsByBrokerSessionId; private final ConcurrentHashMap<String, Set<String>> userSessionsByBrokerUserId; private final ConcurrentHashMap<String, ClientSessionEntity> clientSessions; private final ConcurrentHashMap<UsernameLoginFailureKey, UsernameLoginFailureEntity> loginFailures; private final ConcurrentHashMap<String, UserSessionEntity> offlineUserSessions; private final ConcurrentHashMap<String, ClientSessionEntity> offlineClientSessions; public MemUserSessionProvider(KeycloakSession session, ConcurrentHashMap<String, UserSessionEntity> userSessions, ConcurrentHashMap<String, String> userSessionsByBrokerSessionId, ConcurrentHashMap<String, Set<String>> userSessionsByBrokerUserId, ConcurrentHashMap<String, ClientSessionEntity> clientSessions, ConcurrentHashMap<UsernameLoginFailureKey, UsernameLoginFailureEntity> loginFailures, ConcurrentHashMap<String, UserSessionEntity> offlineUserSessions, ConcurrentHashMap<String, ClientSessionEntity> offlineClientSessions) { this.session = session; this.userSessions = userSessions; this.clientSessions = clientSessions; this.loginFailures = loginFailures; this.userSessionsByBrokerSessionId = userSessionsByBrokerSessionId; this.userSessionsByBrokerUserId = userSessionsByBrokerUserId; this.offlineUserSessions = offlineUserSessions; this.offlineClientSessions = offlineClientSessions; } @Override public ClientSessionModel createClientSession(RealmModel realm, ClientModel client) { ClientSessionEntity entity = new ClientSessionEntity(); entity.setId(KeycloakModelUtils.generateId()); entity.setTimestamp(Time.currentTime()); entity.setClientId(client.getId()); entity.setRealmId(realm.getId()); clientSessions.put(entity.getId(), entity); return new ClientSessionAdapter(session, this, realm, entity); } @Override public void removeClientSession(RealmModel realm, ClientSessionModel clientSession) { ClientSessionEntity entity = ((ClientSessionAdapter)clientSession).getEntity(); UserSessionModel userSession = 
clientSession.getUserSession(); if (userSession != null) { UserSessionEntity userSessionEntity = ((UserSessionAdapter)userSession).getEntity(); userSessionEntity.getClientSessions().remove(entity); } clientSessions.remove(clientSession.getId()); } @Override public ClientSessionModel getClientSession(RealmModel realm, String id) { ClientSessionEntity entity = clientSessions.get(id); return entity != null ? new ClientSessionAdapter(session, this, realm, entity) : null; } @Override public ClientSessionModel getClientSession(String id) { ClientSessionEntity entity = clientSessions.get(id); if (entity != null) { RealmModel realm = session.realms().getRealm(entity.getRealmId()); return new ClientSessionAdapter(session, this, realm, entity); } return null; } @Override public UserSessionModel createUserSession(RealmModel realm, UserModel user, String loginUsername, String ipAddress, String authMethod, boolean rememberMe, String brokerSessionId, String brokerUserId) { String id = KeycloakModelUtils.generateId(); UserSessionEntity entity = new UserSessionEntity(); entity.setId(id); entity.setRealm(realm.getId()); entity.setUser(user.getId()); entity.setLoginUsername(loginUsername); entity.setIpAddress(ipAddress); entity.setAuthMethod(authMethod); entity.setRememberMe(rememberMe); int currentTime = Time.currentTime(); entity.setStarted(currentTime); entity.setLastSessionRefresh(currentTime); entity.setBrokerSessionId(brokerSessionId); entity.setBrokerUserId(brokerUserId); userSessions.put(id, entity); if (brokerSessionId != null) { userSessionsByBrokerSessionId.put(brokerSessionId, id); } if (brokerUserId != null) { while (true) { // while loop gets around a race condition when a user session is removed Set<String> set = userSessionsByBrokerUserId.get(brokerUserId); if (set == null) { Set<String> value = new HashSet<>(); set = userSessionsByBrokerUserId.putIfAbsent(brokerUserId, value); if (set == null) { set = value; } } synchronized (set) { set.add(id); } if (userSessionsByBrokerUserId.get(brokerUserId) == set) { // we are ensured set isn't deleted before the new id is added break; } } } return new UserSessionAdapter(session, this, realm, entity); } @Override public List<UserSessionModel> getUserSessionByBrokerUserId(RealmModel realm, String brokerUserId) { Set<String> sessions = userSessionsByBrokerUserId.get(brokerUserId); if (sessions == null) return Collections.emptyList(); List<UserSessionModel> userSessions = new LinkedList<UserSessionModel>(); for (String id : sessions) { UserSessionModel userSession = getUserSession(realm, id); if (userSession != null) userSessions.add(userSession); } return userSessions; } @Override public UserSessionModel getUserSessionByBrokerSessionId(RealmModel realm, String brokerSessionId) { String id = userSessionsByBrokerSessionId.get(brokerSessionId); if (id == null) return null; return getUserSession(realm, id); } @Override public UserSessionModel getUserSession(RealmModel realm, String id) { UserSessionEntity entity = getUserSessionEntity(realm, id); return entity != null ? 
new UserSessionAdapter(session, this, realm, entity) : null; } UserSessionEntity getUserSessionEntity(RealmModel realm, String id) { UserSessionEntity entity = userSessions.get(id); if (entity != null && entity.getRealm().equals(realm.getId())) { return entity; } return null; } @Override public List<UserSessionModel> getUserSessions(RealmModel realm, UserModel user) { List<UserSessionModel> userSessions = new LinkedList<UserSessionModel>(); for (UserSessionEntity s : this.userSessions.values()) { if (s.getRealm().equals(realm.getId()) && s.getUser().equals(user.getId())) { userSessions.add(new UserSessionAdapter(session, this, realm, s)); } } return userSessions; } @Override public List<UserSessionModel> getUserSessionsByNote(RealmModel realm, String noteName, String noteValue) { List<UserSessionModel> userSessions = new LinkedList<UserSessionModel>(); for (UserSessionEntity s : this.userSessions.values()) { if (s.getRealm().equals(realm.getId()) && noteValue.equals(s.getNotes().get(noteName))) { userSessions.add(new UserSessionAdapter(session, this, realm, s)); } } return userSessions; } @Override public List<UserSessionModel> getUserSessions(RealmModel realm, ClientModel client) { return getUserSessions(realm, client, false); } protected List<UserSessionModel> getUserSessions(RealmModel realm, ClientModel client, boolean offline) { ConcurrentHashMap<String, ClientSessionEntity> clientSessions = offline ? this.offlineClientSessions : this.clientSessions; List<UserSessionEntity> userSessionEntities = new LinkedList<UserSessionEntity>(); for (ClientSessionEntity s : clientSessions.values()) { String realmId = realm.getId(); String clientId = client.getId(); if (s.getSession() != null && s.getSession().getRealm().equals(realmId) && s.getClientId().equals(clientId)) { if (!userSessionEntities.contains(s.getSession())) { userSessionEntities.add(s.getSession()); } } } List<UserSessionModel> userSessions = new LinkedList<UserSessionModel>(); for (UserSessionEntity e : userSessionEntities) { userSessions.add(new UserSessionAdapter(session, this, realm, e)); } Collections.sort(userSessions, new UserSessionSort()); return userSessions; } @Override public List<UserSessionModel> getUserSessions(RealmModel realm, ClientModel client, int firstResult, int maxResults) { return getUserSessions(realm, client, firstResult, maxResults, false); } protected List<UserSessionModel> getUserSessions(RealmModel realm, ClientModel client, int firstResult, int maxResults, boolean offline) { List<UserSessionModel> userSessions = getUserSessions(realm, client, offline); if (firstResult > userSessions.size()) { return Collections.emptyList(); } int toIndex = (firstResult + maxResults) < userSessions.size() ? firstResult + maxResults : userSessions.size(); return userSessions.subList(firstResult, toIndex); } @Override public int getActiveUserSessions(RealmModel realm, ClientModel client) { return getUserSessions(realm, client, false).size(); } @Override public void removeUserSession(RealmModel realm, UserSessionModel session) { UserSessionEntity entity = getUserSessionEntity(realm, session.getId()); if (entity != null) { userSessions.remove(entity.getId()); remove(entity, false); } } @Override public void removeUserSessions(RealmModel realm, UserModel user) { removeUserSessions(realm, user, false); } protected void removeUserSessions(RealmModel realm, UserModel user, boolean offline) { Iterator<UserSessionEntity> itr = offline ? 
offlineUserSessions.values().iterator() : userSessions.values().iterator(); while (itr.hasNext()) { UserSessionEntity s = itr.next(); if (s.getRealm().equals(realm.getId()) && s.getUser().equals(user.getId())) { itr.remove(); remove(s, offline); } } } protected void remove(UserSessionEntity s, boolean offline) { if (offline) { for (ClientSessionEntity clientSession : s.getClientSessions()) { offlineClientSessions.remove(clientSession.getId()); } } else { if (s.getBrokerSessionId() != null) { userSessionsByBrokerSessionId.remove(s.getBrokerSessionId()); } if (s.getBrokerUserId() != null) { Set<String> set = userSessionsByBrokerUserId.get(s.getBrokerUserId()); if (set != null) { synchronized (set) { set.remove(s.getId()); // this is a race condition :( // Since it will be very rare for a user to have concurrent sessions, I'm hoping we never hit this if (set.isEmpty()) userSessionsByBrokerUserId.remove(s.getBrokerUserId()); } } } for (ClientSessionEntity clientSession : s.getClientSessions()) { clientSessions.remove(clientSession.getId()); } } } @Override public void removeExpiredUserSessions(RealmModel realm) { UserSessionPersisterProvider persister = session.getProvider(UserSessionPersisterProvider.class); Iterator<UserSessionEntity> itr = userSessions.values().iterator(); while (itr.hasNext()) { UserSessionEntity s = itr.next(); if (s.getRealm().equals(realm.getId()) && (s.getLastSessionRefresh() < Time.currentTime() - realm.getSsoSessionIdleTimeout() || s.getStarted() < Time.currentTime() - realm.getSsoSessionMaxLifespan())) { itr.remove(); remove(s, false); } } int expired = Time.currentTime() - RealmInfoUtil.getDettachedClientSessionLifespan(realm); Iterator<ClientSessionEntity> citr = clientSessions.values().iterator(); while (citr.hasNext()) { ClientSessionEntity c = citr.next(); if (c.getSession() == null && c.getRealmId().equals(realm.getId()) && c.getTimestamp() < expired) { citr.remove(); } } // Remove expired offline user sessions itr = offlineUserSessions.values().iterator(); while (itr.hasNext()) { UserSessionEntity s = itr.next(); if (s.getRealm().equals(realm.getId()) && (s.getLastSessionRefresh() < Time.currentTime() - realm.getOfflineSessionIdleTimeout())) { itr.remove(); remove(s, true); // propagate to persister persister.removeUserSession(s.getId(), true); } } // Remove expired offline client sessions citr = offlineClientSessions.values().iterator(); while (citr.hasNext()) { ClientSessionEntity s = citr.next(); if (s.getRealmId().equals(realm.getId()) && (s.getTimestamp() < Time.currentTime() - realm.getOfflineSessionIdleTimeout())) { citr.remove(); // propagate to persister persister.removeClientSession(s.getId(), true); } } } @Override public void removeUserSessions(RealmModel realm) { removeUserSessions(realm, false); } protected void removeUserSessions(RealmModel realm, boolean offline) { Iterator<UserSessionEntity> itr = offline ? offlineUserSessions.values().iterator() : userSessions.values().iterator(); while (itr.hasNext()) { UserSessionEntity s = itr.next(); if (s.getRealm().equals(realm.getId())) { itr.remove(); remove(s, offline); } } Iterator<ClientSessionEntity> citr = offline ? 
offlineClientSessions.values().iterator() : clientSessions.values().iterator(); while (citr.hasNext()) { ClientSessionEntity c = citr.next(); if (c.getSession() == null && c.getRealmId().equals(realm.getId())) { citr.remove(); } } } @Override public UsernameLoginFailureModel getUserLoginFailure(RealmModel realm, String username) { UsernameLoginFailureEntity entity = loginFailures.get(new UsernameLoginFailureKey(realm.getId(), username)); return entity != null ? new UsernameLoginFailureAdapter(entity) : null; } @Override public UsernameLoginFailureModel addUserLoginFailure(RealmModel realm, String username) { UsernameLoginFailureKey key = new UsernameLoginFailureKey(realm.getId(), username); UsernameLoginFailureEntity entity = new UsernameLoginFailureEntity(username, realm.getId()); if (loginFailures.putIfAbsent(key, entity) != null) { throw new ModelDuplicateException(); } return new UsernameLoginFailureAdapter(entity); } @Override public void removeUserLoginFailure(RealmModel realm, String username) { loginFailures.remove(new UsernameLoginFailureKey(realm.getId(), username)); } @Override public void removeAllUserLoginFailures(RealmModel realm) { Iterator<UsernameLoginFailureEntity> itr = loginFailures.values().iterator(); while (itr.hasNext()) { if (itr.next().getRealm().equals(realm.getId())) { itr.remove(); } } } @Override public void onRealmRemoved(RealmModel realm) { removeUserSessions(realm, true); removeUserSessions(realm, false); removeAllUserLoginFailures(realm); } @Override public void onClientRemoved(RealmModel realm, ClientModel client) { onClientRemoved(realm, client, true); onClientRemoved(realm, client, false); } private void onClientRemoved(RealmModel realm, ClientModel client, boolean offline) { ConcurrentHashMap<String, ClientSessionEntity> clientSessionsMap = offline ? 
offlineClientSessions : clientSessions; for (ClientSessionEntity e : clientSessionsMap.values()) { if (e.getRealmId().equals(realm.getId()) && e.getClientId().equals(client.getId())) { clientSessionsMap.remove(e.getId()); e.getSession().removeClientSession(e); } } } @Override public void onUserRemoved(RealmModel realm, UserModel user) { removeUserSessions(realm, user, true); removeUserSessions(realm, user, false); loginFailures.remove(new UsernameLoginFailureKey(realm.getId(), user.getUsername())); loginFailures.remove(new UsernameLoginFailureKey(realm.getId(), user.getEmail())); } @Override public UserSessionModel createOfflineUserSession(UserSessionModel userSession) { UserSessionAdapter importedUserSession = importUserSession(userSession, true); // started and lastSessionRefresh set to current time int currentTime = Time.currentTime(); importedUserSession.getEntity().setStarted(currentTime); importedUserSession.setLastSessionRefresh(currentTime); return importedUserSession; } @Override public UserSessionAdapter importUserSession(UserSessionModel userSession, boolean offline) { UserSessionEntity entity = new UserSessionEntity(); entity.setId(userSession.getId()); entity.setRealm(userSession.getRealm().getId()); entity.setAuthMethod(userSession.getAuthMethod()); entity.setBrokerSessionId(userSession.getBrokerSessionId()); entity.setBrokerUserId(userSession.getBrokerUserId()); entity.setIpAddress(userSession.getIpAddress()); entity.setLoginUsername(userSession.getLoginUsername()); if (userSession.getNotes() != null) { entity.getNotes().putAll(userSession.getNotes()); } entity.setRememberMe(userSession.isRememberMe()); entity.setState(userSession.getState()); entity.setUser(userSession.getUser().getId()); entity.setStarted(userSession.getStarted()); entity.setLastSessionRefresh(userSession.getLastSessionRefresh()); ConcurrentHashMap<String, UserSessionEntity> sessionsMap = offline ? 
                offlineUserSessions : userSessions;

        sessionsMap.put(userSession.getId(), entity);

        return new UserSessionAdapter(session, this, userSession.getRealm(), entity);
    }

    @Override
    public UserSessionModel getOfflineUserSession(RealmModel realm, String userSessionId) {
        UserSessionEntity entity = offlineUserSessions.get(userSessionId);
        if (entity != null && entity.getRealm().equals(realm.getId())) {
            return new UserSessionAdapter(session, this, realm, entity);
        } else {
            return null;
        }
    }

    @Override
    public void removeOfflineUserSession(RealmModel realm, String userSessionId) {
        UserSessionEntity entity = offlineUserSessions.get(userSessionId);
        if (entity != null && entity.getRealm().equals(realm.getId())) {
            // the map is keyed by session id, so remove by id rather than by the entity itself
            offlineUserSessions.remove(entity.getId());
            remove(entity, true);
        }
    }

    @Override
    public ClientSessionModel createOfflineClientSession(ClientSessionModel clientSession) {
        ClientSessionAdapter offlineClientSession = importClientSession(clientSession, true);

        // update timestamp to current time
        offlineClientSession.setTimestamp(Time.currentTime());

        return offlineClientSession;
    }

    @Override
    public ClientSessionAdapter importClientSession(ClientSessionModel clientSession, boolean offline) {
        ClientSessionEntity entity = new ClientSessionEntity();
        entity.setId(clientSession.getId());
        entity.setRealmId(clientSession.getRealm().getId());

        entity.setAction(clientSession.getAction());
        entity.setAuthenticatorStatus(clientSession.getExecutionStatus());
        entity.setAuthMethod(clientSession.getAuthMethod());
        if (clientSession.getAuthenticatedUser() != null) {
            entity.setAuthUserId(clientSession.getAuthenticatedUser().getId());
        }
        entity.setClientId(clientSession.getClient().getId());
        if (clientSession.getNotes() != null) {
            entity.getNotes().putAll(clientSession.getNotes());
        }
        entity.setProtocolMappers(clientSession.getProtocolMappers());
        entity.setRedirectUri(clientSession.getRedirectUri());
        entity.setRoles(clientSession.getRoles());
        entity.setTimestamp(clientSession.getTimestamp());
        if (clientSession.getUserSessionNotes() != null) {
            entity.getUserSessionNotes().putAll(clientSession.getUserSessionNotes());
        }

        ConcurrentHashMap<String, ClientSessionEntity> clientSessionsMap = offline ?
offlineClientSessions : clientSessions; clientSessionsMap.put(clientSession.getId(), entity); return new ClientSessionAdapter(session, this, clientSession.getRealm(), entity); } @Override public ClientSessionModel getOfflineClientSession(RealmModel realm, String clientSessionId) { ClientSessionEntity entity = offlineClientSessions.get(clientSessionId); if (entity != null && entity.getRealmId().equals(realm.getId())) { return new ClientSessionAdapter(session, this, realm, entity); } else { return null; } } @Override public List<ClientSessionModel> getOfflineClientSessions(RealmModel realm, UserModel user) { List<ClientSessionModel> clientSessions = new LinkedList<>(); for (UserSessionEntity s : this.offlineUserSessions.values()) { if (s.getRealm().equals(realm.getId()) && s.getUser().equals(user.getId())) { for (ClientSessionEntity cls : s.getClientSessions()) { ClientSessionAdapter clAdapter = new ClientSessionAdapter(session, this, realm, cls); clientSessions.add(clAdapter); } } } return clientSessions; } @Override public void removeOfflineClientSession(RealmModel realm, String clientSessionId) { ClientSessionEntity entity = offlineClientSessions.get(clientSessionId); if (entity != null && entity.getRealmId().equals(realm.getId())) { offlineClientSessions.remove(entity.getId()); UserSessionEntity userSession = entity.getSession(); userSession.removeClientSession(entity); } } @Override public int getOfflineSessionsCount(RealmModel realm, ClientModel client) { return getUserSessions(realm, client, true).size(); } @Override public List<UserSessionModel> getOfflineUserSessions(RealmModel realm, ClientModel client, int first, int max) { return getUserSessions(realm, client, first, max, true); } @Override public void close() { } private class UserSessionSort implements Comparator<UserSessionModel> { @Override public int compare(UserSessionModel o1, UserSessionModel o2) { int r = o1.getStarted() - o2.getStarted(); if (r == 0) { return o1.getId().compareTo(o2.getId()); } else { return r; } } } }
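/*
 * createUserSession() above protects its broker-user index with a putIfAbsent / synchronized(set) /
 * re-check loop so that an id is never left in a set that a concurrent remove() has just detached
 * from the map. The same idiom in isolation, with generic names chosen here for illustration only:
 */
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;

public class ConcurrentIndexSketch {

    private final ConcurrentHashMap<String, Set<String>> index = new ConcurrentHashMap<>();

    void add(String key, String value) {
        while (true) { // retry until the set we wrote to is still the one mapped under the key
            Set<String> set = index.get(key);
            if (set == null) {
                Set<String> fresh = new HashSet<>();
                set = index.putIfAbsent(key, fresh);
                if (set == null) {
                    set = fresh; // we won the race and installed the new set
                }
            }
            synchronized (set) {
                set.add(value);
            }
            if (index.get(key) == set) {
                break; // nobody replaced or removed the set while we were writing to it
            }
        }
    }
}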
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.fs.s3a.s3guard; import java.io.BufferedReader; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStreamReader; import java.io.PrintStream; import java.net.URI; import java.util.Collection; import java.util.HashSet; import java.util.Set; import java.util.concurrent.Callable; import java.util.concurrent.TimeUnit; import org.apache.hadoop.fs.s3a.S3AUtils; import org.apache.hadoop.util.StopWatch; import com.google.common.base.Preconditions; import org.apache.hadoop.fs.FileSystem; import org.junit.Assume; import org.junit.Test; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.contract.ContractTestUtils; import org.apache.hadoop.fs.s3a.AbstractS3ATestBase; import org.apache.hadoop.fs.s3a.Constants; import org.apache.hadoop.fs.s3a.S3AFileStatus; import org.apache.hadoop.fs.s3a.S3AFileSystem; import org.apache.hadoop.fs.s3a.S3ATestUtils; import org.apache.hadoop.fs.s3a.commit.CommitConstants; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.util.ExitUtil; import org.apache.hadoop.util.StringUtils; import static org.apache.hadoop.fs.s3a.Constants.METADATASTORE_AUTHORITATIVE; import static org.apache.hadoop.fs.s3a.Constants.S3GUARD_DDB_TABLE_NAME_KEY; import static org.apache.hadoop.fs.s3a.Constants.S3GUARD_METASTORE_NULL; import static org.apache.hadoop.fs.s3a.Constants.S3_METADATA_STORE_IMPL; import static org.apache.hadoop.fs.s3a.S3AUtils.clearBucketOption; import static org.apache.hadoop.fs.s3a.s3guard.S3GuardTool.E_BAD_STATE; import static org.apache.hadoop.fs.s3a.s3guard.S3GuardTool.SUCCESS; import static org.apache.hadoop.test.LambdaTestUtils.intercept; /** * Common functionality for S3GuardTool test cases. */ public abstract class AbstractS3GuardToolTestBase extends AbstractS3ATestBase { protected static final String OWNER = "hdfs"; protected static final String DYNAMODB_TABLE = "dynamodb://ireland-team"; protected static final String S3A_THIS_BUCKET_DOES_NOT_EXIST = "s3a://this-bucket-does-not-exist-00000000000"; private static final int PRUNE_MAX_AGE_SECS = 2; private MetadataStore ms; private S3AFileSystem rawFs; protected static void expectResult(int expected, String message, S3GuardTool tool, String... args) throws Exception { assertEquals(message, expected, tool.run(args)); } protected static void expectSuccess( String message, S3GuardTool tool, String... args) throws Exception { assertEquals(message, SUCCESS, tool.run(args)); } /** * Run a S3GuardTool command from a varags list. 
* @param conf configuration * @param args argument list * @return the return code * @throws Exception any exception */ protected int run(Configuration conf, String... args) throws Exception { return S3GuardTool.run(conf, args); } /** * Run a S3GuardTool command from a varags list and the * configuration returned by {@code getConfiguration()}. * @param args argument list * @return the return code * @throws Exception any exception */ protected int run(String... args) throws Exception { return S3GuardTool.run(getConfiguration(), args); } /** * Run a S3GuardTool command from a varags list, catch any raised * ExitException and verify the status code matches that expected. * @param status expected status code of the exception * @param args argument list * @throws Exception any exception */ protected void runToFailure(int status, String... args) throws Exception { ExitUtil.ExitException ex = intercept(ExitUtil.ExitException.class, new Callable<Integer>() { @Override public Integer call() throws Exception { return run(args); } }); if (ex.status != status) { throw ex; } } protected MetadataStore getMetadataStore() { return ms; } @Override public void setup() throws Exception { super.setup(); S3ATestUtils.assumeS3GuardState(true, getConfiguration()); S3AFileSystem fs = getFileSystem(); ms = fs.getMetadataStore(); // Also create a "raw" fs without any MetadataStore configured Configuration conf = new Configuration(getConfiguration()); clearBucketOption(conf, fs.getBucket(), S3_METADATA_STORE_IMPL); conf.set(S3_METADATA_STORE_IMPL, S3GUARD_METASTORE_NULL); URI fsUri = fs.getUri(); S3AUtils.setBucketOption(conf,fsUri.getHost(), METADATASTORE_AUTHORITATIVE, S3GUARD_METASTORE_NULL); rawFs = (S3AFileSystem) FileSystem.newInstance(fsUri, conf); } @Override public void teardown() throws Exception { super.teardown(); IOUtils.cleanupWithLogger(LOG, ms); IOUtils.closeStream(rawFs); } protected void mkdirs(Path path, boolean onS3, boolean onMetadataStore) throws IOException { Preconditions.checkArgument(onS3 || onMetadataStore); // getFileSystem() returns an fs with MetadataStore configured S3AFileSystem fs = onMetadataStore ? getFileSystem() : rawFs; if (onS3) { fs.mkdirs(path); } else if (onMetadataStore) { S3AFileStatus status = new S3AFileStatus(true, path, OWNER); ms.put(new PathMetadata(status)); } } protected static void putFile(MetadataStore ms, S3AFileStatus f) throws IOException { assertNotNull(f); ms.put(new PathMetadata(f)); Path parent = f.getPath().getParent(); while (parent != null) { S3AFileStatus dir = new S3AFileStatus(false, parent, f.getOwner()); ms.put(new PathMetadata(dir)); parent = parent.getParent(); } } /** * Create file either on S3 or in metadata store. * @param path the file path. * @param onS3 set to true to create the file on S3. * @param onMetadataStore set to true to create the file on the * metadata store. * @throws IOException IO problem */ protected void createFile(Path path, boolean onS3, boolean onMetadataStore) throws IOException { Preconditions.checkArgument(onS3 || onMetadataStore); // getFileSystem() returns an fs with MetadataStore configured S3AFileSystem fs = onMetadataStore ? getFileSystem() : rawFs; if (onS3) { ContractTestUtils.touch(fs, path); } else if (onMetadataStore) { S3AFileStatus status = new S3AFileStatus(100L, System.currentTimeMillis(), fs.qualify(path), 512L, "hdfs"); putFile(ms, status); } } /** * Attempt to test prune() with sleep() without having flaky tests * when things run slowly. Test is basically: * 1. Set max path age to X seconds * 2. 
Create some files (which writes entries to MetadataStore) * 3. Sleep X+2 seconds (all files from above are now "stale") * 4. Create some other files (these are "fresh"). * 5. Run prune on MetadataStore. * 6. Assert that only files that were created before the sleep() were pruned. * * Problem is: #6 can fail if X seconds elapse between steps 4 and 5, since * the newer files also become stale and get pruned. This is easy to * reproduce by running all integration tests in parallel with a ton of * threads, or anything else that slows down execution a lot. * * Solution: Keep track of time elapsed between #4 and #5, and if it * exceeds X, just print a warn() message instead of failing. * * @param cmdConf configuration for command * @param parent path * @param args command args * @throws Exception */ private void testPruneCommand(Configuration cmdConf, Path parent, String...args) throws Exception { Path keepParent = path("prune-cli-keep"); StopWatch timer = new StopWatch(); try { S3GuardTool.Prune cmd = new S3GuardTool.Prune(cmdConf); cmd.setMetadataStore(ms); getFileSystem().mkdirs(parent); getFileSystem().mkdirs(keepParent); createFile(new Path(parent, "stale"), true, true); createFile(new Path(keepParent, "stale-to-keep"), true, true); Thread.sleep(TimeUnit.SECONDS.toMillis(PRUNE_MAX_AGE_SECS + 2)); timer.start(); createFile(new Path(parent, "fresh"), true, true); assertMetastoreListingCount(parent, "Children count before pruning", 2); exec(cmd, args); long msecElapsed = timer.now(TimeUnit.MILLISECONDS); if (msecElapsed >= PRUNE_MAX_AGE_SECS * 1000) { LOG.warn("Skipping an assertion: Test running too slowly ({} msec)", msecElapsed); } else { assertMetastoreListingCount(parent, "Pruned children count remaining", 1); } assertMetastoreListingCount(keepParent, "This child should have been kept (prefix restriction).", 1); } finally { getFileSystem().delete(parent, true); ms.prune(Long.MAX_VALUE); } } private void assertMetastoreListingCount(Path parent, String message, int expected) throws IOException { Collection<PathMetadata> listing = ms.listChildren(parent).getListing(); assertEquals(message +" [" + StringUtils.join(", ", listing) + "]", expected, listing.size()); } @Test public void testPruneCommandCLI() throws Exception { Path testPath = path("testPruneCommandCLI"); testPruneCommand(getFileSystem().getConf(), testPath, "prune", "-seconds", String.valueOf(PRUNE_MAX_AGE_SECS), testPath.toString()); } @Test public void testPruneCommandConf() throws Exception { getConfiguration().setLong(Constants.S3GUARD_CLI_PRUNE_AGE, TimeUnit.SECONDS.toMillis(PRUNE_MAX_AGE_SECS)); Path testPath = path("testPruneCommandConf"); testPruneCommand(getConfiguration(), testPath, "prune", testPath.toString()); } @Test public void testSetCapacityFailFast() throws Exception{ Configuration conf = getConfiguration(); conf.set(S3GUARD_DDB_TABLE_NAME_KEY, getFileSystem().getBucket()); S3GuardTool.SetCapacity cmdR = new S3GuardTool.SetCapacity(conf); String[] argsR = new String[]{cmdR.getName(), "-read", "0", "s3a://bucket"}; intercept(IllegalArgumentException.class, S3GuardTool.SetCapacity.READ_CAP_INVALID, () -> cmdR.run(argsR)); S3GuardTool.SetCapacity cmdW = new S3GuardTool.SetCapacity(conf); String[] argsW = new String[]{cmdW.getName(), "-write", "0", "s3a://bucket"}; intercept(IllegalArgumentException.class, S3GuardTool.SetCapacity.WRITE_CAP_INVALID, () -> cmdW.run(argsW)); } @Test public void testDestroyNoBucket() throws Throwable { intercept(FileNotFoundException.class, new Callable<Integer>() { @Override public 
Integer call() throws Exception { return run(S3GuardTool.Destroy.NAME, S3A_THIS_BUCKET_DOES_NOT_EXIST); } }); } @Test public void testProbeForMagic() throws Throwable { S3AFileSystem fs = getFileSystem(); String name = fs.getUri().toString(); S3GuardTool.BucketInfo cmd = new S3GuardTool.BucketInfo( getConfiguration()); if (fs.hasCapability( CommitConstants.STORE_CAPABILITY_MAGIC_COMMITTER)) { // if the FS is magic, expect this to work exec(cmd, S3GuardTool.BucketInfo.MAGIC_FLAG, name); } else { // if the FS isn't magic, expect the probe to fail ExitUtil.ExitException e = intercept(ExitUtil.ExitException.class, () -> exec(cmd, S3GuardTool.BucketInfo.MAGIC_FLAG, name)); if (e.getExitCode() != E_BAD_STATE) { throw e; } } } /** * Get the test CSV file; assume() that it is not modified (i.e. we haven't * switched to a new storage infrastructure where the bucket is no longer * read only). * @return test file. */ protected String getLandsatCSVFile() { String csvFile = getConfiguration() .getTrimmed(KEY_CSVTEST_FILE, DEFAULT_CSVTEST_FILE); Assume.assumeTrue("CSV test file is not the default", DEFAULT_CSVTEST_FILE.equals(csvFile)); return csvFile; } /** * Execute a command, returning the buffer if the command actually completes. * If an exception is raised the output is logged instead. * @param cmd command * @param args argument list * @throws Exception on any failure */ public String exec(S3GuardTool cmd, String...args) throws Exception { ByteArrayOutputStream buf = new ByteArrayOutputStream(); try { exec(cmd, buf, args); return buf.toString(); } catch (AssertionError e) { throw e; } catch (Exception e) { LOG.error("Command {} failed: \n{}", cmd, buf); throw e; } } /** * Execute a command, saving the output into the buffer. * @param cmd command * @param buf buffer to use for tool output (not SLF4J output) * @param args argument list * @throws Exception on any failure */ protected void exec(S3GuardTool cmd, ByteArrayOutputStream buf, String...args) throws Exception { LOG.info("exec {}", (Object) args); int r = 0; try(PrintStream out =new PrintStream(buf)) { r = cmd.run(args, out); out.flush(); } assertEquals("Command " + cmd + " failed\n"+ buf, 0, r); } @Test public void testDiffCommand() throws Exception { S3AFileSystem fs = getFileSystem(); ms = getMetadataStore(); Set<Path> filesOnS3 = new HashSet<>(); // files on S3. Set<Path> filesOnMS = new HashSet<>(); // files on metadata store. Path testPath = path("test-diff"); // clean up through the store and behind it. 
fs.delete(testPath, true); rawFs.delete(testPath, true); mkdirs(testPath, true, true); Path msOnlyPath = new Path(testPath, "ms_only"); mkdirs(msOnlyPath, false, true); filesOnMS.add(msOnlyPath); for (int i = 0; i < 5; i++) { Path file = new Path(msOnlyPath, String.format("file-%d", i)); createFile(file, false, true); filesOnMS.add(file); } Path s3OnlyPath = new Path(testPath, "s3_only"); mkdirs(s3OnlyPath, true, false); filesOnS3.add(s3OnlyPath); for (int i = 0; i < 5; i++) { Path file = new Path(s3OnlyPath, String.format("file-%d", i)); createFile(file, true, false); filesOnS3.add(file); } ByteArrayOutputStream buf = new ByteArrayOutputStream(); S3GuardTool.Diff cmd = new S3GuardTool.Diff(fs.getConf()); cmd.setStore(ms); exec(cmd, buf, "diff", "-meta", DYNAMODB_TABLE, testPath.toString()); Set<Path> actualOnS3 = new HashSet<>(); Set<Path> actualOnMS = new HashSet<>(); boolean duplicates = false; try (BufferedReader reader = new BufferedReader(new InputStreamReader( new ByteArrayInputStream(buf.toByteArray())))) { String line; while ((line = reader.readLine()) != null) { String[] fields = line.split("\\s"); assertEquals("[" + line + "] does not have enough fields", 4, fields.length); String where = fields[0]; Path path = new Path(fields[3]); if (S3GuardTool.Diff.S3_PREFIX.equals(where)) { duplicates = duplicates || actualOnS3.contains(path); actualOnS3.add(path); } else if (S3GuardTool.Diff.MS_PREFIX.equals(where)) { duplicates = duplicates || actualOnMS.contains(path); actualOnMS.add(path); } else { fail("Unknown prefix: " + where); } } } String actualOut = buf.toString(); assertEquals("Mismatched metadata store outputs: " + actualOut, filesOnMS, actualOnMS); assertEquals("Mismatched s3 outputs: " + actualOut, filesOnS3, actualOnS3); assertFalse("Diff contained duplicates", duplicates); } }
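/*
 * Illustrative sketch (not part of the original test class above): the helpers
 * run(), createFile() and getMetadataStore() are intended to be used from
 * concrete subclasses of this test base. The subclass and base-class names,
 * the test method and the path below are hypothetical, and S3GuardTool.Import.NAME
 * is assumed to be the "import" subcommand of the S3Guard CLI.
 */
class ITestS3GuardImportSketch extends AbstractS3GuardToolTestBase {
  @Test
  public void testImportPicksUpFileCreatedOnS3Only() throws Exception {
    Path base = path("import-sketch");
    // create the file in S3 only, not in the metadata store
    createFile(new Path(base, "raw-only"), true, false);
    // run the "import" subcommand through the shared run() helper
    run(S3GuardTool.Import.NAME, base.toString());
    // after the import the entry should be visible through the metadata store
    assertEquals("imported children", 1,
        getMetadataStore().listChildren(base).getListing().size());
  }
}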
/* * This file is part of the DITA Open Toolkit project. * * Copyright 2011 Jarno Elovirta * * See the accompanying LICENSE file for applicable license. */ package org.dita.dost.util; import static org.dita.dost.util.Constants.*; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.regex.Pattern; import com.google.common.annotations.VisibleForTesting; import net.sf.saxon.s9api.XdmNode; import net.sf.saxon.s9api.XdmNodeKind; import org.w3c.dom.Attr; import org.w3c.dom.Element; import org.w3c.dom.Node; import org.xml.sax.Attributes; /** * DITA specialization hierarchy object. * * <p>Instances of this class are immutable and are safe for use by multiple concurrent threads.</p> * * @since 1.5.3 * @author Jarno Elovirta */ public final class DitaClass { // Variables private static final Pattern WHITESPACE = Pattern.compile("\\s+"); private static final Pattern VALID_DITA_CLASS = Pattern.compile("(\\+|-)\\s+(topic|map)/\\S+\\s+" + "([\\S[^/]]+/\\S+\\s+)*"); private static final Map<String, DitaClass> cache = new ConcurrentHashMap<>(); /** Module/type pair for the most specialized type, with a single preceding and following space character. */ public final String matcher; /** Type name, i.e. local element name. */ public final String localName; /** Normalized specialization hierarchy string. */ private final String stringValue; /** Whether this class value uses valid DITA class syntax. */ private boolean validDitaClass = false; // Constructors /** * Constructor. Use {@link #getInstance(String)} instead. * * @param cls DITA specialization hierarchy string */ @VisibleForTesting DitaClass(final String cls) { final String[] tokens = WHITESPACE.split(cls); final String last = tokens[tokens.length - 1]; matcher = ' ' + last + ' '; localName = last.substring(last.indexOf('/') + 1); final StringBuilder sb = new StringBuilder(); for (final String s: tokens) { sb.append(s).append(' '); } stringValue = sb.toString(); validDitaClass = VALID_DITA_CLASS.matcher(stringValue).matches(); } /** * Get class instance. * @param cls DITA class, may be {@code null} * @return DITA class, {@code null} if the input was {@code null} */ public static DitaClass getInstance(final String cls) { if (cls == null) { return null; } return cache.computeIfAbsent(WHITESPACE.matcher(cls).replaceAll(" "), DitaClass::new); } /** * Get class instance. * @param atts attributes, may be {@code null} * @return DITA class, {@code null} if the input didn't contain a class */ public static DitaClass getInstance(final Attributes atts) { if (atts == null) { return null; } return getInstance(atts.getValue(ATTRIBUTE_NAME_CLASS)); } /** * Get class instance. * @param elem element, may be {@code null} * @return DITA class, {@code null} if the input didn't contain a class */ public static DitaClass getInstance(final Element elem) { if (elem == null) { return null; } final Attr attr = elem.getAttributeNode(ATTRIBUTE_NAME_CLASS); if (attr == null) { return null; } return getInstance(attr.getNodeValue()); } // Public methods @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + (stringValue.hashCode()); return result; } @Override public boolean equals(final Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } final DitaClass other = (DitaClass) obj; return stringValue.equals(other.stringValue); } /** * Get DITA specialization hierarchy string, i.e. the class attribute value.
* * @return specialization hierarchy string */ @Override public String toString() { return stringValue; } /** * Test if given DITA class matches this DITA class. * * @param cls DITA element class * @return {@code true} if given class matches this class, otherwise {@code false} */ public boolean matches(final DitaClass cls) { return cls != null && cls.toString().contains(matcher); } /** * Test if given DITA class string matches this DITA class. * * @param classString DITA element class string * @return {@code true} if given class matches this class, otherwise {@code false} */ public boolean matches(final String classString) { return classString != null && classString.contains(matcher); } /** * Test if the class attribute of the given attribute set matches this DITA class. * * @param atts SAX attributes * @return {@code true} if given attribute set has a class attribute and it matches this class, otherwise {@code false} */ public boolean matches(final Attributes atts) { return matches(atts.getValue(ATTRIBUTE_NAME_CLASS)); } /** * Test if the class attribute of the given DOM node matches this DITA class. * * @param node DOM DITA element * @return {@code true} if given node is an Element and its class matches this class, otherwise {@code false} */ public boolean matches(final Node node) { if (node.getNodeType() == Node.ELEMENT_NODE) { return matches(((Element) node).getAttribute(ATTRIBUTE_NAME_CLASS)); } return false; } /** * Test if the class attribute of the given S9api node matches this DITA class. * * @param node S9api DITA element * @return {@code true} if given node is an element and its class matches this class, otherwise {@code false} */ public boolean matches(final XdmNode node) { if (node.getNodeKind() == XdmNodeKind.ELEMENT) { return matches(node.attribute(ATTRIBUTE_NAME_CLASS)); } return false; } /** * Test if the current DitaClass is a valid DITA class value. * * @return {@code true} if it uses valid DITA class syntax, otherwise {@code false} */ public boolean isValid() { return validDitaClass; } }
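/*
 * Illustrative sketch (not part of DitaClass.java above): a minimal example of
 * how the matcher/matches mechanism behaves. It assumes it is compiled in the
 * same package as DitaClass; the class and method names here are hypothetical,
 * and the @class values are ordinary DITA specialization hierarchy strings.
 */
class DitaClassExample {
  public static void main(final String[] args) {
    // a base type and a type specialized from it
    final DitaClass ph = DitaClass.getInstance("- topic/ph ");
    final DitaClass codeph = DitaClass.getInstance("+ topic/ph pr-d/codeph ");
    // ph.matcher is " topic/ph ", which occurs inside codeph's hierarchy string
    System.out.println(ph.matches(codeph));   // true: codeph specializes topic/ph
    System.out.println(codeph.matches(ph));   // false: a plain ph is not a codeph
    System.out.println(codeph.localName);     // "codeph"
    System.out.println(codeph.isValid());     // true: "+ topic/ph pr-d/codeph " is valid syntax
  }
}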
package org.hisp.dhis.dxf2.metadata; /* * Copyright (c) 2004-2016, University of Oslo * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * Neither the name of the HISP project nor the names of its contributors may * be used to endorse or promote products derived from this software without * specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ import com.google.common.base.MoreObjects; import org.hisp.dhis.common.IdentifiableObject; import org.hisp.dhis.common.MergeMode; import org.hisp.dhis.dxf2.metadata.feedback.ImportReportMode; import org.hisp.dhis.dxf2.metadata.objectbundle.ObjectBundleMode; import org.hisp.dhis.dxf2.metadata.objectbundle.ObjectBundleParams; import org.hisp.dhis.importexport.ImportStrategy; import org.hisp.dhis.preheat.PreheatIdentifier; import org.hisp.dhis.preheat.PreheatMode; import org.hisp.dhis.scheduling.TaskId; import org.hisp.dhis.schema.Schema; import org.hisp.dhis.system.util.ReflectionUtils; import org.hisp.dhis.user.User; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; /** * @author Morten Olav Hansen <mortenoh@gmail.com> */ public class MetadataImportParams { private User user; private ObjectBundleMode importMode = ObjectBundleMode.COMMIT; private PreheatIdentifier identifier = PreheatIdentifier.UID; private PreheatMode preheatMode = PreheatMode.REFERENCE; private ImportStrategy importStrategy = ImportStrategy.CREATE_AND_UPDATE; private AtomicMode atomicMode = AtomicMode.ALL; private MergeMode mergeMode = MergeMode.REPLACE; private FlushMode flushMode = FlushMode.AUTO; private ImportReportMode importReportMode = ImportReportMode.ERRORS; private boolean skipSharing; private boolean skipValidation; private TaskId taskId; private Map<Class<? extends IdentifiableObject>, List<IdentifiableObject>> objects = new HashMap<>(); public MetadataImportParams() { } public MetadataImportParams( List<? extends IdentifiableObject> objects ) { addObjects( objects ); } public String getUsername() { return user != null ? 
user.getUsername() : "system-process"; } public User getUser() { return user; } public void setUser( User user ) { this.user = user; } public ObjectBundleMode getImportMode() { return importMode; } public void setImportMode( ObjectBundleMode importMode ) { this.importMode = importMode; } public PreheatIdentifier getIdentifier() { return identifier; } public void setIdentifier( PreheatIdentifier identifier ) { this.identifier = identifier; } public PreheatMode getPreheatMode() { return preheatMode; } public void setPreheatMode( PreheatMode preheatMode ) { this.preheatMode = preheatMode; } public ImportStrategy getImportStrategy() { return importStrategy; } public MetadataImportParams setImportStrategy( ImportStrategy importStrategy ) { this.importStrategy = importStrategy; return this; } public AtomicMode getAtomicMode() { return atomicMode; } public void setAtomicMode( AtomicMode atomicMode ) { this.atomicMode = atomicMode; } public MergeMode getMergeMode() { return mergeMode; } public void setMergeMode( MergeMode mergeMode ) { this.mergeMode = mergeMode; } public FlushMode getFlushMode() { return flushMode; } public void setFlushMode( FlushMode flushMode ) { this.flushMode = flushMode; } public ImportReportMode getImportReportMode() { return importReportMode; } public MetadataImportParams setImportReportMode( ImportReportMode importReportMode ) { this.importReportMode = importReportMode; return this; } public boolean isSkipSharing() { return skipSharing; } public void setSkipSharing( boolean skipSharing ) { this.skipSharing = skipSharing; } public boolean isSkipValidation() { return skipValidation; } public void setSkipValidation( boolean skipValidation ) { this.skipValidation = skipValidation; } public TaskId getTaskId() { return taskId; } public void setTaskId( TaskId taskId ) { this.taskId = taskId; } public boolean hasTaskId() { return taskId != null; } public Map<Class<? extends IdentifiableObject>, List<IdentifiableObject>> getObjects() { return objects; } public void setObjects( Map<Class<? extends IdentifiableObject>, List<IdentifiableObject>> objects ) { this.objects = objects; } public List<Class<? extends IdentifiableObject>> getClasses() { return new ArrayList<>( objects.keySet() ); } public List<? extends IdentifiableObject> getObjects( Class<? extends IdentifiableObject> klass ) { return objects.get( klass ); } public MetadataImportParams addObject( IdentifiableObject object ) { if ( object == null ) { return this; } Class<? extends IdentifiableObject> klass = object.getClass(); if ( !objects.containsKey( klass ) ) { objects.put( klass, new ArrayList<>() ); } objects.get( klass ).add( klass.cast( object ) ); return this; } public MetadataImportParams addObjects( List<? extends IdentifiableObject> objects ) { objects.forEach( this::addObject ); return this; } @SuppressWarnings( "unchecked" ) public MetadataImportParams addMetadata( List<Schema> schemas, Metadata metadata ) { Map<Class<? extends IdentifiableObject>, List<IdentifiableObject>> objectMap = new HashMap<>(); for ( Schema schema : schemas ) { Object value = ReflectionUtils.invokeGetterMethod( schema.getPlural(), metadata ); if ( value != null ) { if ( Collection.class.isAssignableFrom( value.getClass() ) && schema.isIdentifiableObject() ) { List<IdentifiableObject> objects = new ArrayList<>( (Collection<IdentifiableObject>) value ); if ( !objects.isEmpty() ) { objectMap.put( (Class<? 
extends IdentifiableObject>) schema.getKlass(), objects ); } } } } setObjects( objectMap ); return this; } public ObjectBundleParams toObjectBundleParams() { ObjectBundleParams params = new ObjectBundleParams(); params.setUser( user ); params.setSkipSharing( skipSharing ); params.setSkipValidation( skipValidation ); params.setTaskId( taskId ); params.setImportStrategy( importStrategy ); params.setAtomicMode( atomicMode ); params.setObjects( objects ); params.setPreheatIdentifier( identifier ); params.setPreheatMode( preheatMode ); params.setObjectBundleMode( importMode ); params.setMergeMode( mergeMode ); params.setFlushMode( flushMode ); params.setImportReportMode( importReportMode ); return params; } @Override public String toString() { return MoreObjects.toStringHelper( this ) .add( "user", user ) .add( "importMode", importMode ) .add( "identifier", identifier ) .add( "preheatMode", preheatMode ) .add( "importStrategy", importStrategy ) .add( "mergeMode", mergeMode ) .toString(); } }
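/*
 * Illustrative sketch (not part of MetadataImportParams.java above): shows how
 * the chainable setters, addObject(...) and toObjectBundleParams() are meant to
 * be combined before the parameters are handed to the metadata importer. The
 * wrapper class and method are hypothetical; AtomicMode.NONE and
 * ImportReportMode.FULL are assumed enum constants (the defaults above are ALL
 * and ERRORS), and the imports are the same as in the file above.
 */
class MetadataImportParamsExample {
  static ObjectBundleParams buildParams(final IdentifiableObject anyObject) {
    final MetadataImportParams params = new MetadataImportParams();
    // setImportStrategy and setImportReportMode return "this", so they chain
    params.setImportStrategy(ImportStrategy.CREATE_AND_UPDATE)
        .setImportReportMode(ImportReportMode.FULL);
    // do not roll back the whole import when a single object fails (assumed constant)
    params.setAtomicMode(AtomicMode.NONE);
    params.addObject(anyObject);
    // translate into the object-bundle layer used by the importer
    return params.toObjectBundleParams();
  }
}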
package psidev.psi.mi.jami.xml.io.writer.expanded.extended; import junit.framework.Assert; import org.junit.Test; import psidev.psi.mi.jami.model.Complex; import psidev.psi.mi.jami.model.InteractionEvidence; import psidev.psi.mi.jami.model.ParticipantEvidence; import psidev.psi.mi.jami.model.Source; import psidev.psi.mi.jami.xml.model.extension.*; import javax.xml.datatype.DatatypeConfigurationException; import javax.xml.datatype.DatatypeFactory; import javax.xml.stream.XMLStreamException; import java.io.StringWriter; import java.util.Arrays; /** * Unit tester for ExpandedXmlEvidenceWriter * * @author Marine Dumousseau (marine@ebi.ac.uk) * @version $Id$ * @since <pre>26/11/13</pre> */ public class ExpandedXml25EvidenceWriterTest { private String interaction = "<?xml version='1.0' encoding='UTF-8'?>\n" + "<entrySet xmlns=\"http://psi.hupo.org/mi/mif\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" " + "xsi:schemaLocation=\"http://psi.hupo.org/mi/mif https://raw.githubusercontent.com/HUPO-PSI/miXML/master/2.5/src/MIF254.xsd\" " + "level=\"2\" version=\"5\" minorVersion=\"4\">\n" + " <entry>\n" + " <interactionList>\n" + " <interaction id=\"1\">\n" + " <experimentList>\n" + " <experimentDescription id=\"2\">\n" + " <bibref>\n" + " <xref>\n" + " <primaryRef db=\"pubmed\" dbAc=\"MI:0446\" id=\"xxxxxx\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+ " </xref>\n"+ " </bibref>\n" + " <interactionDetectionMethod>\n" + " <names>\n" + " <shortLabel>unspecified method</shortLabel>\n"+ " </names>\n"+ " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0686\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+ " </xref>\n"+ " </interactionDetectionMethod>\n"+ " </experimentDescription>\n"+ " </experimentList>\n" + " <participantList>\n" + " <participant id=\"3\">\n" + " <interactor id=\"4\">\n" + " <names>\n" + " <shortLabel>protein test</shortLabel>\n" + " </names>\n" + " <interactorType>\n" + " <names>\n" + " <shortLabel>protein</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0326\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </interactorType>\n" + " </interactor>\n"+ " <biologicalRole>\n" + " <names>\n" + " <shortLabel>unspecified role</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </biologicalRole>\n" + " <experimentalRoleList>\n" + " <experimentalRole>\n" + " <names>\n" + " <shortLabel>unspecified role</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </experimentalRole>\n" + " </experimentalRoleList>\n" + " </participant>\n"+ " </participantList>\n" + " </interaction>\n"+ " </interactionList>\n"+ " </entry>\n" + "</entrySet>"; private String interaction_multiple = "<?xml version='1.0' encoding='UTF-8'?>\n" + "<entrySet xmlns=\"http://psi.hupo.org/mi/mif\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" " + "xsi:schemaLocation=\"http://psi.hupo.org/mi/mif https://raw.githubusercontent.com/HUPO-PSI/miXML/master/2.5/src/MIF254.xsd\" " + "level=\"2\" version=\"5\" minorVersion=\"4\">\n" + " <entry>\n" + " <interactionList>\n" + " <interaction id=\"1\">\n" + " <experimentList>\n" + " <experimentDescription id=\"2\">\n" + " <bibref>\n" + " <xref>\n" + " <primaryRef db=\"pubmed\" dbAc=\"MI:0446\" id=\"xxxxxx\" refType=\"identity\" 
refTypeAc=\"MI:0356\"/>\n"+ " </xref>\n"+ " </bibref>\n" + " <interactionDetectionMethod>\n" + " <names>\n" + " <shortLabel>unspecified method</shortLabel>\n"+ " </names>\n"+ " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0686\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+ " </xref>\n"+ " </interactionDetectionMethod>\n"+ " </experimentDescription>\n"+ " </experimentList>\n" + " <participantList>\n" + " <participant id=\"3\">\n" + " <interactor id=\"4\">\n" + " <names>\n" + " <shortLabel>protein test</shortLabel>\n" + " </names>\n" + " <interactorType>\n" + " <names>\n" + " <shortLabel>protein</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0326\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </interactorType>\n" + " </interactor>\n"+ " <biologicalRole>\n" + " <names>\n" + " <shortLabel>unspecified role</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </biologicalRole>\n" + " <experimentalRoleList>\n" + " <experimentalRole>\n" + " <names>\n" + " <shortLabel>unspecified role</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </experimentalRole>\n" + " </experimentalRoleList>\n" + " </participant>\n"+ " </participantList>\n" + " </interaction>\n"+ " <interaction id=\"5\">\n" + " <experimentList>\n" + " <experimentDescription id=\"6\">\n" + " <bibref>\n" + " <xref>\n" + " <primaryRef db=\"pubmed\" dbAc=\"MI:0446\" id=\"xxxxxx\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+ " </xref>\n"+ " </bibref>\n" + " <interactionDetectionMethod>\n" + " <names>\n" + " <shortLabel>unspecified method</shortLabel>\n"+ " </names>\n"+ " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0686\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+ " </xref>\n"+ " </interactionDetectionMethod>\n"+ " </experimentDescription>\n"+ " </experimentList>\n" + " <participantList>\n" + " <participant id=\"7\">\n" + " <interactor id=\"8\">\n" + " <names>\n" + " <shortLabel>protein test2</shortLabel>\n" + " </names>\n" + " <interactorType>\n" + " <names>\n" + " <shortLabel>protein</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0326\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </interactorType>\n" + " </interactor>\n"+ " <biologicalRole>\n" + " <names>\n" + " <shortLabel>unspecified role</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </biologicalRole>\n" + " <experimentalRoleList>\n" + " <experimentalRole>\n" + " <names>\n" + " <shortLabel>unspecified role</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </experimentalRole>\n" + " </experimentalRoleList>\n" + " </participant>\n"+ " </participantList>\n" + " </interaction>\n"+ " </interactionList>\n"+ " </entry>\n" + "</entrySet>"; private String interaction_same_experiment_interactors = "<?xml version='1.0' encoding='UTF-8'?>\n" + "<entrySet xmlns=\"http://psi.hupo.org/mi/mif\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" " + "xsi:schemaLocation=\"http://psi.hupo.org/mi/mif 
https://raw.githubusercontent.com/HUPO-PSI/miXML/master/2.5/src/MIF254.xsd\" " + "level=\"2\" version=\"5\" minorVersion=\"4\">\n" + " <entry>\n" + " <interactionList>\n" + " <interaction id=\"1\">\n" + " <experimentList>\n" + " <experimentDescription id=\"2\">\n" + " <bibref>\n" + " <xref>\n" + " <primaryRef db=\"pubmed\" dbAc=\"MI:0446\" id=\"xxxxxx\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </bibref>\n" + " <interactionDetectionMethod>\n" + " <names>\n" + " <shortLabel>unspecified method</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0686\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </interactionDetectionMethod>\n" + " </experimentDescription>\n" + " </experimentList>\n"+ " <participantList>\n" + " <participant id=\"3\">\n" + " <interactor id=\"4\">\n" + " <names>\n" + " <shortLabel>protein test</shortLabel>\n" + " </names>\n" + " <interactorType>\n" + " <names>\n" + " <shortLabel>protein</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0326\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </interactorType>\n" + " </interactor>\n"+ " <biologicalRole>\n" + " <names>\n" + " <shortLabel>unspecified role</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </biologicalRole>\n" + " <experimentalRoleList>\n" + " <experimentalRole>\n" + " <names>\n" + " <shortLabel>unspecified role</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </experimentalRole>\n" + " </experimentalRoleList>\n" + " </participant>\n"+ " </participantList>\n" + " </interaction>\n"+ " <interaction id=\"5\">\n" + " <experimentList>\n" + " <experimentDescription id=\"6\">\n" + " <bibref>\n" + " <xref>\n" + " <primaryRef db=\"pubmed\" dbAc=\"MI:0446\" id=\"xxxxxx\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+ " </xref>\n"+ " </bibref>\n" + " <interactionDetectionMethod>\n" + " <names>\n" + " <shortLabel>unspecified method</shortLabel>\n"+ " </names>\n"+ " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0686\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+ " </xref>\n"+ " </interactionDetectionMethod>\n"+ " </experimentDescription>\n"+ " </experimentList>\n" + " <participantList>\n" + " <participant id=\"7\">\n" + " <interactor id=\"8\">\n" + " <names>\n" + " <shortLabel>protein test</shortLabel>\n" + " </names>\n" + " <interactorType>\n" + " <names>\n" + " <shortLabel>protein</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0326\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </interactorType>\n" + " </interactor>\n"+ " <biologicalRole>\n" + " <names>\n" + " <shortLabel>unspecified role</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </biologicalRole>\n" + " <experimentalRoleList>\n" + " <experimentalRole>\n" + " <names>\n" + " <shortLabel>unspecified role</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </experimentalRole>\n" + " </experimentalRoleList>\n" + " 
</participant>\n"+ " </participantList>\n" + " </interaction>\n"+ " </interactionList>\n"+ " </entry>\n" + "</entrySet>"; private String interaction_complexes = "<?xml version='1.0' encoding='UTF-8'?>\n" + "<entrySet xmlns=\"http://psi.hupo.org/mi/mif\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" " + "xsi:schemaLocation=\"http://psi.hupo.org/mi/mif https://raw.githubusercontent.com/HUPO-PSI/miXML/master/2.5/src/MIF254.xsd\" " + "level=\"2\" version=\"5\" minorVersion=\"4\">\n" + " <entry>\n" + " <interactionList>\n" + " <interaction id=\"1\">\n" + " <experimentList>\n" + " <experimentDescription id=\"2\">\n" + " <bibref>\n" + " <xref>\n" + " <primaryRef db=\"pubmed\" dbAc=\"MI:0446\" id=\"xxxxxx\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+ " </xref>\n"+ " </bibref>\n" + " <interactionDetectionMethod>\n" + " <names>\n" + " <shortLabel>unspecified method</shortLabel>\n"+ " </names>\n"+ " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0686\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+ " </xref>\n"+ " </interactionDetectionMethod>\n"+ " </experimentDescription>\n"+ " </experimentList>\n" + " <participantList>\n" + " <participant id=\"3\">\n" + " <interactionRef>4</interactionRef>\n" + " <biologicalRole>\n" + " <names>\n" + " <shortLabel>unspecified role</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </biologicalRole>\n" + " <experimentalRoleList>\n" + " <experimentalRole>\n" + " <names>\n" + " <shortLabel>unspecified role</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </experimentalRole>\n" + " </experimentalRoleList>\n" + " </participant>\n"+ " </participantList>\n" + " </interaction>\n"+ " <interaction id=\"4\">\n" + " <names>\n" + " <shortLabel>test complex</shortLabel>\n"+ " </names>\n" + " <experimentList>\n" + " <experimentDescription id=\"5\">\n" + " <names>\n" + " <fullName>Mock publication and experiment for abstract interactions that are not interaction evidences.</fullName>\n" + " </names>\n" + " <bibref>\n" + " <attributeList>\n" + " <attribute name=\"publication title\" nameAc=\"MI:1091\">Mock publication and experiment for abstract interactions that are not interaction evidences.</attribute>\n" + " </attributeList>\n" + " </bibref>\n" + " <interactionDetectionMethod>\n" + " <names>\n" + " <shortLabel>unspecified method</shortLabel>\n"+ " </names>\n"+ " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0686\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+ " </xref>\n"+ " </interactionDetectionMethod>\n"+ " </experimentDescription>\n"+ " </experimentList>\n" + " <participantList>\n" + " <participant id=\"6\">\n" + " <interactor id=\"7\">\n" + " <names>\n" + " <shortLabel>test protein</shortLabel>\n" + " </names>\n" + " <interactorType>\n" + " <names>\n" + " <shortLabel>protein</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0326\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </interactorType>\n" + " </interactor>\n"+ " <biologicalRole>\n" + " <names>\n" + " <shortLabel>unspecified role</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </biologicalRole>\n" + " </participant>\n"+ 
" </participantList>\n" + " </interaction>\n"+ " </interactionList>\n"+ " </entry>\n" + "</entrySet>"; private String interaction_complexes_as_interactor = "<?xml version='1.0' encoding='UTF-8'?>\n" + "<entrySet xmlns=\"http://psi.hupo.org/mi/mif\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" " + "xsi:schemaLocation=\"http://psi.hupo.org/mi/mif https://raw.githubusercontent.com/HUPO-PSI/miXML/master/2.5/src/MIF254.xsd\" " + "level=\"2\" version=\"5\" minorVersion=\"4\">\n" + " <entry>\n" + " <interactionList>\n" + " <interaction id=\"1\">\n" + " <experimentList>\n" + " <experimentDescription id=\"2\">\n" + " <bibref>\n" + " <xref>\n" + " <primaryRef db=\"pubmed\" dbAc=\"MI:0446\" id=\"xxxxxx\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+ " </xref>\n"+ " </bibref>\n" + " <interactionDetectionMethod>\n" + " <names>\n" + " <shortLabel>unspecified method</shortLabel>\n"+ " </names>\n"+ " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0686\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+ " </xref>\n"+ " </interactionDetectionMethod>\n"+ " </experimentDescription>\n"+ " </experimentList>\n" + " <participantList>\n" + " <participant id=\"3\">\n" + " <interactor id=\"4\">\n" + " <names>\n" + " <shortLabel>test complex</shortLabel>\n" + " </names>\n" + " <interactorType>\n" + " <names>\n" + " <shortLabel>complex</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0314\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </interactorType>\n" + " </interactor>\n"+ " <biologicalRole>\n" + " <names>\n" + " <shortLabel>unspecified role</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </biologicalRole>\n" + " <experimentalRoleList>\n" + " <experimentalRole>\n" + " <names>\n" + " <shortLabel>unspecified role</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </experimentalRole>\n" + " </experimentalRoleList>\n" + " </participant>\n"+ " </participantList>\n" + " </interaction>\n"+ " </interactionList>\n"+ " </entry>\n" + "</entrySet>"; private String interaction_different_entries1 = "<?xml version='1.0' encoding='UTF-8'?>\n" + "<entrySet xmlns=\"http://psi.hupo.org/mi/mif\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" " + "xsi:schemaLocation=\"http://psi.hupo.org/mi/mif https://raw.githubusercontent.com/HUPO-PSI/miXML/master/2.5/src/MIF254.xsd\" " + "level=\"2\" version=\"5\" minorVersion=\"4\">\n" + " <entry>\n" + " <interactionList>\n" + " <interaction id=\"1\">\n" + " <experimentList>\n" + " <experimentDescription id=\"2\">\n" + " <bibref>\n" + " <xref>\n" + " <primaryRef db=\"pubmed\" dbAc=\"MI:0446\" id=\"xxxxxx\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+ " </xref>\n"+ " </bibref>\n" + " <interactionDetectionMethod>\n" + " <names>\n" + " <shortLabel>unspecified method</shortLabel>\n"+ " </names>\n"+ " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0686\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+ " </xref>\n"+ " </interactionDetectionMethod>\n"+ " </experimentDescription>\n"+ " </experimentList>\n" + " <participantList>\n" + " <participant id=\"3\">\n" + " <interactor id=\"4\">\n" + " <names>\n" + " <shortLabel>protein test</shortLabel>\n" + " </names>\n" + " <interactorType>\n" + " <names>\n" + " 
<shortLabel>protein</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0326\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </interactorType>\n" + " </interactor>\n"+ " <biologicalRole>\n" + " <names>\n" + " <shortLabel>unspecified role</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </biologicalRole>\n" + " <experimentalRoleList>\n" + " <experimentalRole>\n" + " <names>\n" + " <shortLabel>unspecified role</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </experimentalRole>\n" + " </experimentalRoleList>\n" + " </participant>\n"+ " </participantList>\n" + " </interaction>\n"+ " </interactionList>\n"+ " </entry>\n" + " <entry>\n" + " <interactionList>\n" + " <interaction id=\"1\">\n" + " <experimentList>\n" + " <experimentDescription id=\"2\">\n" + " <bibref>\n" + " <xref>\n" + " <primaryRef db=\"pubmed\" dbAc=\"MI:0446\" id=\"xxxxxx\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+ " </xref>\n"+ " </bibref>\n" + " <interactionDetectionMethod>\n" + " <names>\n" + " <shortLabel>unspecified method</shortLabel>\n"+ " </names>\n"+ " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0686\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+ " </xref>\n"+ " </interactionDetectionMethod>\n"+ " </experimentDescription>\n"+ " </experimentList>\n" + " <participantList>\n" + " <participant id=\"3\">\n" + " <interactor id=\"4\">\n" + " <names>\n" + " <shortLabel>protein test</shortLabel>\n" + " </names>\n" + " <interactorType>\n" + " <names>\n" + " <shortLabel>protein</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0326\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </interactorType>\n" + " </interactor>\n"+ " <biologicalRole>\n" + " <names>\n" + " <shortLabel>unspecified role</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </biologicalRole>\n" + " <experimentalRoleList>\n" + " <experimentalRole>\n" + " <names>\n" + " <shortLabel>unspecified role</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </experimentalRole>\n" + " </experimentalRoleList>\n" + " </participant>\n"+ " </participantList>\n" + " </interaction>\n"+ " </interactionList>\n"+ " </entry>\n" + "</entrySet>"; private String interaction_different_entries2 = "<?xml version='1.0' encoding='UTF-8'?>\n" + "<entrySet xmlns=\"http://psi.hupo.org/mi/mif\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" " + "xsi:schemaLocation=\"http://psi.hupo.org/mi/mif https://raw.githubusercontent.com/HUPO-PSI/miXML/master/2.5/src/MIF254.xsd\" " + "level=\"2\" version=\"5\" minorVersion=\"4\">\n" + " <entry>\n" + " <interactionList>\n" + " <interaction id=\"1\">\n" + " <experimentList>\n" + " <experimentDescription id=\"2\">\n" + " <bibref>\n" + " <xref>\n" + " <primaryRef db=\"pubmed\" dbAc=\"MI:0446\" id=\"xxxxxx\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+ " </xref>\n"+ " </bibref>\n" + " <interactionDetectionMethod>\n" + " <names>\n" + " <shortLabel>unspecified method</shortLabel>\n"+ " 
</names>\n"+ " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0686\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+ " </xref>\n"+ " </interactionDetectionMethod>\n"+ " </experimentDescription>\n"+ " </experimentList>\n" + " <participantList>\n" + " <participant id=\"3\">\n" + " <interactor id=\"4\">\n" + " <names>\n" + " <shortLabel>protein test</shortLabel>\n" + " </names>\n" + " <interactorType>\n" + " <names>\n" + " <shortLabel>protein</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0326\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </interactorType>\n" + " </interactor>\n"+ " <biologicalRole>\n" + " <names>\n" + " <shortLabel>unspecified role</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </biologicalRole>\n" + " <experimentalRoleList>\n" + " <experimentalRole>\n" + " <names>\n" + " <shortLabel>unspecified role</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </experimentalRole>\n" + " </experimentalRoleList>\n" + " </participant>\n"+ " </participantList>\n" + " </interaction>\n"+ " <interaction id=\"5\">\n" + " <experimentList>\n" + " <experimentDescription id=\"6\">\n" + " <bibref>\n" + " <xref>\n" + " <primaryRef db=\"pubmed\" dbAc=\"MI:0446\" id=\"xxxxxx\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+ " </xref>\n"+ " </bibref>\n" + " <interactionDetectionMethod>\n" + " <names>\n" + " <shortLabel>unspecified method</shortLabel>\n"+ " </names>\n"+ " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0686\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+ " </xref>\n"+ " </interactionDetectionMethod>\n"+ " </experimentDescription>\n"+ " </experimentList>\n" + " <participantList>\n" + " <participant id=\"7\">\n" + " <interactor id=\"8\">\n" + " <names>\n" + " <shortLabel>protein test2</shortLabel>\n" + " </names>\n" + " <interactorType>\n" + " <names>\n" + " <shortLabel>protein</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0326\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </interactorType>\n" + " </interactor>\n"+ " <biologicalRole>\n" + " <names>\n" + " <shortLabel>unspecified role</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </biologicalRole>\n" + " <experimentalRoleList>\n" + " <experimentalRole>\n" + " <names>\n" + " <shortLabel>unspecified role</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </experimentalRole>\n" + " </experimentalRoleList>\n" + " </participant>\n"+ " </participantList>\n" + " </interaction>\n"+ " </interactionList>\n"+ " </entry>\n" + " <entry>\n" + " <interactionList>\n" + " <interaction id=\"1\">\n" + " <experimentList>\n" + " <experimentDescription id=\"2\">\n" + " <bibref>\n" + " <xref>\n" + " <primaryRef db=\"pubmed\" dbAc=\"MI:0446\" id=\"xxxxxx\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+ " </xref>\n"+ " </bibref>\n" + " <interactionDetectionMethod>\n" + " <names>\n" + " <shortLabel>unspecified method</shortLabel>\n"+ " </names>\n"+ " <xref>\n" + " <primaryRef 
db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0686\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+ " </xref>\n"+ " </interactionDetectionMethod>\n"+ " </experimentDescription>\n"+ " </experimentList>\n" + " <participantList>\n" + " <participant id=\"3\">\n" + " <interactor id=\"4\">\n" + " <names>\n" + " <shortLabel>protein test</shortLabel>\n" + " </names>\n" + " <interactorType>\n" + " <names>\n" + " <shortLabel>protein</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0326\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </interactorType>\n" + " </interactor>\n"+ " <biologicalRole>\n" + " <names>\n" + " <shortLabel>unspecified role</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </biologicalRole>\n" + " <experimentalRoleList>\n" + " <experimentalRole>\n" + " <names>\n" + " <shortLabel>unspecified role</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </experimentalRole>\n" + " </experimentalRoleList>\n" + " </participant>\n"+ " </participantList>\n" + " </interaction>\n"+ " <interaction id=\"5\">\n" + " <experimentList>\n" + " <experimentDescription id=\"6\">\n" + " <bibref>\n" + " <xref>\n" + " <primaryRef db=\"pubmed\" dbAc=\"MI:0446\" id=\"xxxxxx\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+ " </xref>\n"+ " </bibref>\n" + " <interactionDetectionMethod>\n" + " <names>\n" + " <shortLabel>unspecified method</shortLabel>\n"+ " </names>\n"+ " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0686\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+ " </xref>\n"+ " </interactionDetectionMethod>\n"+ " </experimentDescription>\n"+ " </experimentList>\n" + " <participantList>\n" + " <participant id=\"7\">\n" + " <interactor id=\"8\">\n" + " <names>\n" + " <shortLabel>protein test2</shortLabel>\n" + " </names>\n" + " <interactorType>\n" + " <names>\n" + " <shortLabel>protein</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0326\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </interactorType>\n" + " </interactor>\n"+ " <biologicalRole>\n" + " <names>\n" + " <shortLabel>unspecified role</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </biologicalRole>\n" + " <experimentalRoleList>\n" + " <experimentalRole>\n" + " <names>\n" + " <shortLabel>unspecified role</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </experimentalRole>\n" + " </experimentalRoleList>\n" + " </participant>\n"+ " </participantList>\n" + " </interaction>\n"+ " </interactionList>\n"+ " </entry>\n" + "</entrySet>"; private String interaction_source = "<?xml version='1.0' encoding='UTF-8'?>\n" + "<entrySet xmlns=\"http://psi.hupo.org/mi/mif\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" " + "xsi:schemaLocation=\"http://psi.hupo.org/mi/mif https://raw.githubusercontent.com/HUPO-PSI/miXML/master/2.5/src/MIF254.xsd\" " + "level=\"2\" version=\"5\" minorVersion=\"4\">\n" + " <entry>\n" + " <source releaseDate=\"2013-09-02Z\">\n" + " <names>\n" + " 
<shortLabel>intact</shortLabel>\n"+ " </names>\n"+ " </source>\n"+ " <interactionList>\n" + " <interaction id=\"1\">\n" + " <experimentList>\n" + " <experimentDescription id=\"2\">\n" + " <bibref>\n" + " <xref>\n" + " <primaryRef db=\"pubmed\" dbAc=\"MI:0446\" id=\"xxxxxx\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+ " </xref>\n"+ " </bibref>\n" + " <interactionDetectionMethod>\n" + " <names>\n" + " <shortLabel>unspecified method</shortLabel>\n"+ " </names>\n"+ " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0686\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+ " </xref>\n"+ " </interactionDetectionMethod>\n"+ " </experimentDescription>\n"+ " </experimentList>\n" + " <participantList>\n" + " <participant id=\"3\">\n" + " <interactor id=\"4\">\n" + " <names>\n" + " <shortLabel>protein test</shortLabel>\n" + " </names>\n" + " <interactorType>\n" + " <names>\n" + " <shortLabel>protein</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0326\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </interactorType>\n" + " </interactor>\n"+ " <biologicalRole>\n" + " <names>\n" + " <shortLabel>unspecified role</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </biologicalRole>\n" + " <experimentalRoleList>\n" + " <experimentalRole>\n" + " <names>\n" + " <shortLabel>unspecified role</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </experimentalRole>\n" + " </experimentalRoleList>\n" + " </participant>\n"+ " </participantList>\n" + " </interaction>\n"+ " </interactionList>\n"+ " </entry>\n" + "</entrySet>"; private String interaction_different_source = "<?xml version='1.0' encoding='UTF-8'?>\n" + "<entrySet xmlns=\"http://psi.hupo.org/mi/mif\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" " + "xsi:schemaLocation=\"http://psi.hupo.org/mi/mif https://raw.githubusercontent.com/HUPO-PSI/miXML/master/2.5/src/MIF254.xsd\" " + "level=\"2\" version=\"5\" minorVersion=\"4\">\n" + " <entry>\n" + " <source releaseDate=\"2013-09-02Z\">\n" + " <names>\n" + " <shortLabel>mint</shortLabel>\n"+ " </names>\n"+ " </source>\n"+ " <interactionList>\n" + " <interaction id=\"1\">\n" + " <experimentList>\n" + " <experimentDescription id=\"2\">\n" + " <bibref>\n" + " <xref>\n" + " <primaryRef db=\"pubmed\" dbAc=\"MI:0446\" id=\"xxxxxx\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+ " </xref>\n"+ " </bibref>\n" + " <interactionDetectionMethod>\n" + " <names>\n" + " <shortLabel>unspecified method</shortLabel>\n"+ " </names>\n"+ " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0686\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+ " </xref>\n"+ " </interactionDetectionMethod>\n"+ " </experimentDescription>\n"+ " </experimentList>\n" + " <participantList>\n" + " <participant id=\"3\">\n" + " <interactor id=\"4\">\n" + " <names>\n" + " <shortLabel>protein test</shortLabel>\n" + " </names>\n" + " <interactorType>\n" + " <names>\n" + " <shortLabel>protein</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0326\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </interactorType>\n" + " </interactor>\n"+ " <biologicalRole>\n" + " <names>\n" + " <shortLabel>unspecified role</shortLabel>\n" + " 
</names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </biologicalRole>\n" + " <experimentalRoleList>\n" + " <experimentalRole>\n" + " <names>\n" + " <shortLabel>unspecified role</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </experimentalRole>\n" + " </experimentalRoleList>\n" + " </participant>\n"+ " </participantList>\n" + " </interaction>\n"+ " <interaction id=\"5\">\n" + " <experimentList>\n" + " <experimentDescription id=\"6\">\n" + " <bibref>\n" + " <xref>\n" + " <primaryRef db=\"pubmed\" dbAc=\"MI:0446\" id=\"xxxxxx\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+ " </xref>\n"+ " </bibref>\n" + " <interactionDetectionMethod>\n" + " <names>\n" + " <shortLabel>unspecified method</shortLabel>\n"+ " </names>\n"+ " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0686\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+ " </xref>\n"+ " </interactionDetectionMethod>\n"+ " </experimentDescription>\n"+ " </experimentList>\n" + " <participantList>\n" + " <participant id=\"7\">\n" + " <interactor id=\"8\">\n" + " <names>\n" + " <shortLabel>protein test2</shortLabel>\n" + " </names>\n" + " <interactorType>\n" + " <names>\n" + " <shortLabel>protein</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0326\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </interactorType>\n" + " </interactor>\n"+ " <biologicalRole>\n" + " <names>\n" + " <shortLabel>unspecified role</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </biologicalRole>\n" + " <experimentalRoleList>\n" + " <experimentalRole>\n" + " <names>\n" + " <shortLabel>unspecified role</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </experimentalRole>\n" + " </experimentalRoleList>\n" + " </participant>\n"+ " </participantList>\n" + " </interaction>\n"+ " </interactionList>\n"+ " </entry>\n" + " <entry>\n" + " <source releaseDate=\"2013-09-02Z\">\n" + " <names>\n" + " <shortLabel>intact</shortLabel>\n"+ " </names>\n"+ " </source>\n"+ " <interactionList>\n" + " <interaction id=\"1\">\n" + " <experimentList>\n" + " <experimentDescription id=\"2\">\n" + " <bibref>\n" + " <xref>\n" + " <primaryRef db=\"pubmed\" dbAc=\"MI:0446\" id=\"xxxxxx\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+ " </xref>\n"+ " </bibref>\n" + " <interactionDetectionMethod>\n" + " <names>\n" + " <shortLabel>unspecified method</shortLabel>\n"+ " </names>\n"+ " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0686\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+ " </xref>\n"+ " </interactionDetectionMethod>\n"+ " </experimentDescription>\n"+ " </experimentList>\n" + " <participantList>\n" + " <participant id=\"3\">\n" + " <interactor id=\"4\">\n" + " <names>\n" + " <shortLabel>protein test</shortLabel>\n" + " </names>\n" + " <interactorType>\n" + " <names>\n" + " <shortLabel>protein</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0326\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </interactorType>\n" + " </interactor>\n"+ " 
<biologicalRole>\n" + " <names>\n" + " <shortLabel>unspecified role</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </biologicalRole>\n" + " <experimentalRoleList>\n" + " <experimentalRole>\n" + " <names>\n" + " <shortLabel>unspecified role</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </experimentalRole>\n" + " </experimentalRoleList>\n" + " </participant>\n"+ " </participantList>\n" + " </interaction>\n"+ " <interaction id=\"5\">\n" + " <experimentList>\n" + " <experimentDescription id=\"6\">\n" + " <bibref>\n" + " <xref>\n" + " <primaryRef db=\"pubmed\" dbAc=\"MI:0446\" id=\"xxxxxx\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+ " </xref>\n"+ " </bibref>\n" + " <interactionDetectionMethod>\n" + " <names>\n" + " <shortLabel>unspecified method</shortLabel>\n"+ " </names>\n"+ " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0686\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+ " </xref>\n"+ " </interactionDetectionMethod>\n"+ " </experimentDescription>\n"+ " </experimentList>\n" + " <participantList>\n" + " <participant id=\"7\">\n" + " <interactor id=\"8\">\n" + " <names>\n" + " <shortLabel>protein test2</shortLabel>\n" + " </names>\n" + " <interactorType>\n" + " <names>\n" + " <shortLabel>protein</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0326\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </interactorType>\n" + " </interactor>\n"+ " <biologicalRole>\n" + " <names>\n" + " <shortLabel>unspecified role</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </biologicalRole>\n" + " <experimentalRoleList>\n" + " <experimentalRole>\n" + " <names>\n" + " <shortLabel>unspecified role</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </experimentalRole>\n" + " </experimentalRoleList>\n" + " </participant>\n"+ " </participantList>\n" + " </interaction>\n"+ " </interactionList>\n"+ " </entry>\n" + "</entrySet>"; private String interaction_availability = "<?xml version='1.0' encoding='UTF-8'?>\n" + "<entrySet xmlns=\"http://psi.hupo.org/mi/mif\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" " + "xsi:schemaLocation=\"http://psi.hupo.org/mi/mif https://raw.githubusercontent.com/HUPO-PSI/miXML/master/2.5/src/MIF254.xsd\" " + "level=\"2\" version=\"5\" minorVersion=\"4\">\n" + " <entry>\n" + " <interactionList>\n" + " <interaction id=\"1\">\n" + " <availability id=\"2\">copyright</availability>\n" + " <experimentList>\n" + " <experimentDescription id=\"3\">\n" + " <bibref>\n" + " <xref>\n" + " <primaryRef db=\"pubmed\" dbAc=\"MI:0446\" id=\"xxxxxx\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+ " </xref>\n"+ " </bibref>\n" + " <interactionDetectionMethod>\n" + " <names>\n" + " <shortLabel>unspecified method</shortLabel>\n"+ " </names>\n"+ " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0686\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+ " </xref>\n"+ " </interactionDetectionMethod>\n"+ " </experimentDescription>\n"+ " </experimentList>\n" + " <participantList>\n" + " 
<participant id=\"4\">\n" +
            " <interactor id=\"5\">\n" +
            " <names>\n" +
            " <shortLabel>protein test</shortLabel>\n" +
            " </names>\n" +
            " <interactorType>\n" +
            " <names>\n" +
            " <shortLabel>protein</shortLabel>\n" +
            " </names>\n" +
            " <xref>\n" +
            " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0326\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
            " </xref>\n" +
            " </interactorType>\n" +
            " </interactor>\n" +
            " <biologicalRole>\n" +
            " <names>\n" +
            " <shortLabel>unspecified role</shortLabel>\n" +
            " </names>\n" +
            " <xref>\n" +
            " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
            " </xref>\n" +
            " </biologicalRole>\n" +
            " <experimentalRoleList>\n" +
            " <experimentalRole>\n" +
            " <names>\n" +
            " <shortLabel>unspecified role</shortLabel>\n" +
            " </names>\n" +
            " <xref>\n" +
            " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
            " </xref>\n" +
            " </experimentalRole>\n" +
            " </experimentalRoleList>\n" +
            " </participant>\n" +
            " </participantList>\n" +
            " </interaction>\n" +
            " </interactionList>\n" +
            " </entry>\n" +
            "</entrySet>";

    @Test(expected = IllegalStateException.class)
    public void test_not_initialised_writer() {
        ExpandedXmlEvidenceWriter writer = new ExpandedXmlEvidenceWriter();
        writer.write(new XmlInteractionEvidence());
    }

    @Test(expected = IllegalArgumentException.class)
    public void test_not_initialised_no_options() {
        ExpandedXmlEvidenceWriter writer = new ExpandedXmlEvidenceWriter();
        writer.initialiseContext(null);
    }

    @Test
    public void test_single_interaction() throws XMLStreamException {
        StringWriter stringWriter = new StringWriter();
        ExpandedXmlEvidenceWriter writer = new ExpandedXmlEvidenceWriter(stringWriter);
        InteractionEvidence interaction = new XmlInteractionEvidence();
        ParticipantEvidence participant = new XmlParticipantEvidence(new XmlProtein("protein test"));
        interaction.addParticipant(participant);
        interaction.setExperiment(new XmlExperiment(new BibRef("xxxxxx")));
        writer.start();
        writer.write(interaction);
        writer.end();
        writer.close();
        Assert.assertEquals(this.interaction, stringWriter.toString());
    }

    @Test
    public void test_several_interactions1() throws XMLStreamException {
        StringWriter stringWriter = new StringWriter();
        ExpandedXmlEvidenceWriter writer = new ExpandedXmlEvidenceWriter(stringWriter);
        InteractionEvidence interaction = new XmlInteractionEvidence();
        ParticipantEvidence participant = new XmlParticipantEvidence(new XmlProtein("protein test"));
        interaction.addParticipant(participant);
        InteractionEvidence interaction2 = new XmlInteractionEvidence();
        ParticipantEvidence participant2 = new XmlParticipantEvidence(new XmlProtein("protein test2"));
        interaction2.addParticipant(participant2);
        interaction.setExperiment(new XmlExperiment(new BibRef("xxxxxx")));
        interaction2.setExperiment(interaction.getExperiment());
        writer.start();
        writer.write(Arrays.asList(interaction, interaction2));
        writer.end();
        writer.close();
        Assert.assertEquals(this.interaction_multiple, stringWriter.toString());
    }

    @Test
    public void test_several_interactions2() throws XMLStreamException {
        StringWriter stringWriter = new StringWriter();
        ExpandedXmlEvidenceWriter writer = new ExpandedXmlEvidenceWriter(stringWriter);
        InteractionEvidence interaction = new XmlInteractionEvidence();
        ParticipantEvidence participant = new XmlParticipantEvidence(new XmlProtein("protein test"));
        interaction.addParticipant(participant);
        InteractionEvidence interaction2 = new XmlInteractionEvidence();
        ParticipantEvidence participant2 = new XmlParticipantEvidence(new XmlProtein("protein test2"));
        interaction2.addParticipant(participant2);
        interaction.setExperiment(new XmlExperiment(new BibRef("xxxxxx")));
        interaction2.setExperiment(interaction.getExperiment());
        writer.start();
        writer.write(Arrays.asList(interaction, interaction2).iterator());
        writer.end();
        writer.close();
        Assert.assertEquals(this.interaction_multiple, stringWriter.toString());
    }

    @Test
    public void test_interactions_same_interactors1() throws XMLStreamException {
        StringWriter stringWriter = new StringWriter();
        ExpandedXmlEvidenceWriter writer = new ExpandedXmlEvidenceWriter(stringWriter);
        InteractionEvidence interaction = new XmlInteractionEvidence();
        ParticipantEvidence participant = new XmlParticipantEvidence(new XmlProtein("protein test"));
        interaction.addParticipant(participant);
        InteractionEvidence interaction2 = new XmlInteractionEvidence();
        ParticipantEvidence participant2 = new XmlParticipantEvidence(participant.getInteractor());
        interaction2.addParticipant(participant2);
        interaction.setExperiment(new XmlExperiment(new BibRef("xxxxxx")));
        interaction2.setExperiment(interaction.getExperiment());
        writer.start();
        writer.write(Arrays.asList(interaction, interaction2));
        writer.end();
        writer.close();
        Assert.assertEquals(this.interaction_same_experiment_interactors, stringWriter.toString());
    }

    @Test
    public void test_interactions_same_interactors2() throws XMLStreamException {
        StringWriter stringWriter = new StringWriter();
        ExpandedXmlEvidenceWriter writer = new ExpandedXmlEvidenceWriter(stringWriter);
        InteractionEvidence interaction = new XmlInteractionEvidence();
        ParticipantEvidence participant = new XmlParticipantEvidence(new XmlProtein("protein test"));
        interaction.addParticipant(participant);
        InteractionEvidence interaction2 = new XmlInteractionEvidence();
        ParticipantEvidence participant2 = new XmlParticipantEvidence(participant.getInteractor());
        interaction2.addParticipant(participant2);
        interaction.setExperiment(new XmlExperiment(new BibRef("xxxxxx")));
        interaction2.setExperiment(interaction.getExperiment());
        writer.start();
        writer.write(Arrays.asList(interaction, interaction2).iterator());
        writer.end();
        writer.close();
        Assert.assertEquals(this.interaction_same_experiment_interactors, stringWriter.toString());
    }

    @Test
    public void test_single_interaction_complexes() throws XMLStreamException {
        StringWriter stringWriter = new StringWriter();
        Complex complex = new XmlComplex("test complex");
        complex.getParticipants().add(new XmlModelledParticipant(new XmlProtein("test protein")));
        ExpandedXmlEvidenceWriter writer = new ExpandedXmlEvidenceWriter(stringWriter);
        InteractionEvidence interaction = new XmlInteractionEvidence();
        ParticipantEvidence participant = new XmlParticipantEvidence(complex);
        interaction.addParticipant(participant);
        interaction.setExperiment(new XmlExperiment(new BibRef("xxxxxx")));
        writer.start();
        writer.write(interaction);
        writer.end();
        writer.close();
        Assert.assertEquals(this.interaction_complexes, stringWriter.toString());
    }

    @Test
    public void test_single_interaction_complexes_as_Interactor() throws XMLStreamException {
        StringWriter stringWriter = new StringWriter();
        Complex complex = new XmlComplex("test complex");
        complex.getParticipants().add(new XmlModelledParticipant(new XmlProtein("test protein")));
        ExpandedXmlEvidenceWriter writer = new ExpandedXmlEvidenceWriter(stringWriter);
        writer.setWriteComplexesAsInteractors(true);
        InteractionEvidence interaction = new XmlInteractionEvidence();
        ParticipantEvidence participant = new XmlParticipantEvidence(complex);
        interaction.addParticipant(participant);
        interaction.setExperiment(new XmlExperiment(new BibRef("xxxxxx")));
        writer.start();
        writer.write(interaction);
        writer.end();
        writer.close();
        Assert.assertEquals(this.interaction_complexes_as_interactor, stringWriter.toString());
    }

    @Test
    public void test_interactions_different_entries1() throws XMLStreamException {
        StringWriter stringWriter = new StringWriter();
        ExpandedXmlEvidenceWriter writer = new ExpandedXmlEvidenceWriter(stringWriter);
        InteractionEvidence interaction = new XmlInteractionEvidence();
        ParticipantEvidence participant = new XmlParticipantEvidence(new XmlProtein("protein test"));
        interaction.addParticipant(participant);
        interaction.setExperiment(new XmlExperiment(new BibRef("xxxxxx")));
        writer.start();
        writer.write(interaction);
        writer.write(interaction);
        writer.end();
        writer.close();
        Assert.assertEquals(this.interaction_different_entries1, stringWriter.toString());
    }

    @Test
    public void test_interactions_different_entries2() throws XMLStreamException {
        StringWriter stringWriter = new StringWriter();
        ExpandedXmlEvidenceWriter writer = new ExpandedXmlEvidenceWriter(stringWriter);
        InteractionEvidence interaction = new XmlInteractionEvidence();
        ParticipantEvidence participant = new XmlParticipantEvidence(new XmlProtein("protein test"));
        interaction.addParticipant(participant);
        InteractionEvidence interaction2 = new XmlInteractionEvidence();
        ParticipantEvidence participant2 = new XmlParticipantEvidence(new XmlProtein("protein test2"));
        interaction2.addParticipant(participant2);
        interaction.setExperiment(new XmlExperiment(new BibRef("xxxxxx")));
        interaction2.setExperiment(interaction.getExperiment());
        writer.start();
        writer.write(Arrays.asList(interaction, interaction2));
        writer.write(Arrays.asList(interaction, interaction2));
        writer.end();
        writer.close();
        Assert.assertEquals(this.interaction_different_entries2, stringWriter.toString());
    }

    @Test
    public void test_interactions_different_entries3() throws XMLStreamException {
        StringWriter stringWriter = new StringWriter();
        ExpandedXmlEvidenceWriter writer = new ExpandedXmlEvidenceWriter(stringWriter);
        InteractionEvidence interaction = new XmlInteractionEvidence();
        ParticipantEvidence participant = new XmlParticipantEvidence(new XmlProtein("protein test"));
        interaction.addParticipant(participant);
        InteractionEvidence interaction2 = new XmlInteractionEvidence();
        ParticipantEvidence participant2 = new XmlParticipantEvidence(new XmlProtein("protein test2"));
        interaction2.addParticipant(participant2);
        interaction.setExperiment(new XmlExperiment(new BibRef("xxxxxx")));
        interaction2.setExperiment(interaction.getExperiment());
        writer.start();
        writer.write(Arrays.asList(interaction, interaction2).iterator());
        writer.write(Arrays.asList(interaction, interaction2).iterator());
        writer.end();
        writer.close();
        Assert.assertEquals(this.interaction_different_entries2, stringWriter.toString());
    }

    @Test
    public void test_interaction_source() throws XMLStreamException {
        StringWriter stringWriter = new StringWriter();
        ExpandedXmlEvidenceWriter writer = new ExpandedXmlEvidenceWriter(stringWriter);
        InteractionEvidence interaction = new XmlInteractionEvidence();
        ParticipantEvidence participant = new XmlParticipantEvidence(new XmlProtein("protein test"));
        interaction.addParticipant(participant);
        Source source = new XmlSource("intact");
        interaction.setExperiment(new XmlExperiment(new BibRef("xxxxxx")));
        interaction.getExperiment().getPublication().setSource(source);
        try {
            DatatypeFactory datatypeFactory = DatatypeFactory.newInstance();
            writer.setDefaultReleaseDate(datatypeFactory.newXMLGregorianCalendar("2013-09-02+00:00"));
        } catch (DatatypeConfigurationException e) {
            System.out.println(e);
        }
        writer.start();
        writer.write(interaction);
        writer.end();
        writer.close();
        Assert.assertEquals(this.interaction_source, stringWriter.toString());
    }

    @Test
    public void test_interactions_different_sources() throws XMLStreamException {
        StringWriter stringWriter = new StringWriter();
        ExpandedXmlEvidenceWriter writer = new ExpandedXmlEvidenceWriter(stringWriter);
        InteractionEvidence interaction = new XmlInteractionEvidence();
        ParticipantEvidence participant = new XmlParticipantEvidence(new XmlProtein("protein test"));
        interaction.addParticipant(participant);
        InteractionEvidence interaction2 = new XmlInteractionEvidence();
        ParticipantEvidence participant2 = new XmlParticipantEvidence(new XmlProtein("protein test2"));
        interaction2.addParticipant(participant2);
        Source source = new XmlSource("mint");
        interaction.setExperiment(new XmlExperiment(new BibRef("xxxxxx")));
        interaction2.setExperiment(interaction.getExperiment());
        interaction.getExperiment().getPublication().setSource(source);
        InteractionEvidence interaction3 = new XmlInteractionEvidence();
        interaction3.addParticipant(participant);
        interaction3.setExperiment(new XmlExperiment(new BibRef("xxxxxx")));
        InteractionEvidence interaction4 = new XmlInteractionEvidence();
        interaction4.addParticipant(participant2);
        interaction4.setExperiment(interaction3.getExperiment());
        Source source2 = new XmlSource("intact");
        interaction3.getExperiment().getPublication().setSource(source2);
        try {
            DatatypeFactory datatypeFactory = DatatypeFactory.newInstance();
            writer.setDefaultReleaseDate(datatypeFactory.newXMLGregorianCalendar("2013-09-02+00:00"));
        } catch (DatatypeConfigurationException e) {
            System.out.println(e);
        }
        writer.start();
        writer.write(Arrays.asList(interaction, interaction2, interaction3, interaction4));
        writer.end();
        writer.close();
        Assert.assertEquals(this.interaction_different_source, stringWriter.toString());
    }

    @Test
    public void test_single_interaction_availability() throws XMLStreamException {
        StringWriter stringWriter = new StringWriter();
        Complex complex = new XmlComplex("test complex");
        complex.getParticipants().add(new XmlModelledParticipant(new XmlProtein("protein test")));
        ExpandedXmlEvidenceWriter writer = new ExpandedXmlEvidenceWriter(stringWriter);
        InteractionEvidence interaction = new XmlInteractionEvidence();
        interaction.setAvailability("copyright");
        ParticipantEvidence participant = new XmlParticipantEvidence(complex.getParticipants().iterator().next().getInteractor());
        interaction.addParticipant(participant);
        interaction.setExperiment(new XmlExperiment(new BibRef("xxxxxx")));
        writer.start();
        writer.write(interaction);
        writer.end();
        writer.close();
        Assert.assertEquals(this.interaction_availability, stringWriter.toString());
    }
}
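
Every test above follows the same lifecycle: construct the writer over a java.io.Writer, call start() once, write() one or more interactions, then end() and close(). The minimal sketch below restates that flow outside JUnit. The class name ExpandedXmlEvidenceWriterExample and its main() harness are illustrative additions only, and the imports are assumed to be the same ones the test class above declares.

// Minimal usage sketch, not part of the test suite; assumes the same imports as the tests above.
public class ExpandedXmlEvidenceWriterExample {
    public static void main(String[] args) throws XMLStreamException {
        StringWriter stringWriter = new StringWriter();
        ExpandedXmlEvidenceWriter writer = new ExpandedXmlEvidenceWriter(stringWriter);

        // Build one interaction evidence with a single protein participant and an experiment,
        // mirroring the setup used in the tests.
        InteractionEvidence interaction = new XmlInteractionEvidence();
        interaction.addParticipant(new XmlParticipantEvidence(new XmlProtein("protein test")));
        interaction.setExperiment(new XmlExperiment(new BibRef("xxxxxx")));

        writer.start();            // begins the output document
        writer.write(interaction); // serialises the interaction
        writer.end();              // finishes the output document
        writer.close();

        System.out.println(stringWriter.toString());
    }
}
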
/*
 * Copyright Red Hat Inc. and/or its affiliates and other contributors
 * as indicated by the authors tag. All rights reserved.
 *
 * This copyrighted material is made available to anyone wishing to use,
 * modify, copy, or redistribute it subject to the terms and conditions
 * of the GNU General Public License version 2.
 *
 * This particular file is subject to the "Classpath" exception as provided in the
 * LICENSE file that accompanied this code.
 *
 * This program is distributed in the hope that it will be useful, but WITHOUT ANY
 * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
 * PARTICULAR PURPOSE. See the GNU General Public License for more details.
 * You should have received a copy of the GNU General Public License,
 * along with this distribution; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
 * MA 02110-1301, USA.
 */
package com.redhat.ceylon.common.tools;

import java.net.SocketTimeoutException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

import com.redhat.ceylon.common.tool.ArgumentParser;
import com.redhat.ceylon.common.tool.EnumerableParser;
import com.redhat.ceylon.common.tool.FatalToolError;
import com.redhat.ceylon.common.tool.OptionArgumentException;
import com.redhat.ceylon.common.tool.OptionArgumentException.ArgumentMultiplicityException;
import com.redhat.ceylon.common.tool.OptionArgumentException.InvalidArgumentValueException;
import com.redhat.ceylon.common.tool.OptionArgumentException.InvalidOptionValueException;
import com.redhat.ceylon.common.tool.OptionArgumentException.OptionMultiplicityException;
import com.redhat.ceylon.common.tool.OptionArgumentException.OptionWithoutArgumentException;
import com.redhat.ceylon.common.tool.OptionArgumentException.ToolInitializationException;
import com.redhat.ceylon.common.tool.OptionArgumentException.UnknownOptionException;
import com.redhat.ceylon.common.tool.OptionModel;
import com.redhat.ceylon.common.tool.ToolError;
import com.redhat.ceylon.common.tool.ToolModel;
import com.redhat.ceylon.common.tool.Tools;
import com.redhat.ceylon.common.tool.WordWrap;
import com.redhat.ceylon.common.tools.help.CeylonHelpTool;

/**
 * Responsible for generating usage messages, trying hard to be helpful.
 */
class Usage {

    // The following is from Apache commons-lang3 StringUtils.java
    /**
     * <p>Find the Levenshtein distance between two Strings.</p>
     *
     * <p>This is the number of changes needed to change one String into
     * another, where each change is a single character modification (deletion,
     * insertion or substitution).</p>
     *
     * <p>The previous implementation of the Levenshtein distance algorithm
     * was from <a href="http://www.merriampark.com/ld.htm">http://www.merriampark.com/ld.htm</a></p>
     *
     * <p>Chas Emerick has written an implementation in Java, which avoids an OutOfMemoryError
     * which can occur when my Java implementation is used with very large strings.<br>
     * This implementation of the Levenshtein distance algorithm
     * is from <a href="http://www.merriampark.com/ldjava.htm">http://www.merriampark.com/ldjava.htm</a></p>
     *
     * <pre>
     * StringUtils.getLevenshteinDistance(null, *) = IllegalArgumentException
     * StringUtils.getLevenshteinDistance(*, null) = IllegalArgumentException
     * StringUtils.getLevenshteinDistance("","") = 0
     * StringUtils.getLevenshteinDistance("","a") = 1
     * StringUtils.getLevenshteinDistance("aaapppp", "") = 7
     * StringUtils.getLevenshteinDistance("frog", "fog") = 1
     * StringUtils.getLevenshteinDistance("fly", "ant") = 3
     * StringUtils.getLevenshteinDistance("elephant", "hippo") = 7
     * StringUtils.getLevenshteinDistance("hippo", "elephant") = 7
     * StringUtils.getLevenshteinDistance("hippo", "zzzzzzzz") = 8
     * StringUtils.getLevenshteinDistance("hello", "hallo") = 1
     * </pre>
     *
     * @param s the first String, must not be null
     * @param t the second String, must not be null
     * @return result distance
     * @throws IllegalArgumentException if either String input is {@code null}
     * @since 3.0 Changed signature from getLevenshteinDistance(String, String) to
     * getLevenshteinDistance(CharSequence, CharSequence)
     */
    public static int getLevenshteinDistance(CharSequence s, CharSequence t) {
        if (s == null || t == null) {
            throw new IllegalArgumentException("Strings must not be null");
        }

        /*
           The difference between this impl. and the previous is that, rather
           than creating and retaining a matrix of size s.length() + 1 by t.length() + 1,
           we maintain two single-dimensional arrays of length s.length() + 1. The first, d,
           is the 'current working' distance array that maintains the newest distance cost
           counts as we iterate through the characters of String s. Each time we increment
           the index of String t we are comparing, d is copied to p, the second int[]. Doing so
           allows us to retain the previous cost counts as required by the algorithm (taking
           the minimum of the cost count to the left, up one, and diagonally up and to the left
           of the current cost count being calculated). (Note that the arrays aren't really
           copied anymore, just switched...this is clearly much better than cloning an array
           or doing a System.arraycopy() each time through the outer loop.)

           Effectively, the difference between the two implementations is this one does not
           cause an out of memory condition when calculating the LD over two very large strings.
         */

        int n = s.length(); // length of s
        int m = t.length(); // length of t

        if (n == 0) {
            return m;
        } else if (m == 0) {
            return n;
        }

        if (n > m) {
            // swap the input strings to consume less memory
            CharSequence tmp = s;
            s = t;
            t = tmp;
            n = m;
            m = t.length();
        }

        int p[] = new int[n + 1]; // 'previous' cost array, horizontally
        int d[] = new int[n + 1]; // cost array, horizontally
        int _d[]; // placeholder to assist in swapping p and d

        // indexes into strings s and t
        int i; // iterates through s
        int j; // iterates through t

        char t_j; // jth character of t

        int cost; // cost

        for (i = 0; i <= n; i++) {
            p[i] = i;
        }

        for (j = 1; j <= m; j++) {
            t_j = t.charAt(j - 1);
            d[0] = j;

            for (i = 1; i <= n; i++) {
                cost = s.charAt(i - 1) == t_j ? 0 : 1;
                // minimum of cell to the left+1, to the top+1, diagonally left and up +cost
                d[i] = Math.min(Math.min(d[i - 1] + 1, p[i] + 1), p[i - 1] + cost);
            }

            // copy current distance counts to 'previous row' distance counts
            _d = p;
            p = d;
            d = _d;
        }

        // our last action in the above loop was to switch d and p, so p now
        // actually has the most recent cost counts
        return p[n];
    }

    // A candidate is suggested when its edit distance from the given string
    // is at most half the given string's length, capped at 4.
    public static boolean isSuggestionFor(String given, String possibleSuggestion) {
        int ld = getLevenshteinDistance(given, possibleSuggestion);
        return ld <= Math.min(given.length() / 2, 4);
    }

    private final CeylonTool rootTool;
    private final String toolName;
    private final Exception t;
    private final WordWrap out;

    Usage(CeylonTool rootTool, String toolName, Exception t) {
        this.rootTool = rootTool;
        this.toolName = toolName;
        this.t = t;
        this.out = new WordWrap(System.err);
    }

    static void handleException(CeylonTool rootTool, String toolName, Exception t) throws Exception {
        new Usage(rootTool, toolName, t).run();
    }

    void run() throws Exception {
        if (!validToolName()) {
            printFirstLineBadToolName(toolName);
            if (t instanceof InvalidArgumentValueException) {
                printSuggestions(toolName, rootTool.getPluginLoader().getToolNames());
            }
            printHelpInvocation();
            out.flush();
        } else {
            printErrorMessage();
            if (t instanceof OptionArgumentException) {
                printUsage((OptionArgumentException)t);
            }
            if ((rootTool.getStacktraces() || t instanceof FatalToolError || !(t instanceof ToolError))
                    && !isNetworkTimeoutException(t)) {
                out.flush();
                t.printStackTrace(System.err);
            }
            out.flush();
        }
    }

    private void printErrorMessage() {
        StringBuilder sb = new StringBuilder();
        sb.append(Tools.progName());
        if (toolName != null) {
            sb.append(' ').append(toolName);
        }
        sb.append(": ");
        if (t instanceof FatalToolError) {
            sb.append(CeylonToolMessages.msg("fatal.error")).append(": ");
            sb.append(((FatalToolError)t).getErrorMessage());
        } else if (t instanceof ToolError) {
            sb.append(((ToolError)t).getErrorMessage());
        } else if (isNetworkTimeoutException(t)) {
            if (t.getClass().getName().endsWith(".RepositoryException")) {
                sb.append(CeylonToolMessages.msg("error.network.timeout.cmr"));
            } else {
                sb.append(CeylonToolMessages.msg("error.network.timeout"));
            }
        } else {
            sb.append(t.getLocalizedMessage());
        }
        String[] lines = sb.toString().split("\n");
        for (String line : lines) {
            out.append(line).newline();
        }
    }

    private boolean isNetworkTimeoutException(Exception ex) {
        return rootCause(ex) instanceof SocketTimeoutException;
    }

    private Throwable rootCause(Throwable th) {
        if (th.getCause() != null && th.getCause() != th) {
            return rootCause(th.getCause());
        } else {
            return th;
        }
    }

    private void printFirstLineBadToolName(String toolName) {
        StringBuilder sb = new StringBuilder();
        sb.append(Tools.progName()).append(": ");
        sb.append(CeylonToolMessages.msg("bad.tool.name", toolName));
        out.append(sb.toString()).newline();
    }

    private void printUsage(OptionArgumentException t) throws Exception {
        // It would be much more natural for OptionArgumentException to have a method
        // for this, unfortunately the implementation depends on the help tool
        // which isn't part of the tool API
        if (t instanceof UnknownOptionException) {
            UnknownOptionException e = (UnknownOptionException)t;
            printSynopsis(e.getToolModel());
            printOptionSuggestions(e);
        } else if (t instanceof OptionMultiplicityException) {
            OptionMultiplicityException e = (OptionMultiplicityException)t;
            printSynopsis(e.getToolModel());
            printOptions(e.getOptionModel().getLongName());
        } else if (t instanceof ArgumentMultiplicityException) {
            ArgumentMultiplicityException e = (ArgumentMultiplicityException)t;
            printSynopsis(e.getToolModel());
        } else if (t instanceof OptionWithoutArgumentException) {
            OptionWithoutArgumentException e = (OptionWithoutArgumentException)t;
            printSynopsis(e.getToolModel());
            printOptions(e.getOptionModel().getLongName());
            printSuggestions(e.getOptionModel().getArgument().getParser(), null);
        } else if (t instanceof InvalidArgumentValueException) {
            InvalidArgumentValueException e = (InvalidArgumentValueException)t;
            printSynopsis(e.getToolModel());
            printSuggestions(e.getArgumentModel().getParser(), e.getBadValue());
        } else if (t instanceof InvalidOptionValueException) {
            InvalidOptionValueException e = (InvalidOptionValueException)t;
            printSynopsis(e.getToolModel());
            printOptions(e.getOptionModel().getLongName());
            printSuggestions(e.getOptionModel().getArgument().getParser(), e.getBadValue());
        } else if (t instanceof ToolInitializationException) {
            ToolInitializationException e = (ToolInitializationException)t;
            printSynopsis(e.getToolModel());
        } else {
            printSynopsis(null);
        }
        printHelpInvocation();
    }

    private void printSynopsis(ToolModel<?> toolModel) throws Exception {
        if (!validToolName()) {
            return;
        }
        // Call the help tool to generate the usage
        out.newline();
        out.append(CeylonToolMessages.msg("usage")).newline();
        out.flush();
        // Can't call rootTool.bootstrap() because that would replace the
        // rootTool's toolName, which we need when printing option suggestions
        CeylonHelpTool r = new CeylonHelpTool();
        r.setToolLoader(rootTool.getPluginLoader());
        r.setSynopsis(true);
        r.setTool(toolModel);
        r.run();
        out.newline();
    }

    private void printOptions(String option) throws Exception {
        // Call the help tool to generate the usage
        out.flush();
        // TODO Because the help tool doesn't understand subcommands
        // it is unable to help about options to subcommands
        // Can't call rootTool.bootstrap() because that would replace the
        // rootTool's toolName, which we need when printing option suggestions
        CeylonHelpTool r = new CeylonHelpTool();
        r.setToolLoader(rootTool.getPluginLoader());
        r.setOptions(option != null ? option : "");
        r.run();
    }

    private void printHelpInvocation() {
        String helpInvocation = Tools.progName() + " help";
        if (toolName != null && validToolName()) {
            helpInvocation += " " + toolName;
        }
        out.append(CeylonToolMessages.msg("run.ceylon.help", helpInvocation));
        out.newline();
    }

    private void printSuggestions(ArgumentParser<?> parser, String badValue) {
        if (parser instanceof EnumerableParser) {
            EnumerableParser<?> enumerableParser = (EnumerableParser<?>)parser;
            printSuggestions(badValue, enumerableParser.possibilities());
        }
    }

    private void printSuggestions(String badValue, Iterable<String> valids) {
        List<String> l = new ArrayList<>();
        for (String valid : valids) {
            if (badValue == null || isSuggestionFor(badValue, valid)) {
                l.add(valid);
            }
        }
        Collections.sort(l);
        printSuggestions(l);
    }

    private void printOptionSuggestions(UnknownOptionException e) {
        if (e.getAggregating() == null || e.getAggregating().size() == 0) {
            /*if (!validToolName()) {
                return;
            }*/
            List<String> suggestions = new ArrayList<>();
            for (OptionModel<?> model : e.getToolModel().getOptions()) {
                if (e.getLongName() != null
                        && isSuggestionFor(e.getLongName(), model.getLongName())) {
                    suggestions.add("--" + model.getLongName());
                }
                if (e.getShortName() != null) {
                    if (model.getShortName() != null) {
                        suggestions.add("-" + model.getShortName());
                    }
                    if (getLevenshteinDistance(model.getLongName(), e.getShortName().toString()) <= 1) {
                        suggestions.add("--" + model.getLongName());
                    }
                }
            }
            printSuggestions(suggestions);
        }
    }

    private boolean validToolName() {
        for (String toolName : rootTool.getToolNames()) {
            if (rootTool.getToolModel(toolName) == null) {
                return false;
            }
        }
        return true;
    }

    private void printSuggestions(List<String> l) {
        if (l.isEmpty()) {
            return;
        }
        out.append(CeylonToolMessages.msg("did.you.mean")).newline();
        out.setIndent(4);
        for (String valid : l) {
            out.append(valid).newline();
        }
        out.setIndent(0);
        out.newline();
    }
}