// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.codeInspection.bytecodeAnalysis;

import com.intellij.codeInspection.dataFlow.DfaUtil;
import com.intellij.util.ArrayUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.*;

/**
 * For lattice, equations and solver description, see http://pat.keldysh.ru/~ilya/faba.pdf (in Russian)
 */
final class ELattice<T extends Enum<T>> {
  final T bot;
  final T top;

  ELattice(T bot, T top) {
    this.bot = bot;
    this.top = top;
  }

  final T join(T x, T y) {
    if (x == bot) return y;
    if (y == bot) return x;
    if (x == y) return x;
    return top;
  }

  final T meet(T x, T y) {
    if (x == top) return y;
    if (y == top) return x;
    if (x == y) return x;
    return bot;
  }
}

class ResultUtil {
  private static final EKey[] EMPTY_PRODUCT = new EKey[0];
  private final ELattice<Value> lattice;
  final Value top;
  final Value bottom;

  ResultUtil(ELattice<Value> lattice) {
    this.lattice = lattice;
    top = lattice.top;
    bottom = lattice.bot;
  }

  Result join(Result r1, Result r2) {
    Result result = checkFinal(r1, r2);
    if (result != null) return result;
    result = checkFinal(r2, r1); // symmetric check; the original called checkFinal(r1, r2) twice, making this a no-op
    if (result != null) return result;
    if (r1 instanceof Value && r2 instanceof Value) {
      return lattice.join((Value)r1, (Value)r2);
    }
    if (r1 instanceof Value && r2 instanceof Pending) {
      return addSingle((Pending)r2, (Value)r1);
    }
    if (r1 instanceof Pending && r2 instanceof Value) {
      return addSingle((Pending)r1, (Value)r2);
    }
    assert r1 instanceof Pending && r2 instanceof Pending;
    Pending pending1 = (Pending)r1;
    Pending pending2 = (Pending)r2;
    List<Component> left = Arrays.asList(pending1.delta);
    List<Component> right = Arrays.asList(pending2.delta);
    if (left.containsAll(right)) return pending1;
    if (right.containsAll(left)) return pending2;
    Set<Component> sum = new HashSet<>();
    sum.addAll(left);
    sum.addAll(right);
    return new Pending(DfaUtil.upwardsAntichain(sum, (l, r) -> r.isSuperStateOf(l)));
  }

  @Nullable
  private Result checkFinal(Result r1, Result r2) {
    if (r1 == top) return r1;
    if (r1 == bottom) return r2;
    return null;
  }

  @NotNull
  private Result addSingle(Pending pending, Value value) {
    for (int i = 0; i < pending.delta.length; i++) {
      Component component = pending.delta[i];
      if (component.ids.length == 0) {
        Value join = lattice.join(component.value, value);
        if (join == top) {
          return top;
        }
        else if (join == component.value) {
          return pending;
        }
        else {
          Component[] components = pending.delta.clone();
          components[i] = new Component(join, EMPTY_PRODUCT);
          return new Pending(components);
        }
      }
    }
    return new Pending(ArrayUtil.append(pending.delta, new Component(value, EMPTY_PRODUCT)));
  }
}

final class CoreHKey {
  final @NotNull MemberDescriptor myMethod;
  final int dirKey;

  CoreHKey(@NotNull MemberDescriptor method, int dirKey) {
    this.myMethod = method;
    this.dirKey = dirKey;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    CoreHKey coreHKey = (CoreHKey)o;
    return dirKey == coreHKey.dirKey && myMethod.equals(coreHKey.myMethod);
  }

  @Override
  public int hashCode() {
    return 31 * myMethod.hashCode() + dirKey;
  }

  @Override
  public String toString() {
    return "CoreHKey [" + myMethod + "|" + Direction.fromInt(dirKey) + "]";
  }
}

final class Solver {
  private final ELattice<Value> lattice;
  private final HashMap<EKey, HashSet<EKey>> dependencies = new HashMap<>();
  private final HashMap<EKey, Pending> pending = new HashMap<>();
  private final HashMap<EKey, Value> solved = new HashMap<>();
  private final Stack<EKey> moving = new Stack<>();

  private final ResultUtil resultUtil;
  private final HashMap<CoreHKey, Equation> equations = new HashMap<>();
  private final Value unstableValue;

  Solver(ELattice<Value> lattice, Value unstableValue) {
    this.lattice = lattice;
    this.unstableValue = unstableValue;
    resultUtil = new ResultUtil(lattice);
  }

  Result getUnknownResult() {
    return unstableValue;
  }

  void addEquation(Equation equation) {
    EKey key = equation.key;
    CoreHKey coreKey = new CoreHKey(key.member, key.dirKey);

    Equation previousEquation = equations.get(coreKey);
    if (previousEquation == null) {
      equations.put(coreKey, equation);
    }
    else {
      EKey joinKey = new EKey(coreKey.myMethod, coreKey.dirKey, equation.key.stable && previousEquation.key.stable, false);
      Result joinResult = resultUtil.join(equation.result, previousEquation.result);
      Equation joinEquation = new Equation(joinKey, joinResult);
      equations.put(coreKey, joinEquation);
    }
  }

  void queueEquation(Equation equation) {
    Result rhs = equation.result;
    if (rhs instanceof Value) {
      solved.put(equation.key, (Value)rhs);
      moving.push(equation.key);
    }
    else if (rhs instanceof Pending) {
      Pending pendResult = ((Pending)rhs).copy();
      Result norm = normalize(pendResult.delta);
      if (norm instanceof Value) {
        solved.put(equation.key, (Value)norm);
        moving.push(equation.key);
      }
      else {
        Pending pendResult1 = ((Pending)rhs).copy();
        for (Component component : pendResult1.delta) {
          for (EKey trigger : component.ids) {
            HashSet<EKey> set = dependencies.get(trigger);
            if (set == null) {
              set = new HashSet<>();
              dependencies.put(trigger, set);
            }
            set.add(equation.key);
          }
        }
        pending.put(equation.key, pendResult1);
      }
    }
  }

  Value negate(Value value) {
    switch (value) {
      case True:
        return Value.False;
      case False:
        return Value.True;
      default:
        return value;
    }
  }

  Map<EKey, Value> solve() {
    for (Equation equation : equations.values()) {
      queueEquation(equation);
    }
    while (!moving.empty()) {
      EKey id = moving.pop();
      Value value = solved.get(id);

      EKey[] initialPIds = id.stable ? new EKey[]{id, id.invertStability()} : new EKey[]{id.invertStability(), id};
      Value[] initialPVals = id.stable ? new Value[]{value, value} : new Value[]{value, unstableValue};

      EKey[] pIds = new EKey[]{initialPIds[0], initialPIds[1], initialPIds[0].negate(), initialPIds[1].negate()};
      Value[] pVals = new Value[]{initialPVals[0], initialPVals[1], negate(initialPVals[0]), negate(initialPVals[1])};

      for (int i = 0; i < pIds.length; i++) {
        EKey pId = pIds[i];
        Value pVal = pVals[i];
        HashSet<EKey> dIds = dependencies.get(pId);
        if (dIds == null) continue;
        for (EKey dId : dIds) {
          Pending pend = pending.remove(dId);
          if (pend != null) {
            Result pend1 = substitute(pend, pId, pVal);
            if (pend1 instanceof Value) {
              solved.put(dId, (Value)pend1);
              moving.push(dId);
            }
            else {
              pending.put(dId, (Pending)pend1);
            }
          }
        }
      }
    }
    pending.clear();
    return solved;
  }

  // substitute id -> value into pending
  Result substitute(@NotNull Pending pending, @NotNull EKey id, @NotNull Value value) {
    Component[] sum = pending.delta;
    for (Component intIdComponent : sum) {
      if (intIdComponent.remove(id)) {
        intIdComponent.value = lattice.meet(intIdComponent.value, value);
      }
    }
    return normalize(sum);
  }

  @NotNull
  Result normalize(Component @NotNull [] sum) {
    Value acc = lattice.bot;
    boolean computableNow = true;
    for (Component prod : sum) {
      if (prod.isEmpty() || prod.value == lattice.bot) {
        acc = lattice.join(acc, prod.value);
      }
      else {
        computableNow = false;
      }
    }
    return (acc == lattice.top || computableNow) ? acc : new Pending(sum);
  }
}
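// A minimal, runnable sketch of the join/meet semantics of ELattice on a
// concrete lattice. DemoValue is a hypothetical stand-in: the real Value
// enum of this analysis lives elsewhere in the bytecodeAnalysis package
// and has more elements.
enum DemoValue {Bot, True, False, Top}

class ELatticeDemo {
  public static void main(String[] args) {
    ELattice<DemoValue> lattice = new ELattice<>(DemoValue.Bot, DemoValue.Top);

    // bot is the identity of join; top is the identity of meet
    System.out.println(lattice.join(DemoValue.Bot, DemoValue.True));   // True
    System.out.println(lattice.meet(DemoValue.Top, DemoValue.False));  // False

    // two distinct non-extremal elements join to top and meet to bot
    System.out.println(lattice.join(DemoValue.True, DemoValue.False)); // Top
    System.out.println(lattice.meet(DemoValue.True, DemoValue.False)); // Bot
  }
}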
/*
 * Copyright 2000-2016 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.openapi.updateSettings.impl;

import com.intellij.ide.DataManager;
import com.intellij.ide.IdeBundle;
import com.intellij.openapi.actionSystem.AnAction;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.actionSystem.CommonDataKeys;
import com.intellij.openapi.application.ApplicationInfo;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ex.ApplicationInfoEx;
import com.intellij.openapi.options.BaseConfigurable;
import com.intellij.openapi.options.Configurable;
import com.intellij.openapi.options.ConfigurationException;
import com.intellij.openapi.options.SearchableConfigurable;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.updateSettings.UpdateStrategyCustomization;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.ui.CollectionComboBoxModel;
import com.intellij.ui.JBColor;
import com.intellij.ui.components.JBLabel;
import com.intellij.ui.components.labels.ActionLink;
import com.intellij.util.net.NetUtils;
import com.intellij.util.text.DateFormatUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import java.util.List;

/**
 * @author pti
 */
public class UpdateSettingsConfigurable extends BaseConfigurable implements SearchableConfigurable, Configurable.NoScroll {
  private final UpdateSettings mySettings;
  private final boolean myCheckNowEnabled;
  private UpdatesSettingsPanel myPanel;

  @SuppressWarnings("unused")
  public UpdateSettingsConfigurable() {
    this(true);
  }

  public UpdateSettingsConfigurable(boolean checkNowEnabled) {
    mySettings = UpdateSettings.getInstance();
    myCheckNowEnabled = checkNowEnabled;
  }

  @Override
  public JComponent createComponent() {
    myPanel = new UpdatesSettingsPanel(myCheckNowEnabled);
    return myPanel.myPanel;
  }

  @Override
  public String getDisplayName() {
    return IdeBundle.message("updates.settings.title");
  }

  @NotNull
  @Override
  public String getHelpTopic() {
    return "preferences.updates";
  }

  @Override
  @NotNull
  public String getId() {
    return getHelpTopic();
  }

  @Override
  public void apply() throws ConfigurationException {
    if (myPanel.myUseSecureConnection.isSelected() && !NetUtils.isSniEnabled()) {
      throw new ConfigurationException(IdeBundle.message("update.sni.disabled.error"));
    }

    boolean wasEnabled = mySettings.isCheckNeeded();
    mySettings.setCheckNeeded(myPanel.myCheckForUpdates.isSelected());
    if (wasEnabled != mySettings.isCheckNeeded()) {
      UpdateCheckerComponent checker = ApplicationManager.getApplication().getComponent(UpdateCheckerComponent.class);
      if (checker != null) {
        if (wasEnabled) {
          checker.cancelChecks();
        }
        else {
          checker.queueNextCheck();
        }
      }
    }

    mySettings.setSelectedChannelStatus(myPanel.getSelectedChannelType());
    mySettings.setSecureConnection(myPanel.myUseSecureConnection.isSelected());
  }

  @Override
  public void reset() {
    myPanel.myCheckForUpdates.setSelected(mySettings.isCheckNeeded());
    myPanel.myUseSecureConnection.setSelected(mySettings.isSecureConnection());
    myPanel.updateLastCheckedLabel();
    myPanel.setSelectedChannelType(mySettings.getSelectedActiveChannel());
  }

  @Override
  public boolean isModified() {
    if (myPanel == null) {
      return false;
    }
    if (mySettings.isCheckNeeded() != myPanel.myCheckForUpdates.isSelected() ||
        mySettings.isSecureConnection() != myPanel.myUseSecureConnection.isSelected()) {
      return true;
    }
    Object channel = myPanel.myUpdateChannels.getSelectedItem();
    return channel != null && !channel.equals(mySettings.getSelectedActiveChannel());
  }

  @Override
  public void disposeUIResources() {
    myPanel = null;
  }

  private static class UpdatesSettingsPanel {
    private final UpdateSettings mySettings;

    private JPanel myPanel;
    private JCheckBox myCheckForUpdates;
    private JComboBox<ChannelStatus> myUpdateChannels;
    private JButton myCheckNow;
    private JBLabel myChannelWarning;
    private JCheckBox myUseSecureConnection;
    private JLabel myBuildNumber;
    private JLabel myVersionNumber;
    private JLabel myLastCheckedDate;
    @SuppressWarnings("unused") private ActionLink myIgnoredBuildsLink;

    public UpdatesSettingsPanel(boolean checkNowEnabled) {
      mySettings = UpdateSettings.getInstance();

      ApplicationInfo appInfo = ApplicationInfo.getInstance();
      String majorVersion = appInfo.getMajorVersion();
      String versionNumber = "";
      if (majorVersion != null && majorVersion.trim().length() > 0) {
        String minorVersion = appInfo.getMinorVersion();
        if (minorVersion != null && minorVersion.trim().length() > 0) {
          versionNumber = majorVersion + "." + minorVersion;
        }
        else {
          versionNumber = majorVersion + ".0";
        }
      }
      myVersionNumber.setText(appInfo.getVersionName() + " " + versionNumber);
      myBuildNumber.setText(appInfo.getBuild().asString());

      LabelTextReplacingUtil.replaceText(myPanel);

      if (checkNowEnabled) {
        myCheckNow.addActionListener(e -> {
          Project project = CommonDataKeys.PROJECT.getData(DataManager.getInstance().getDataContext(myCheckNow));
          UpdateSettings settings = new UpdateSettings();
          settings.loadState(mySettings.getState());
          settings.setSelectedChannelStatus(getSelectedChannelType());
          settings.setSecureConnection(myUseSecureConnection.isSelected());
          UpdateChecker.updateAndShowResult(project, settings);
          updateLastCheckedLabel();
        });
      }
      else {
        myCheckNow.setVisible(false);
      }

      UpdateStrategyCustomization tweaker = UpdateStrategyCustomization.getInstance();
      ChannelStatus current = mySettings.getSelectedActiveChannel();
      myUpdateChannels.setModel(new CollectionComboBoxModel<>(mySettings.getActiveChannels(), current));
      myUpdateChannels.setEnabled(!ApplicationInfoEx.getInstanceEx().isEAP() || !tweaker.forceEapUpdateChannelForEapBuilds());
      myUpdateChannels.addActionListener(e -> {
        boolean lessStable = current.compareTo(getSelectedChannelType()) > 0;
        myChannelWarning.setVisible(lessStable);
      });
      myChannelWarning.setForeground(JBColor.RED);
    }

    private void createUIComponents() {
      myIgnoredBuildsLink = new ActionLink(IdeBundle.message("updates.settings.ignored"), new AnAction() {
        @Override
        public void actionPerformed(AnActionEvent e) {
          List<String> buildNumbers = mySettings.getIgnoredBuildNumbers();
          String text = StringUtil.join(buildNumbers, "\n");
          String result = Messages.showMultilineInputDialog(null, null, IdeBundle.message("updates.settings.ignored.title"), text, null, null);
          if (result != null) {
            buildNumbers.clear();
            buildNumbers.addAll(StringUtil.split(result, "\n"));
          }
        }
      });
    }

    private void updateLastCheckedLabel() {
      long time = mySettings.getLastTimeChecked();
      myLastCheckedDate.setText(time == 0 ? IdeBundle.message("updates.last.check.never") : DateFormatUtil.formatPrettyDateTime(time));
    }

    public ChannelStatus getSelectedChannelType() {
      return (ChannelStatus)myUpdateChannels.getSelectedItem();
    }

    public void setSelectedChannelType(ChannelStatus channelType) {
      myUpdateChannels.setSelectedItem(channelType != null ? channelType : ChannelStatus.RELEASE);
    }
  }
}
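// A framework-free sketch of the isModified/apply/reset contract the
// configurable above implements: compare the UI field-by-field against the
// persisted settings, persist only on a real change, and push persisted
// state back into the UI on reset. All names below are illustrative; this
// is not IntelliJ platform API.
final class SettingsContractDemo {
  static final class PersistedSettings { boolean checkForUpdates = true; }
  static final class Form { boolean checkForUpdatesBox; }

  static boolean isModified(PersistedSettings s, Form f) {
    return s.checkForUpdates != f.checkForUpdatesBox;
  }

  static void apply(PersistedSettings s, Form f) {
    if (!isModified(s, f)) return; // nothing to persist
    s.checkForUpdates = f.checkForUpdatesBox;
  }

  static void reset(PersistedSettings s, Form f) {
    f.checkForUpdatesBox = s.checkForUpdates; // discard unsaved edits
  }

  public static void main(String[] args) {
    PersistedSettings settings = new PersistedSettings();
    Form form = new Form();
    reset(settings, form);                          // populate the UI
    form.checkForUpdatesBox = false;                // user unchecks the box
    System.out.println(isModified(settings, form)); // true
    apply(settings, form);
    System.out.println(isModified(settings, form)); // false
  }
}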
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.airlift.stats.cardinality;

import io.airlift.slice.DynamicSliceOutput;
import io.airlift.slice.Murmur3;
import io.airlift.slice.Slice;
import io.airlift.slice.SliceOutput;
import org.testng.annotations.Test;

import static io.airlift.slice.testing.SliceAssertions.assertSlicesEqual;
import static io.airlift.stats.cardinality.Utils.numberOfBuckets;
import static org.testng.Assert.assertEquals;

public class TestDenseSerialization
{
    @Test
    public void testEmpty()
            throws Exception
    {
        SliceOutput expected = new DynamicSliceOutput(1)
                .appendByte(3)  // format tag
                .appendByte(12) // p
                .appendByte(0); // baseline

        for (int i = 0; i < 1 << (12 - 1); i++) {
            expected.appendByte(0);
        }

        // overflows
        expected.appendByte(0)
                .appendByte(0);

        assertSlicesEqual(makeHll(12).serialize(), expected.slice());
    }

    @Test
    public void testSingleNoOverflow()
            throws Exception
    {
        byte[] buckets = new byte[1 << (12 - 1)];
        buckets[326] = 0b0000_0001;

        Slice expected = new DynamicSliceOutput(1)
                .appendByte(3)        // format tag
                .appendByte(12)       // p
                .appendByte(0)        // baseline
                .appendBytes(buckets) // buckets
                // overflows
                .appendByte(0)
                .appendByte(0)
                .slice();

        assertSlicesEqual(makeHll(12, 0).serialize(), expected);
    }

    @Test
    public void testSingleWithOverflow()
            throws Exception
    {
        byte[] buckets = new byte[1 << (12 - 1)];
        buckets[1353] = (byte) 0b1111_0000;

        Slice expected = new DynamicSliceOutput(1)
                .appendByte(3)        // format tag
                .appendByte(12)       // p
                .appendByte(0)        // baseline
                .appendBytes(buckets) // buckets
                // overflows
                .appendByte(1)
                .appendByte(0)
                // overflow bucket
                .appendByte(0x92)
                .appendByte(0xA)
                // overflow value
                .appendByte(2)
                .slice();

        assertSlicesEqual(makeHll(12, 61697).serialize(), expected);
    }

    @Test
    public void testMultipleOverflow()
            throws Exception
    {
        byte[] buckets = new byte[1 << (12 - 1)];
        buckets[1353] = (byte) 0b1111_0000;
        buckets[2024] = (byte) 0b1111_0000;

        Slice expected = new DynamicSliceOutput(1)
                .appendByte(3)        // format tag
                .appendByte(12)       // p
                .appendByte(0)        // baseline
                .appendBytes(buckets) // buckets
                // overflows
                .appendByte(2)
                .appendByte(0)
                // overflow bucket
                .appendByte(146)
                .appendByte(10)
                .appendByte(208)
                .appendByte(15)
                // overflow value
                .appendByte(2)
                .appendByte(4)
                .slice();

        assertSlicesEqual(makeHll(12, 61697, 394873).serialize(), expected);

        // test commutativity
        assertSlicesEqual(makeHll(12, 394873, 61697).serialize(), expected);
    }

    @Test
    public void testMergeWithOverflows()
            throws Exception
    {
        DenseHll expected = makeHll(4, 37227, 93351);

        assertSlicesEqual(
                makeHll(4, 37227).mergeWith(makeHll(4, 93351)).serialize(),
                expected.serialize());

        // test commutativity
        assertSlicesEqual(
                makeHll(4, 93351).mergeWith(makeHll(4, 37227)).serialize(),
                expected.serialize());
    }

    @Test
    public void testBaselineAdjustment()
            throws Exception
    {
        byte[] buckets = new byte[] {0x45, 0x23, 0x01, 0x31, 0x22, 0x05, 0x04, 0x01};

        Slice expected = new DynamicSliceOutput(1)
                .appendByte(3)        // format tag
                .appendByte(4)        // p
                .appendByte(2)        // baseline
                .appendBytes(buckets) // buckets
                // overflows
                .appendByte(0)
                .appendByte(0)
                .slice();

        DenseHll hll = new DenseHll(4);

        for (int i = 0; i < 100; i++) {
            hll.insertHash(Murmur3.hash64(i));
        }

        assertSlicesEqual(hll.serialize(), expected);
    }

    @Test
    public void testOverflowAfterBaselineIncrement()
            throws Exception
    {
        byte[] buckets = new byte[] {0x45, 0x23, 0x01, 0x31, 0x22, 0x05, 0x04, (byte) 0xF1};

        Slice expected = new DynamicSliceOutput(1)
                .appendByte(3)        // format tag
                .appendByte(4)        // p
                .appendByte(2)        // baseline
                .appendBytes(buckets) // buckets
                // overflows
                .appendByte(1)
                .appendByte(0)
                // overflow bucket
                .appendByte(14)
                .appendByte(0)
                // overflow value
                .appendByte(5)
                .slice();

        DenseHll hll = new DenseHll(4);
        for (int i = 0; i < 100; i++) {
            hll.insertHash(Murmur3.hash64(i));
        }
        hll.insertHash(Murmur3.hash64(37227));

        assertSlicesEqual(hll.serialize(), expected);
    }

    @Test
    public void testBaselineAdjustmentAfterOverflow()
            throws Exception
    {
        byte[] buckets = new byte[] {0x45, 0x23, 0x01, 0x31, 0x22, 0x05, 0x04, (byte) 0xF1};

        Slice expected = new DynamicSliceOutput(1)
                .appendByte(3)        // format tag
                .appendByte(4)        // p
                .appendByte(2)        // baseline
                .appendBytes(buckets) // buckets
                // overflows
                .appendByte(1)
                .appendByte(0)
                // overflow bucket
                .appendByte(14)
                .appendByte(0)
                // overflow value
                .appendByte(5)
                .slice();

        DenseHll hll = new DenseHll(4);
        hll.insertHash(Murmur3.hash64(37227));
        for (int i = 0; i < 100; i++) {
            hll.insertHash(Murmur3.hash64(i));
        }

        assertSlicesEqual(hll.serialize(), expected);
    }

    @Test
    public void testRoundtrip()
            throws Exception
    {
        DenseHll hll = new DenseHll(4);

        for (int i = 0; i < 1000; i++) {
            hll.insertHash(Murmur3.hash64(i));

            Slice serialized = hll.serialize();
            Slice reserialized = new DenseHll(serialized).serialize();

            assertSlicesEqual(serialized, reserialized);
        }
    }

    @Test
    public void testDeserializeDenseV1NoOverflows()
            throws Exception
    {
        int indexBitLength = 4;
        int numberOfBuckets = numberOfBuckets(indexBitLength);
        Slice serialized = new DynamicSliceOutput(1)
                .appendByte(Format.DENSE_V1.getTag())      // format tag
                .appendByte(indexBitLength)                // p
                .appendByte(10)                            // baseline
                .appendBytes(new byte[numberOfBuckets / 2]) // buckets
                // overflow bucket
                .appendByte(0xFF)
                .appendByte(0xFF)
                // overflow value
                .appendByte(0)
                .slice();

        DenseHll deserialized = new DenseHll(serialized);
        for (int i = 0; i < numberOfBuckets; i++) {
            assertEquals(deserialized.getValue(i), 10);
        }
        deserialized.verify();
    }

    @Test
    public void testDeserializeDenseV1EmptyOverflow()
            throws Exception
    {
        // bucket 1 has a value of 17 (i.e., baseline = 2, delta == 15 and overflow is present with a value of 0)
        int indexBitLength = 4;
        int numberOfBuckets = numberOfBuckets(indexBitLength);
        Slice serialized = new DynamicSliceOutput(1)
                .appendByte(Format.DENSE_V1.getTag()) // format tag
                .appendByte(indexBitLength)           // p
                .appendByte(2)                        // baseline
                .appendBytes(new byte[] {
                        0x0F, 0x00, 0x00, 0x00,
                        0x00, 0x00, 0x00, 0x00})      // buckets
                // overflow bucket
                .appendByte(0x01)
                .appendByte(0x00)
                // overflow value
                .appendByte(0)
                .slice();

        DenseHll deserialized = new DenseHll(serialized);
        for (int i = 0; i < numberOfBuckets; i++) {
            if (i == 1) {
                assertEquals(deserialized.getValue(i), 17);
            }
            else {
                assertEquals(deserialized.getValue(i), 2);
            }
        }
        deserialized.verify();
    }

    @Test
    public void testDeserializeDenseV1Overflow()
            throws Exception
    {
        // bucket 1 has a value of 20 (i.e., baseline = 2, delta == 15, overflow == 3)
        int indexBitLength = 4;
        int numberOfBuckets = numberOfBuckets(indexBitLength);
        Slice serialized = new DynamicSliceOutput(1)
                .appendByte(Format.DENSE_V1.getTag()) // format tag
                .appendByte(indexBitLength)           // p
                .appendByte(2)                        // baseline
                .appendBytes(new byte[] {
                        0x0F, 0x00, 0x00, 0x00,
                        0x00, 0x00, 0x00, 0x00})      // buckets
                // overflow bucket
                .appendByte(0x01)
                .appendByte(0x00)
                // overflow value
                .appendByte(3)
                .slice();

        DenseHll deserialized = new DenseHll(serialized);
        for (int i = 0; i < numberOfBuckets; i++) {
            if (i == 1) {
                assertEquals(deserialized.getValue(i), 20);
            }
            else {
                assertEquals(deserialized.getValue(i), 2);
            }
        }
        deserialized.verify();
    }

    private static DenseHll makeHll(int indexBits, long... values)
    {
        DenseHll result = new DenseHll(indexBits);
        for (long value : values) {
            result.insertHash(Murmur3.hash64(value));
        }
        return result;
    }
}
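// A sketch (illustrative helpers, not the DenseHll API) of the packing the
// fixtures above rely on: with indexBitLength p there are 2^p logical
// buckets stored as 4-bit deltas, two per byte, so the serialized bucket
// section is 2^(p - 1) bytes. The 0b1111_0000 fixtures suggest even-numbered
// buckets occupy the high nibble and odd-numbered buckets the low nibble.
final class NibblePackingDemo
{
    static void setDelta(byte[] packed, int bucket, int delta)
    {
        int slot = bucket / 2;
        if (bucket % 2 == 0) {
            packed[slot] = (byte) ((packed[slot] & 0x0F) | ((delta & 0x0F) << 4)); // high nibble
        }
        else {
            packed[slot] = (byte) ((packed[slot] & 0xF0) | (delta & 0x0F)); // low nibble
        }
    }

    static int getDelta(byte[] packed, int bucket)
    {
        int shift = (bucket % 2 == 0) ? 4 : 0;
        return (packed[bucket / 2] >> shift) & 0x0F;
    }

    public static void main(String[] args)
    {
        byte[] packed = new byte[1 << (12 - 1)]; // 2048 bytes hold 4096 buckets when p = 12

        setDelta(packed, 2706, 0b1111); // even bucket -> high nibble of byte 1353
        System.out.println(packed[1353] == (byte) 0b1111_0000); // matches testSingleWithOverflow
        System.out.println(getDelta(packed, 2706)); // 15
    }
}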
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.metadata; import com.facebook.presto.operator.aggregation.ApproximateCountDistinctAggregation; import com.facebook.presto.operator.aggregation.ApproximateDoublePercentileAggregations; import com.facebook.presto.operator.aggregation.ApproximateDoublePercentileArrayAggregations; import com.facebook.presto.operator.aggregation.ApproximateLongPercentileAggregations; import com.facebook.presto.operator.aggregation.ApproximateLongPercentileArrayAggregations; import com.facebook.presto.operator.aggregation.ApproximateRealPercentileAggregations; import com.facebook.presto.operator.aggregation.ApproximateRealPercentileArrayAggregations; import com.facebook.presto.operator.aggregation.ApproximateSetAggregation; import com.facebook.presto.operator.aggregation.AverageAggregations; import com.facebook.presto.operator.aggregation.BitwiseAndAggregation; import com.facebook.presto.operator.aggregation.BitwiseOrAggregation; import com.facebook.presto.operator.aggregation.BooleanAndAggregation; import com.facebook.presto.operator.aggregation.BooleanOrAggregation; import com.facebook.presto.operator.aggregation.CentralMomentsAggregation; import com.facebook.presto.operator.aggregation.ClassificationFallOutAggregation; import com.facebook.presto.operator.aggregation.ClassificationMissRateAggregation; import com.facebook.presto.operator.aggregation.ClassificationPrecisionAggregation; import com.facebook.presto.operator.aggregation.ClassificationRecallAggregation; import com.facebook.presto.operator.aggregation.ClassificationThresholdsAggregation; import com.facebook.presto.operator.aggregation.CountAggregation; import com.facebook.presto.operator.aggregation.CountIfAggregation; import com.facebook.presto.operator.aggregation.DefaultApproximateCountDistinctAggregation; import com.facebook.presto.operator.aggregation.DoubleCorrelationAggregation; import com.facebook.presto.operator.aggregation.DoubleCovarianceAggregation; import com.facebook.presto.operator.aggregation.DoubleHistogramAggregation; import com.facebook.presto.operator.aggregation.DoubleRegressionAggregation; import com.facebook.presto.operator.aggregation.DoubleSumAggregation; import com.facebook.presto.operator.aggregation.EntropyAggregation; import com.facebook.presto.operator.aggregation.GeometricMeanAggregations; import com.facebook.presto.operator.aggregation.InternalAggregationFunction; import com.facebook.presto.operator.aggregation.IntervalDayToSecondAverageAggregation; import com.facebook.presto.operator.aggregation.IntervalDayToSecondSumAggregation; import com.facebook.presto.operator.aggregation.IntervalYearToMonthAverageAggregation; import com.facebook.presto.operator.aggregation.IntervalYearToMonthSumAggregation; import com.facebook.presto.operator.aggregation.LongSumAggregation; import com.facebook.presto.operator.aggregation.MaxDataSizeForStats; import com.facebook.presto.operator.aggregation.MergeHyperLogLogAggregation; import 
com.facebook.presto.operator.aggregation.MergeQuantileDigestFunction; import com.facebook.presto.operator.aggregation.MergeTDigestFunction; import com.facebook.presto.operator.aggregation.RealCorrelationAggregation; import com.facebook.presto.operator.aggregation.RealCovarianceAggregation; import com.facebook.presto.operator.aggregation.RealGeometricMeanAggregations; import com.facebook.presto.operator.aggregation.RealHistogramAggregation; import com.facebook.presto.operator.aggregation.RealRegressionAggregation; import com.facebook.presto.operator.aggregation.RealSumAggregation; import com.facebook.presto.operator.aggregation.SumDataSizeForStats; import com.facebook.presto.operator.aggregation.VarianceAggregation; import com.facebook.presto.operator.aggregation.arrayagg.ArrayAggregationFunction; import com.facebook.presto.operator.aggregation.differentialentropy.DifferentialEntropyAggregation; import com.facebook.presto.operator.aggregation.histogram.Histogram; import com.facebook.presto.operator.aggregation.multimapagg.MultimapAggregationFunction; import com.facebook.presto.operator.scalar.ArrayAllMatchFunction; import com.facebook.presto.operator.scalar.ArrayAnyMatchFunction; import com.facebook.presto.operator.scalar.ArrayCardinalityFunction; import com.facebook.presto.operator.scalar.ArrayCombinationsFunction; import com.facebook.presto.operator.scalar.ArrayContains; import com.facebook.presto.operator.scalar.ArrayDistinctFromOperator; import com.facebook.presto.operator.scalar.ArrayDistinctFunction; import com.facebook.presto.operator.scalar.ArrayElementAtFunction; import com.facebook.presto.operator.scalar.ArrayEqualOperator; import com.facebook.presto.operator.scalar.ArrayExceptFunction; import com.facebook.presto.operator.scalar.ArrayFilterFunction; import com.facebook.presto.operator.scalar.ArrayFunctions; import com.facebook.presto.operator.scalar.ArrayGreaterThanOperator; import com.facebook.presto.operator.scalar.ArrayGreaterThanOrEqualOperator; import com.facebook.presto.operator.scalar.ArrayHashCodeOperator; import com.facebook.presto.operator.scalar.ArrayIndeterminateOperator; import com.facebook.presto.operator.scalar.ArrayIntersectFunction; import com.facebook.presto.operator.scalar.ArrayLessThanOperator; import com.facebook.presto.operator.scalar.ArrayLessThanOrEqualOperator; import com.facebook.presto.operator.scalar.ArrayMaxFunction; import com.facebook.presto.operator.scalar.ArrayMinFunction; import com.facebook.presto.operator.scalar.ArrayNgramsFunction; import com.facebook.presto.operator.scalar.ArrayNoneMatchFunction; import com.facebook.presto.operator.scalar.ArrayNotEqualOperator; import com.facebook.presto.operator.scalar.ArrayPositionFunction; import com.facebook.presto.operator.scalar.ArrayRemoveFunction; import com.facebook.presto.operator.scalar.ArrayReverseFunction; import com.facebook.presto.operator.scalar.ArrayShuffleFunction; import com.facebook.presto.operator.scalar.ArraySliceFunction; import com.facebook.presto.operator.scalar.ArraySortComparatorFunction; import com.facebook.presto.operator.scalar.ArraySortFunction; import com.facebook.presto.operator.scalar.ArrayUnionFunction; import com.facebook.presto.operator.scalar.ArraysOverlapFunction; import com.facebook.presto.operator.scalar.BitwiseFunctions; import com.facebook.presto.operator.scalar.BuiltInScalarFunctionImplementation; import com.facebook.presto.operator.scalar.CharacterStringCasts; import com.facebook.presto.operator.scalar.ColorFunctions; import 
com.facebook.presto.operator.scalar.CombineHashFunction; import com.facebook.presto.operator.scalar.DataSizeFunctions; import com.facebook.presto.operator.scalar.DateTimeFunctions; import com.facebook.presto.operator.scalar.EmptyMapConstructor; import com.facebook.presto.operator.scalar.FailureFunction; import com.facebook.presto.operator.scalar.HmacFunctions; import com.facebook.presto.operator.scalar.HyperLogLogFunctions; import com.facebook.presto.operator.scalar.IpPrefixFunctions; import com.facebook.presto.operator.scalar.JoniRegexpCasts; import com.facebook.presto.operator.scalar.JoniRegexpFunctions; import com.facebook.presto.operator.scalar.JoniRegexpReplaceLambdaFunction; import com.facebook.presto.operator.scalar.JsonFunctions; import com.facebook.presto.operator.scalar.JsonOperators; import com.facebook.presto.operator.scalar.MapCardinalityFunction; import com.facebook.presto.operator.scalar.MapDistinctFromOperator; import com.facebook.presto.operator.scalar.MapEntriesFunction; import com.facebook.presto.operator.scalar.MapEqualOperator; import com.facebook.presto.operator.scalar.MapFromEntriesFunction; import com.facebook.presto.operator.scalar.MapIndeterminateOperator; import com.facebook.presto.operator.scalar.MapKeys; import com.facebook.presto.operator.scalar.MapNotEqualOperator; import com.facebook.presto.operator.scalar.MapSubscriptOperator; import com.facebook.presto.operator.scalar.MapValues; import com.facebook.presto.operator.scalar.MathFunctions; import com.facebook.presto.operator.scalar.MathFunctions.LegacyLogFunction; import com.facebook.presto.operator.scalar.MultimapFromEntriesFunction; import com.facebook.presto.operator.scalar.QuantileDigestFunctions; import com.facebook.presto.operator.scalar.Re2JRegexpFunctions; import com.facebook.presto.operator.scalar.Re2JRegexpReplaceLambdaFunction; import com.facebook.presto.operator.scalar.RepeatFunction; import com.facebook.presto.operator.scalar.SequenceFunction; import com.facebook.presto.operator.scalar.SessionFunctions; import com.facebook.presto.operator.scalar.SplitToMapFunction; import com.facebook.presto.operator.scalar.SplitToMultimapFunction; import com.facebook.presto.operator.scalar.StringFunctions; import com.facebook.presto.operator.scalar.TDigestFunctions; import com.facebook.presto.operator.scalar.TryFunction; import com.facebook.presto.operator.scalar.TypeOfFunction; import com.facebook.presto.operator.scalar.UrlFunctions; import com.facebook.presto.operator.scalar.VarbinaryFunctions; import com.facebook.presto.operator.scalar.WilsonInterval; import com.facebook.presto.operator.scalar.WordStemFunction; import com.facebook.presto.operator.window.CumulativeDistributionFunction; import com.facebook.presto.operator.window.DenseRankFunction; import com.facebook.presto.operator.window.FirstValueFunction; import com.facebook.presto.operator.window.LagFunction; import com.facebook.presto.operator.window.LastValueFunction; import com.facebook.presto.operator.window.LeadFunction; import com.facebook.presto.operator.window.NTileFunction; import com.facebook.presto.operator.window.NthValueFunction; import com.facebook.presto.operator.window.PercentRankFunction; import com.facebook.presto.operator.window.RankFunction; import com.facebook.presto.operator.window.RowNumberFunction; import com.facebook.presto.operator.window.SqlWindowFunction; import com.facebook.presto.operator.window.WindowFunctionSupplier; import com.facebook.presto.spi.CatalogSchemaName; import com.facebook.presto.spi.PrestoException; import 
com.facebook.presto.spi.block.Block; import com.facebook.presto.spi.block.BlockEncodingSerde; import com.facebook.presto.spi.block.BlockSerdeUtil; import com.facebook.presto.spi.function.AlterRoutineCharacteristics; import com.facebook.presto.spi.function.FunctionHandle; import com.facebook.presto.spi.function.FunctionMetadata; import com.facebook.presto.spi.function.FunctionNamespaceManager; import com.facebook.presto.spi.function.FunctionNamespaceTransactionHandle; import com.facebook.presto.spi.function.OperatorType; import com.facebook.presto.spi.function.QualifiedFunctionName; import com.facebook.presto.spi.function.ScalarFunctionImplementation; import com.facebook.presto.spi.function.Signature; import com.facebook.presto.spi.function.SqlFunction; import com.facebook.presto.spi.function.SqlFunctionVisibility; import com.facebook.presto.spi.function.SqlInvokedFunction; import com.facebook.presto.spi.type.Type; import com.facebook.presto.spi.type.TypeManager; import com.facebook.presto.spi.type.TypeSignature; import com.facebook.presto.sql.analyzer.FeaturesConfig; import com.facebook.presto.sql.analyzer.TypeSignatureProvider; import com.facebook.presto.type.BigintOperators; import com.facebook.presto.type.BooleanOperators; import com.facebook.presto.type.CharOperators; import com.facebook.presto.type.ColorOperators; import com.facebook.presto.type.DateOperators; import com.facebook.presto.type.DateTimeOperators; import com.facebook.presto.type.DecimalOperators; import com.facebook.presto.type.DoubleOperators; import com.facebook.presto.type.HyperLogLogOperators; import com.facebook.presto.type.IntegerOperators; import com.facebook.presto.type.IntervalDayTimeOperators; import com.facebook.presto.type.IntervalYearMonthOperators; import com.facebook.presto.type.IpAddressOperators; import com.facebook.presto.type.IpPrefixOperators; import com.facebook.presto.type.LikeFunctions; import com.facebook.presto.type.QuantileDigestOperators; import com.facebook.presto.type.RealOperators; import com.facebook.presto.type.SmallintOperators; import com.facebook.presto.type.TDigestOperators; import com.facebook.presto.type.TimeOperators; import com.facebook.presto.type.TimeWithTimeZoneOperators; import com.facebook.presto.type.TimestampOperators; import com.facebook.presto.type.TimestampWithTimeZoneOperators; import com.facebook.presto.type.TinyintOperators; import com.facebook.presto.type.UnknownOperators; import com.facebook.presto.type.VarbinaryOperators; import com.facebook.presto.type.VarcharOperators; import com.facebook.presto.type.khyperloglog.KHyperLogLogAggregationFunction; import com.facebook.presto.type.khyperloglog.KHyperLogLogFunctions; import com.facebook.presto.type.khyperloglog.KHyperLogLogOperators; import com.facebook.presto.type.khyperloglog.MergeKHyperLogLogAggregationFunction; import com.facebook.presto.type.setdigest.BuildSetDigestAggregation; import com.facebook.presto.type.setdigest.MergeSetDigestAggregation; import com.facebook.presto.type.setdigest.SetDigestFunctions; import com.facebook.presto.type.setdigest.SetDigestOperators; import com.google.common.cache.CacheBuilder; import com.google.common.cache.CacheLoader; import com.google.common.cache.LoadingCache; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableListMultimap; import com.google.common.collect.Multimap; import com.google.common.collect.Multimaps; import com.google.common.util.concurrent.UncheckedExecutionException; import io.airlift.slice.Slice; import 
javax.annotation.concurrent.ThreadSafe; import java.lang.invoke.MethodHandle; import java.lang.invoke.MethodHandles; import java.util.Collection; import java.util.List; import java.util.Map; import java.util.Optional; import static com.facebook.presto.metadata.SignatureBinder.applyBoundVariables; import static com.facebook.presto.operator.aggregation.ArbitraryAggregationFunction.ARBITRARY_AGGREGATION; import static com.facebook.presto.operator.aggregation.ChecksumAggregationFunction.CHECKSUM_AGGREGATION; import static com.facebook.presto.operator.aggregation.CountColumn.COUNT_COLUMN; import static com.facebook.presto.operator.aggregation.DecimalAverageAggregation.DECIMAL_AVERAGE_AGGREGATION; import static com.facebook.presto.operator.aggregation.DecimalSumAggregation.DECIMAL_SUM_AGGREGATION; import static com.facebook.presto.operator.aggregation.MapAggregationFunction.MAP_AGG; import static com.facebook.presto.operator.aggregation.MapUnionAggregation.MAP_UNION; import static com.facebook.presto.operator.aggregation.MaxAggregationFunction.MAX_AGGREGATION; import static com.facebook.presto.operator.aggregation.MaxNAggregationFunction.MAX_N_AGGREGATION; import static com.facebook.presto.operator.aggregation.MinAggregationFunction.MIN_AGGREGATION; import static com.facebook.presto.operator.aggregation.MinNAggregationFunction.MIN_N_AGGREGATION; import static com.facebook.presto.operator.aggregation.QuantileDigestAggregationFunction.QDIGEST_AGG; import static com.facebook.presto.operator.aggregation.QuantileDigestAggregationFunction.QDIGEST_AGG_WITH_WEIGHT; import static com.facebook.presto.operator.aggregation.QuantileDigestAggregationFunction.QDIGEST_AGG_WITH_WEIGHT_AND_ERROR; import static com.facebook.presto.operator.aggregation.RealAverageAggregation.REAL_AVERAGE_AGGREGATION; import static com.facebook.presto.operator.aggregation.ReduceAggregationFunction.REDUCE_AGG; import static com.facebook.presto.operator.aggregation.TDigestAggregationFunction.TDIGEST_AGG; import static com.facebook.presto.operator.aggregation.TDigestAggregationFunction.TDIGEST_AGG_WITH_WEIGHT; import static com.facebook.presto.operator.aggregation.TDigestAggregationFunction.TDIGEST_AGG_WITH_WEIGHT_AND_COMPRESSION; import static com.facebook.presto.operator.aggregation.minmaxby.MaxByAggregationFunction.MAX_BY; import static com.facebook.presto.operator.aggregation.minmaxby.MaxByNAggregationFunction.MAX_BY_N_AGGREGATION; import static com.facebook.presto.operator.aggregation.minmaxby.MinByAggregationFunction.MIN_BY; import static com.facebook.presto.operator.aggregation.minmaxby.MinByNAggregationFunction.MIN_BY_N_AGGREGATION; import static com.facebook.presto.operator.scalar.ArrayConcatFunction.ARRAY_CONCAT_FUNCTION; import static com.facebook.presto.operator.scalar.ArrayConstructor.ARRAY_CONSTRUCTOR; import static com.facebook.presto.operator.scalar.ArrayFlattenFunction.ARRAY_FLATTEN_FUNCTION; import static com.facebook.presto.operator.scalar.ArrayJoin.ARRAY_JOIN; import static com.facebook.presto.operator.scalar.ArrayJoin.ARRAY_JOIN_WITH_NULL_REPLACEMENT; import static com.facebook.presto.operator.scalar.ArrayReduceFunction.ARRAY_REDUCE_FUNCTION; import static com.facebook.presto.operator.scalar.ArraySubscriptOperator.ARRAY_SUBSCRIPT; import static com.facebook.presto.operator.scalar.ArrayToArrayCast.ARRAY_TO_ARRAY_CAST; import static com.facebook.presto.operator.scalar.ArrayToElementConcatFunction.ARRAY_TO_ELEMENT_CONCAT_FUNCTION; import static com.facebook.presto.operator.scalar.ArrayToJsonCast.ARRAY_TO_JSON; import 
static com.facebook.presto.operator.scalar.ArrayTransformFunction.ARRAY_TRANSFORM_FUNCTION; import static com.facebook.presto.operator.scalar.BuiltInScalarFunctionImplementation.ArgumentProperty.valueTypeArgumentProperty; import static com.facebook.presto.operator.scalar.BuiltInScalarFunctionImplementation.NullConvention.RETURN_NULL_ON_NULL; import static com.facebook.presto.operator.scalar.CastFromUnknownOperator.CAST_FROM_UNKNOWN; import static com.facebook.presto.operator.scalar.ConcatFunction.VARBINARY_CONCAT; import static com.facebook.presto.operator.scalar.ConcatFunction.VARCHAR_CONCAT; import static com.facebook.presto.operator.scalar.ElementToArrayConcatFunction.ELEMENT_TO_ARRAY_CONCAT_FUNCTION; import static com.facebook.presto.operator.scalar.Greatest.GREATEST; import static com.facebook.presto.operator.scalar.IdentityCast.IDENTITY_CAST; import static com.facebook.presto.operator.scalar.JsonStringToArrayCast.JSON_STRING_TO_ARRAY; import static com.facebook.presto.operator.scalar.JsonStringToMapCast.JSON_STRING_TO_MAP; import static com.facebook.presto.operator.scalar.JsonStringToRowCast.JSON_STRING_TO_ROW; import static com.facebook.presto.operator.scalar.JsonToArrayCast.JSON_TO_ARRAY; import static com.facebook.presto.operator.scalar.JsonToMapCast.JSON_TO_MAP; import static com.facebook.presto.operator.scalar.JsonToRowCast.JSON_TO_ROW; import static com.facebook.presto.operator.scalar.Least.LEAST; import static com.facebook.presto.operator.scalar.MapConcatFunction.MAP_CONCAT_FUNCTION; import static com.facebook.presto.operator.scalar.MapConstructor.MAP_CONSTRUCTOR; import static com.facebook.presto.operator.scalar.MapElementAtFunction.MAP_ELEMENT_AT; import static com.facebook.presto.operator.scalar.MapFilterFunction.MAP_FILTER_FUNCTION; import static com.facebook.presto.operator.scalar.MapHashCodeOperator.MAP_HASH_CODE; import static com.facebook.presto.operator.scalar.MapToJsonCast.MAP_TO_JSON; import static com.facebook.presto.operator.scalar.MapToMapCast.MAP_TO_MAP_CAST; import static com.facebook.presto.operator.scalar.MapTransformKeyFunction.MAP_TRANSFORM_KEY_FUNCTION; import static com.facebook.presto.operator.scalar.MapTransformValueFunction.MAP_TRANSFORM_VALUE_FUNCTION; import static com.facebook.presto.operator.scalar.MapZipWithFunction.MAP_ZIP_WITH_FUNCTION; import static com.facebook.presto.operator.scalar.MathFunctions.DECIMAL_MOD_FUNCTION; import static com.facebook.presto.operator.scalar.Re2JCastToRegexpFunction.castCharToRe2JRegexp; import static com.facebook.presto.operator.scalar.Re2JCastToRegexpFunction.castVarcharToRe2JRegexp; import static com.facebook.presto.operator.scalar.RowDistinctFromOperator.ROW_DISTINCT_FROM; import static com.facebook.presto.operator.scalar.RowEqualOperator.ROW_EQUAL; import static com.facebook.presto.operator.scalar.RowGreaterThanOperator.ROW_GREATER_THAN; import static com.facebook.presto.operator.scalar.RowGreaterThanOrEqualOperator.ROW_GREATER_THAN_OR_EQUAL; import static com.facebook.presto.operator.scalar.RowHashCodeOperator.ROW_HASH_CODE; import static com.facebook.presto.operator.scalar.RowIndeterminateOperator.ROW_INDETERMINATE; import static com.facebook.presto.operator.scalar.RowLessThanOperator.ROW_LESS_THAN; import static com.facebook.presto.operator.scalar.RowLessThanOrEqualOperator.ROW_LESS_THAN_OR_EQUAL; import static com.facebook.presto.operator.scalar.RowNotEqualOperator.ROW_NOT_EQUAL; import static com.facebook.presto.operator.scalar.RowToJsonCast.ROW_TO_JSON; import static 
com.facebook.presto.operator.scalar.RowToRowCast.ROW_TO_ROW_CAST; import static com.facebook.presto.operator.scalar.TryCastFunction.TRY_CAST; import static com.facebook.presto.operator.scalar.ZipFunction.ZIP_FUNCTIONS; import static com.facebook.presto.operator.scalar.ZipWithFunction.ZIP_WITH_FUNCTION; import static com.facebook.presto.operator.window.AggregateWindowFunction.supplier; import static com.facebook.presto.spi.StandardErrorCode.FUNCTION_IMPLEMENTATION_MISSING; import static com.facebook.presto.spi.StandardErrorCode.GENERIC_USER_ERROR; import static com.facebook.presto.spi.function.FunctionImplementationType.BUILTIN; import static com.facebook.presto.spi.function.FunctionKind.AGGREGATE; import static com.facebook.presto.spi.function.FunctionKind.SCALAR; import static com.facebook.presto.spi.function.FunctionKind.WINDOW; import static com.facebook.presto.spi.function.OperatorType.tryGetOperatorType; import static com.facebook.presto.spi.function.SqlFunctionVisibility.HIDDEN; import static com.facebook.presto.spi.type.TypeSignature.parseTypeSignature; import static com.facebook.presto.sql.analyzer.TypeSignatureProvider.fromTypeSignatures; import static com.facebook.presto.sql.planner.LiteralEncoder.MAGIC_LITERAL_FUNCTION_PREFIX; import static com.facebook.presto.type.DecimalCasts.BIGINT_TO_DECIMAL_CAST; import static com.facebook.presto.type.DecimalCasts.BOOLEAN_TO_DECIMAL_CAST; import static com.facebook.presto.type.DecimalCasts.DECIMAL_TO_BIGINT_CAST; import static com.facebook.presto.type.DecimalCasts.DECIMAL_TO_BOOLEAN_CAST; import static com.facebook.presto.type.DecimalCasts.DECIMAL_TO_DOUBLE_CAST; import static com.facebook.presto.type.DecimalCasts.DECIMAL_TO_INTEGER_CAST; import static com.facebook.presto.type.DecimalCasts.DECIMAL_TO_JSON_CAST; import static com.facebook.presto.type.DecimalCasts.DECIMAL_TO_REAL_CAST; import static com.facebook.presto.type.DecimalCasts.DECIMAL_TO_SMALLINT_CAST; import static com.facebook.presto.type.DecimalCasts.DECIMAL_TO_TINYINT_CAST; import static com.facebook.presto.type.DecimalCasts.DECIMAL_TO_VARCHAR_CAST; import static com.facebook.presto.type.DecimalCasts.DOUBLE_TO_DECIMAL_CAST; import static com.facebook.presto.type.DecimalCasts.INTEGER_TO_DECIMAL_CAST; import static com.facebook.presto.type.DecimalCasts.JSON_TO_DECIMAL_CAST; import static com.facebook.presto.type.DecimalCasts.REAL_TO_DECIMAL_CAST; import static com.facebook.presto.type.DecimalCasts.SMALLINT_TO_DECIMAL_CAST; import static com.facebook.presto.type.DecimalCasts.TINYINT_TO_DECIMAL_CAST; import static com.facebook.presto.type.DecimalCasts.VARCHAR_TO_DECIMAL_CAST; import static com.facebook.presto.type.DecimalInequalityOperators.DECIMAL_BETWEEN_OPERATOR; import static com.facebook.presto.type.DecimalInequalityOperators.DECIMAL_DISTINCT_FROM_OPERATOR; import static com.facebook.presto.type.DecimalInequalityOperators.DECIMAL_EQUAL_OPERATOR; import static com.facebook.presto.type.DecimalInequalityOperators.DECIMAL_GREATER_THAN_OPERATOR; import static com.facebook.presto.type.DecimalInequalityOperators.DECIMAL_GREATER_THAN_OR_EQUAL_OPERATOR; import static com.facebook.presto.type.DecimalInequalityOperators.DECIMAL_LESS_THAN_OPERATOR; import static com.facebook.presto.type.DecimalInequalityOperators.DECIMAL_LESS_THAN_OR_EQUAL_OPERATOR; import static com.facebook.presto.type.DecimalInequalityOperators.DECIMAL_NOT_EQUAL_OPERATOR; import static com.facebook.presto.type.DecimalOperators.DECIMAL_ADD_OPERATOR; import static 
com.facebook.presto.type.DecimalOperators.DECIMAL_DIVIDE_OPERATOR; import static com.facebook.presto.type.DecimalOperators.DECIMAL_MODULUS_OPERATOR; import static com.facebook.presto.type.DecimalOperators.DECIMAL_MULTIPLY_OPERATOR; import static com.facebook.presto.type.DecimalOperators.DECIMAL_SUBTRACT_OPERATOR; import static com.facebook.presto.type.DecimalSaturatedFloorCasts.BIGINT_TO_DECIMAL_SATURATED_FLOOR_CAST; import static com.facebook.presto.type.DecimalSaturatedFloorCasts.DECIMAL_TO_BIGINT_SATURATED_FLOOR_CAST; import static com.facebook.presto.type.DecimalSaturatedFloorCasts.DECIMAL_TO_DECIMAL_SATURATED_FLOOR_CAST; import static com.facebook.presto.type.DecimalSaturatedFloorCasts.DECIMAL_TO_INTEGER_SATURATED_FLOOR_CAST; import static com.facebook.presto.type.DecimalSaturatedFloorCasts.DECIMAL_TO_SMALLINT_SATURATED_FLOOR_CAST; import static com.facebook.presto.type.DecimalSaturatedFloorCasts.DECIMAL_TO_TINYINT_SATURATED_FLOOR_CAST; import static com.facebook.presto.type.DecimalSaturatedFloorCasts.INTEGER_TO_DECIMAL_SATURATED_FLOOR_CAST; import static com.facebook.presto.type.DecimalSaturatedFloorCasts.SMALLINT_TO_DECIMAL_SATURATED_FLOOR_CAST; import static com.facebook.presto.type.DecimalSaturatedFloorCasts.TINYINT_TO_DECIMAL_SATURATED_FLOOR_CAST; import static com.facebook.presto.type.DecimalToDecimalCasts.DECIMAL_TO_DECIMAL_CAST; import static com.facebook.presto.type.TypeUtils.resolveTypes; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkState; import static com.google.common.base.Throwables.throwIfInstanceOf; import static java.lang.String.format; import static java.util.Objects.requireNonNull; import static java.util.concurrent.TimeUnit.HOURS; @ThreadSafe public class BuiltInFunctionNamespaceManager implements FunctionNamespaceManager<BuiltInFunction> { public static final CatalogSchemaName DEFAULT_NAMESPACE = new CatalogSchemaName("presto", "default"); public static final String ID = "builtin"; private final TypeManager typeManager; private final LoadingCache<Signature, SpecializedFunctionKey> specializedFunctionKeyCache; private final LoadingCache<SpecializedFunctionKey, BuiltInScalarFunctionImplementation> specializedScalarCache; private final LoadingCache<SpecializedFunctionKey, InternalAggregationFunction> specializedAggregationCache; private final LoadingCache<SpecializedFunctionKey, WindowFunctionSupplier> specializedWindowCache; private final MagicLiteralFunction magicLiteralFunction; private volatile FunctionMap functions = new FunctionMap(); public BuiltInFunctionNamespaceManager( TypeManager typeManager, BlockEncodingSerde blockEncodingSerde, FeaturesConfig featuresConfig, FunctionManager functionManager) { this.typeManager = requireNonNull(typeManager, "typeManager is null"); this.magicLiteralFunction = new MagicLiteralFunction(blockEncodingSerde); specializedFunctionKeyCache = CacheBuilder.newBuilder() .maximumSize(1000) .build(CacheLoader.from(this::doGetSpecializedFunctionKey)); // TODO the function map should be updated, so that this cast can be removed // We have observed repeated compilation of MethodHandle that leads to full GCs. // We notice that flushing the following caches mitigate the problem. // We suspect that it is a JVM bug that is related to stale/corrupted profiling data associated // with generated classes and/or dynamically-created MethodHandles. // This might also mitigate problems like deoptimization storm or unintended interpreted execution. 
        specializedScalarCache = CacheBuilder.newBuilder()
                .maximumSize(1000)
                .expireAfterWrite(1, HOURS)
                .build(CacheLoader.from(key -> ((SqlScalarFunction) key.getFunction())
                        .specialize(key.getBoundVariables(), key.getArity(), typeManager, functionManager)));

        specializedAggregationCache = CacheBuilder.newBuilder()
                .maximumSize(1000)
                .expireAfterWrite(1, HOURS)
                .build(CacheLoader.from(key -> ((SqlAggregationFunction) key.getFunction())
                        .specialize(key.getBoundVariables(), key.getArity(), typeManager, functionManager)));

        specializedWindowCache = CacheBuilder.newBuilder()
                .maximumSize(1000)
                .expireAfterWrite(1, HOURS)
                .build(CacheLoader.from(key -> {
                    if (key.getFunction() instanceof SqlAggregationFunction) {
                        return supplier(key.getFunction().getSignature(), specializedAggregationCache.getUnchecked(key));
                    }
                    return ((SqlWindowFunction) key.getFunction())
                            .specialize(key.getBoundVariables(), key.getArity(), typeManager, functionManager);
                }));

        FunctionListBuilder builder = new FunctionListBuilder()
                .window(RowNumberFunction.class)
                .window(RankFunction.class)
                .window(DenseRankFunction.class)
                .window(PercentRankFunction.class)
                .window(CumulativeDistributionFunction.class)
                .window(NTileFunction.class)
                .window(FirstValueFunction.class)
                .window(LastValueFunction.class)
                .window(NthValueFunction.class)
                .window(LagFunction.class)
                .window(LeadFunction.class)
                .aggregate(ApproximateCountDistinctAggregation.class)
                .aggregate(DefaultApproximateCountDistinctAggregation.class)
                .aggregate(SumDataSizeForStats.class)
                .aggregate(MaxDataSizeForStats.class)
                .aggregates(CountAggregation.class)
                .aggregates(VarianceAggregation.class)
                .aggregates(CentralMomentsAggregation.class)
                .aggregates(ApproximateLongPercentileAggregations.class)
                .aggregates(ApproximateLongPercentileArrayAggregations.class)
                .aggregates(ApproximateDoublePercentileAggregations.class)
                .aggregates(ApproximateDoublePercentileArrayAggregations.class)
                .aggregates(ApproximateRealPercentileAggregations.class)
                .aggregates(ApproximateRealPercentileArrayAggregations.class)
                .aggregates(CountIfAggregation.class)
                .aggregates(BooleanAndAggregation.class)
                .aggregates(BooleanOrAggregation.class)
                .aggregates(DoubleSumAggregation.class)
                .aggregates(RealSumAggregation.class)
                .aggregates(LongSumAggregation.class)
                .aggregates(IntervalDayToSecondSumAggregation.class)
                .aggregates(IntervalYearToMonthSumAggregation.class)
                .aggregates(AverageAggregations.class)
                .function(REAL_AVERAGE_AGGREGATION)
                .aggregates(IntervalDayToSecondAverageAggregation.class)
                .aggregates(IntervalYearToMonthAverageAggregation.class)
                .aggregates(DifferentialEntropyAggregation.class)
                .aggregates(EntropyAggregation.class)
                .aggregates(GeometricMeanAggregations.class)
                .aggregates(RealGeometricMeanAggregations.class)
                .aggregates(MergeHyperLogLogAggregation.class)
                .aggregates(ApproximateSetAggregation.class)
                .functions(QDIGEST_AGG, QDIGEST_AGG_WITH_WEIGHT, QDIGEST_AGG_WITH_WEIGHT_AND_ERROR)
                .function(MergeQuantileDigestFunction.MERGE)
                .aggregates(DoubleHistogramAggregation.class)
                .aggregates(RealHistogramAggregation.class)
                .aggregates(DoubleCovarianceAggregation.class)
                .aggregates(RealCovarianceAggregation.class)
                .aggregates(DoubleRegressionAggregation.class)
                .aggregates(RealRegressionAggregation.class)
                .aggregates(DoubleCorrelationAggregation.class)
                .aggregates(RealCorrelationAggregation.class)
                .aggregates(BitwiseOrAggregation.class)
                .aggregates(BitwiseAndAggregation.class)
                .aggregates(ClassificationMissRateAggregation.class)
                .aggregates(ClassificationFallOutAggregation.class)
                .aggregates(ClassificationPrecisionAggregation.class)
                .aggregates(ClassificationRecallAggregation.class)
                .aggregates(ClassificationThresholdsAggregation.class)
                .scalar(RepeatFunction.class)
                .scalars(SequenceFunction.class)
                .scalars(SessionFunctions.class)
                .scalars(StringFunctions.class)
                .scalars(WordStemFunction.class)
                .scalar(SplitToMapFunction.ResolveDuplicateKeys.class)
                .scalar(SplitToMapFunction.FailOnDuplicateKeys.class)
                .scalar(SplitToMultimapFunction.class)
                .scalars(VarbinaryFunctions.class)
                .scalars(UrlFunctions.class)
                .scalars(MathFunctions.class)
                .scalar(MathFunctions.Abs.class)
                .scalar(MathFunctions.Sign.class)
                .scalar(MathFunctions.Round.class)
                .scalar(MathFunctions.RoundN.class)
                .scalar(MathFunctions.Truncate.class)
                .scalar(MathFunctions.TruncateN.class)
                .scalar(MathFunctions.Ceiling.class)
                .scalar(MathFunctions.Floor.class)
                .scalars(BitwiseFunctions.class)
                .scalars(DateTimeFunctions.class)
                .scalars(JsonFunctions.class)
                .scalars(ColorFunctions.class)
                .scalars(ColorOperators.class)
                .scalar(ColorOperators.ColorDistinctFromOperator.class)
                .scalars(HyperLogLogFunctions.class)
                .scalars(QuantileDigestFunctions.class)
                .scalars(UnknownOperators.class)
                .scalar(UnknownOperators.UnknownDistinctFromOperator.class)
                .scalars(BooleanOperators.class)
                .scalar(BooleanOperators.BooleanDistinctFromOperator.class)
                .scalars(BigintOperators.class)
                .scalar(BigintOperators.BigintDistinctFromOperator.class)
                .scalars(IntegerOperators.class)
                .scalar(IntegerOperators.IntegerDistinctFromOperator.class)
                .scalars(SmallintOperators.class)
                .scalar(SmallintOperators.SmallintDistinctFromOperator.class)
                .scalars(TinyintOperators.class)
                .scalar(TinyintOperators.TinyintDistinctFromOperator.class)
                .scalars(DoubleOperators.class)
                .scalar(DoubleOperators.DoubleDistinctFromOperator.class)
                .scalars(RealOperators.class)
                .scalar(RealOperators.RealDistinctFromOperator.class)
                .scalars(VarcharOperators.class)
                .scalar(VarcharOperators.VarcharDistinctFromOperator.class)
                .scalars(VarbinaryOperators.class)
                .scalar(VarbinaryOperators.VarbinaryDistinctFromOperator.class)
                .scalars(DateOperators.class)
                .scalar(DateOperators.DateDistinctFromOperator.class)
                .scalars(TimeOperators.class)
                .scalar(TimeOperators.TimeDistinctFromOperator.class)
                .scalars(TimestampOperators.class)
                .scalar(TimestampOperators.TimestampDistinctFromOperator.class)
                .scalars(IntervalDayTimeOperators.class)
                .scalar(IntervalDayTimeOperators.IntervalDayTimeDistinctFromOperator.class)
                .scalars(IntervalYearMonthOperators.class)
                .scalar(IntervalYearMonthOperators.IntervalYearMonthDistinctFromOperator.class)
                .scalars(TimeWithTimeZoneOperators.class)
                .scalar(TimeWithTimeZoneOperators.TimeWithTimeZoneDistinctFromOperator.class)
                .scalars(TimestampWithTimeZoneOperators.class)
                .scalar(TimestampWithTimeZoneOperators.TimestampWithTimeZoneDistinctFromOperator.class)
                .scalars(DateTimeOperators.class)
                .scalars(HyperLogLogOperators.class)
                .scalars(QuantileDigestOperators.class)
                .scalars(IpAddressOperators.class)
                .scalar(IpAddressOperators.IpAddressDistinctFromOperator.class)
                .scalars(IpPrefixFunctions.class)
                .scalars(IpPrefixOperators.class)
                .scalar(IpPrefixOperators.IpPrefixDistinctFromOperator.class)
                .scalars(LikeFunctions.class)
                .scalars(ArrayFunctions.class)
                .scalars(HmacFunctions.class)
                .scalars(DataSizeFunctions.class)
                .scalar(ArrayCardinalityFunction.class)
                .scalar(ArrayContains.class)
                .scalar(ArrayFilterFunction.class)
                .scalar(ArrayPositionFunction.class)
                .scalars(CombineHashFunction.class)
                .scalars(JsonOperators.class)
                .scalar(JsonOperators.JsonDistinctFromOperator.class)
                .scalars(FailureFunction.class)
                .scalars(JoniRegexpCasts.class)
                .scalars(CharacterStringCasts.class)
                .scalars(CharOperators.class)
                .scalar(CharOperators.CharDistinctFromOperator.class)
                .scalar(DecimalOperators.Negation.class)
                .scalar(DecimalOperators.HashCode.class)
                .scalar(DecimalOperators.Indeterminate.class)
                .scalar(DecimalOperators.XxHash64Operator.class)
                .functions(IDENTITY_CAST, CAST_FROM_UNKNOWN)
                .scalar(ArrayLessThanOperator.class)
                .scalar(ArrayLessThanOrEqualOperator.class)
                .scalar(ArrayRemoveFunction.class)
                .scalar(ArrayGreaterThanOperator.class)
                .scalar(ArrayGreaterThanOrEqualOperator.class)
                .scalar(ArrayElementAtFunction.class)
                .scalar(ArraySortFunction.class)
                .scalar(ArraySortComparatorFunction.class)
                .scalar(ArrayShuffleFunction.class)
                .scalar(ArrayReverseFunction.class)
                .scalar(ArrayMinFunction.class)
                .scalar(ArrayMaxFunction.class)
                .scalar(ArrayDistinctFunction.class)
                .scalar(ArrayNotEqualOperator.class)
                .scalar(ArrayEqualOperator.class)
                .scalar(ArrayHashCodeOperator.class)
                .scalar(ArrayIntersectFunction.class)
                .scalar(ArraysOverlapFunction.class)
                .scalar(ArrayDistinctFromOperator.class)
                .scalar(ArrayUnionFunction.class)
                .scalar(ArrayExceptFunction.class)
                .scalar(ArraySliceFunction.class)
                .scalar(ArrayIndeterminateOperator.class)
                .scalar(ArrayCombinationsFunction.class)
                .scalar(ArrayNgramsFunction.class)
                .scalar(ArrayAllMatchFunction.class)
                .scalar(ArrayAnyMatchFunction.class)
                .scalar(ArrayNoneMatchFunction.class)
                .scalar(MapDistinctFromOperator.class)
                .scalar(MapEqualOperator.class)
                .scalar(MapEntriesFunction.class)
                .scalar(MapFromEntriesFunction.class)
                .scalar(MultimapFromEntriesFunction.class)
                .scalar(MapNotEqualOperator.class)
                .scalar(MapKeys.class)
                .scalar(MapValues.class)
                .scalar(MapCardinalityFunction.class)
                .scalar(EmptyMapConstructor.class)
                .scalar(MapIndeterminateOperator.class)
                .scalar(TypeOfFunction.class)
                .scalar(TryFunction.class)
                .functions(ZIP_WITH_FUNCTION, MAP_ZIP_WITH_FUNCTION)
                .functions(ZIP_FUNCTIONS)
                .functions(ARRAY_JOIN, ARRAY_JOIN_WITH_NULL_REPLACEMENT)
                .functions(ARRAY_TO_ARRAY_CAST)
                .functions(ARRAY_TO_ELEMENT_CONCAT_FUNCTION, ELEMENT_TO_ARRAY_CONCAT_FUNCTION)
                .function(MAP_HASH_CODE)
                .function(MAP_ELEMENT_AT)
                .function(MAP_CONCAT_FUNCTION)
                .function(MAP_TO_MAP_CAST)
                .function(ARRAY_FLATTEN_FUNCTION)
                .function(ARRAY_CONCAT_FUNCTION)
                .functions(ARRAY_CONSTRUCTOR, ARRAY_SUBSCRIPT, ARRAY_TO_JSON, JSON_TO_ARRAY, JSON_STRING_TO_ARRAY)
                .function(new ArrayAggregationFunction(featuresConfig.isLegacyArrayAgg(), featuresConfig.getArrayAggGroupImplementation()))
                .functions(new MapSubscriptOperator(featuresConfig.isLegacyMapSubscript()))
                .functions(MAP_CONSTRUCTOR, MAP_TO_JSON, JSON_TO_MAP, JSON_STRING_TO_MAP)
                .functions(MAP_AGG, MAP_UNION)
                .function(REDUCE_AGG)
                .function(new MultimapAggregationFunction(featuresConfig.getMultimapAggGroupImplementation()))
                .functions(DECIMAL_TO_VARCHAR_CAST, DECIMAL_TO_INTEGER_CAST, DECIMAL_TO_BIGINT_CAST, DECIMAL_TO_DOUBLE_CAST, DECIMAL_TO_REAL_CAST, DECIMAL_TO_BOOLEAN_CAST, DECIMAL_TO_TINYINT_CAST, DECIMAL_TO_SMALLINT_CAST)
                .functions(VARCHAR_TO_DECIMAL_CAST, INTEGER_TO_DECIMAL_CAST, BIGINT_TO_DECIMAL_CAST, DOUBLE_TO_DECIMAL_CAST, REAL_TO_DECIMAL_CAST, BOOLEAN_TO_DECIMAL_CAST, TINYINT_TO_DECIMAL_CAST, SMALLINT_TO_DECIMAL_CAST)
                .functions(JSON_TO_DECIMAL_CAST, DECIMAL_TO_JSON_CAST)
                .functions(DECIMAL_ADD_OPERATOR, DECIMAL_SUBTRACT_OPERATOR, DECIMAL_MULTIPLY_OPERATOR, DECIMAL_DIVIDE_OPERATOR, DECIMAL_MODULUS_OPERATOR)
                .functions(DECIMAL_EQUAL_OPERATOR, DECIMAL_NOT_EQUAL_OPERATOR)
                .functions(DECIMAL_LESS_THAN_OPERATOR, DECIMAL_LESS_THAN_OR_EQUAL_OPERATOR)
                .functions(DECIMAL_GREATER_THAN_OPERATOR, DECIMAL_GREATER_THAN_OR_EQUAL_OPERATOR)
                .function(DECIMAL_TO_DECIMAL_SATURATED_FLOOR_CAST)
                .functions(DECIMAL_TO_BIGINT_SATURATED_FLOOR_CAST, BIGINT_TO_DECIMAL_SATURATED_FLOOR_CAST)
                .functions(DECIMAL_TO_INTEGER_SATURATED_FLOOR_CAST, INTEGER_TO_DECIMAL_SATURATED_FLOOR_CAST)
                .functions(DECIMAL_TO_SMALLINT_SATURATED_FLOOR_CAST, SMALLINT_TO_DECIMAL_SATURATED_FLOOR_CAST)
                .functions(DECIMAL_TO_TINYINT_SATURATED_FLOOR_CAST, TINYINT_TO_DECIMAL_SATURATED_FLOOR_CAST)
                .function(DECIMAL_BETWEEN_OPERATOR)
                .function(DECIMAL_DISTINCT_FROM_OPERATOR)
                .function(new Histogram(featuresConfig.getHistogramGroupImplementation()))
                .function(CHECKSUM_AGGREGATION)
                .function(IDENTITY_CAST)
                .function(ARBITRARY_AGGREGATION)
                .functions(GREATEST, LEAST)
                .functions(MAX_BY, MIN_BY, MAX_BY_N_AGGREGATION, MIN_BY_N_AGGREGATION)
                .functions(MAX_AGGREGATION, MIN_AGGREGATION, MAX_N_AGGREGATION, MIN_N_AGGREGATION)
                .function(COUNT_COLUMN)
                .functions(ROW_HASH_CODE, ROW_TO_JSON, JSON_TO_ROW, JSON_STRING_TO_ROW, ROW_DISTINCT_FROM, ROW_EQUAL, ROW_GREATER_THAN, ROW_GREATER_THAN_OR_EQUAL, ROW_LESS_THAN, ROW_LESS_THAN_OR_EQUAL, ROW_NOT_EQUAL, ROW_TO_ROW_CAST, ROW_INDETERMINATE)
                .functions(VARCHAR_CONCAT, VARBINARY_CONCAT)
                .function(DECIMAL_TO_DECIMAL_CAST)
                .function(castVarcharToRe2JRegexp(featuresConfig.getRe2JDfaStatesLimit(), featuresConfig.getRe2JDfaRetries()))
                .function(castCharToRe2JRegexp(featuresConfig.getRe2JDfaStatesLimit(), featuresConfig.getRe2JDfaRetries()))
                .function(DECIMAL_AVERAGE_AGGREGATION)
                .function(DECIMAL_SUM_AGGREGATION)
                .function(DECIMAL_MOD_FUNCTION)
                .functions(ARRAY_TRANSFORM_FUNCTION, ARRAY_REDUCE_FUNCTION)
                .functions(MAP_FILTER_FUNCTION, MAP_TRANSFORM_KEY_FUNCTION, MAP_TRANSFORM_VALUE_FUNCTION)
                .function(TRY_CAST)
                .aggregate(MergeSetDigestAggregation.class)
                .aggregate(BuildSetDigestAggregation.class)
                .scalars(SetDigestFunctions.class)
                .scalars(SetDigestOperators.class)
                .aggregates(MergeKHyperLogLogAggregationFunction.class)
                .aggregates(KHyperLogLogAggregationFunction.class)
                .scalars(KHyperLogLogFunctions.class)
                .scalars(KHyperLogLogOperators.class)
                .scalars(WilsonInterval.class)
                .scalars(TDigestOperators.class)
                .scalars(TDigestFunctions.class)
                .functions(TDIGEST_AGG, TDIGEST_AGG_WITH_WEIGHT, TDIGEST_AGG_WITH_WEIGHT_AND_COMPRESSION)
                .function(MergeTDigestFunction.MERGE);

        switch (featuresConfig.getRegexLibrary()) {
            case JONI:
                builder.scalars(JoniRegexpFunctions.class);
                builder.scalar(JoniRegexpReplaceLambdaFunction.class);
                break;
            case RE2J:
                builder.scalars(Re2JRegexpFunctions.class);
                builder.scalar(Re2JRegexpReplaceLambdaFunction.class);
                break;
        }

        if (featuresConfig.isLegacyLogFunction()) {
            builder.scalar(LegacyLogFunction.class);
        }

        registerBuiltInFunctions(builder.getFunctions());
    }

    public synchronized void registerBuiltInFunctions(List<?
extends BuiltInFunction> functions) { for (SqlFunction function : functions) { for (SqlFunction existingFunction : this.functions.list()) { checkArgument(!function.getSignature().equals(existingFunction.getSignature()), "Function already registered: %s", function.getSignature()); } } this.functions = new FunctionMap(this.functions, functions); } @Override public void createFunction(SqlInvokedFunction function, boolean replace) { throw new PrestoException(GENERIC_USER_ERROR, format("Cannot create function in built-in function namespace: %s", function.getSignature().getName())); } @Override public void alterFunction(QualifiedFunctionName functionName, Optional<List<TypeSignature>> parameterTypes, AlterRoutineCharacteristics alterRoutineCharacteristics) { throw new PrestoException(GENERIC_USER_ERROR, format("Cannot alter function in built-in function namespace: %s", functionName)); } @Override public void dropFunction(QualifiedFunctionName functionName, Optional<List<TypeSignature>> parameterTypes, boolean exists) { throw new PrestoException(GENERIC_USER_ERROR, format("Cannot drop function in built-in function namespace: %s", functionName)); } public String getName() { return ID; } @Override public FunctionNamespaceTransactionHandle beginTransaction() { return new EmptyTransactionHandle(); } @Override public void commit(FunctionNamespaceTransactionHandle transactionHandle) { } @Override public void abort(FunctionNamespaceTransactionHandle transactionHandle) { } @Override public Collection<BuiltInFunction> listFunctions() { return functions.list(); } @Override public Collection<BuiltInFunction> getFunctions(Optional<? extends FunctionNamespaceTransactionHandle> transactionHandle, QualifiedFunctionName functionName) { return functions.get(functionName); } @Override public FunctionHandle getFunctionHandle(Optional<? 
extends FunctionNamespaceTransactionHandle> transactionHandle, Signature signature) { return new BuiltInFunctionHandle(signature); } @Override public FunctionMetadata getFunctionMetadata(FunctionHandle functionHandle) { checkArgument(functionHandle instanceof BuiltInFunctionHandle, "Expect BuiltInFunctionHandle"); Signature signature = ((BuiltInFunctionHandle) functionHandle).getSignature(); SpecializedFunctionKey functionKey; try { functionKey = specializedFunctionKeyCache.getUnchecked(signature); } catch (UncheckedExecutionException e) { throwIfInstanceOf(e.getCause(), PrestoException.class); throw e; } BuiltInFunction function = functionKey.getFunction(); Optional<OperatorType> operatorType = tryGetOperatorType(signature.getName()); if (operatorType.isPresent()) { return new FunctionMetadata( operatorType.get(), signature.getArgumentTypes(), signature.getReturnType(), signature.getKind(), BUILTIN, function.isDeterministic(), function.isCalledOnNullInput()); } else { return new FunctionMetadata( signature.getName(), signature.getArgumentTypes(), signature.getReturnType(), signature.getKind(), BUILTIN, function.isDeterministic(), function.isCalledOnNullInput()); } } public WindowFunctionSupplier getWindowFunctionImplementation(FunctionHandle functionHandle) { checkArgument(functionHandle instanceof BuiltInFunctionHandle, "Expect BuiltInFunctionHandle"); Signature signature = ((BuiltInFunctionHandle) functionHandle).getSignature(); checkArgument(signature.getKind() == WINDOW || signature.getKind() == AGGREGATE, "%s is not a window function", signature); checkArgument(signature.getTypeVariableConstraints().isEmpty(), "%s has unbound type parameters", signature); try { return specializedWindowCache.getUnchecked(getSpecializedFunctionKey(signature)); } catch (UncheckedExecutionException e) { throwIfInstanceOf(e.getCause(), PrestoException.class); throw e; } } public InternalAggregationFunction getAggregateFunctionImplementation(FunctionHandle functionHandle) { checkArgument(functionHandle instanceof BuiltInFunctionHandle, "Expect BuiltInFunctionHandle"); Signature signature = ((BuiltInFunctionHandle) functionHandle).getSignature(); checkArgument(signature.getKind() == AGGREGATE, "%s is not an aggregate function", signature); checkArgument(signature.getTypeVariableConstraints().isEmpty(), "%s has unbound type parameters", signature); try { return specializedAggregationCache.getUnchecked(getSpecializedFunctionKey(signature)); } catch (UncheckedExecutionException e) { throwIfInstanceOf(e.getCause(), PrestoException.class); throw e; } } @Override public ScalarFunctionImplementation getScalarFunctionImplementation(FunctionHandle functionHandle) { checkArgument(functionHandle instanceof BuiltInFunctionHandle, "Expect BuiltInFunctionHandle"); return getScalarFunctionImplementation(((BuiltInFunctionHandle) functionHandle).getSignature()); } public BuiltInScalarFunctionImplementation getScalarFunctionImplementation(Signature signature) { checkArgument(signature.getKind() == SCALAR, "%s is not a scalar function", signature); checkArgument(signature.getTypeVariableConstraints().isEmpty(), "%s has unbound type parameters", signature); try { return specializedScalarCache.getUnchecked(getSpecializedFunctionKey(signature)); } catch (UncheckedExecutionException e) { throwIfInstanceOf(e.getCause(), PrestoException.class); throw e; } } private SpecializedFunctionKey getSpecializedFunctionKey(Signature signature) { try { return specializedFunctionKeyCache.getUnchecked(signature); } catch 
(UncheckedExecutionException e) {
            throwIfInstanceOf(e.getCause(), PrestoException.class);
            throw e;
        }
    }

    private SpecializedFunctionKey doGetSpecializedFunctionKey(Signature signature)
    {
        Iterable<BuiltInFunction> candidates = getFunctions(null, signature.getName());

        // search for exact match
        Type returnType = typeManager.getType(signature.getReturnType());
        List<TypeSignatureProvider> argumentTypeSignatureProviders = fromTypeSignatures(signature.getArgumentTypes());
        for (BuiltInFunction candidate : candidates) {
            Optional<BoundVariables> boundVariables = new SignatureBinder(typeManager, candidate.getSignature(), false)
                    .bindVariables(argumentTypeSignatureProviders, returnType);
            if (boundVariables.isPresent()) {
                return new SpecializedFunctionKey(candidate, boundVariables.get(), argumentTypeSignatureProviders.size());
            }
        }

        // TODO: hack because there could be "type only" coercions (which aren't necessarily included as implicit casts),
        // so do a second pass allowing "type only" coercions
        List<Type> argumentTypes = resolveTypes(signature.getArgumentTypes(), typeManager);
        for (BuiltInFunction candidate : candidates) {
            SignatureBinder binder = new SignatureBinder(typeManager, candidate.getSignature(), true);
            Optional<BoundVariables> boundVariables = binder.bindVariables(argumentTypeSignatureProviders, returnType);
            if (!boundVariables.isPresent()) {
                continue;
            }
            Signature boundSignature = applyBoundVariables(candidate.getSignature(), boundVariables.get(), argumentTypes.size());

            if (!typeManager.isTypeOnlyCoercion(typeManager.getType(boundSignature.getReturnType()), returnType)) {
                continue;
            }
            boolean nonTypeOnlyCoercion = false;
            for (int i = 0; i < argumentTypes.size(); i++) {
                Type expectedType = typeManager.getType(boundSignature.getArgumentTypes().get(i));
                if (!typeManager.isTypeOnlyCoercion(argumentTypes.get(i), expectedType)) {
                    nonTypeOnlyCoercion = true;
                    break;
                }
            }
            if (nonTypeOnlyCoercion) {
                continue;
            }

            return new SpecializedFunctionKey(candidate, boundVariables.get(), argumentTypes.size());
        }

        // TODO: this is a hack and should be removed
        if (signature.getNameSuffix().startsWith(MAGIC_LITERAL_FUNCTION_PREFIX)) {
            List<TypeSignature> parameterTypes = signature.getArgumentTypes();
            // extract type from function name
            String typeName = signature.getNameSuffix().substring(MAGIC_LITERAL_FUNCTION_PREFIX.length());

            // lookup the type
            Type type = typeManager.getType(parseTypeSignature(typeName));

            // verify we have one parameter of the proper type
            checkArgument(parameterTypes.size() == 1, "Expected one argument to literal function, but got %s", parameterTypes);
            Type parameterType = typeManager.getType(parameterTypes.get(0));
            requireNonNull(parameterType, format("Type %s not found", parameterTypes.get(0)));

            return new SpecializedFunctionKey(
                    magicLiteralFunction,
                    BoundVariables.builder()
                            .setTypeVariable("T", parameterType)
                            .setTypeVariable("R", type)
                            .build(),
                    1);
        }

        throw new PrestoException(FUNCTION_IMPLEMENTATION_MISSING, format("%s not found", signature));
    }

    private static class EmptyTransactionHandle
            implements FunctionNamespaceTransactionHandle
    {
    }

    private static class FunctionMap
    {
        private final Multimap<QualifiedFunctionName, BuiltInFunction> functions;

        public FunctionMap()
        {
            functions = ImmutableListMultimap.of();
        }

        public FunctionMap(FunctionMap map, Iterable<? extends BuiltInFunction> functions)
        {
            this.functions = ImmutableListMultimap.<QualifiedFunctionName, BuiltInFunction>builder()
                    .putAll(map.functions)
                    .putAll(Multimaps.index(functions, function -> function.getSignature().getName()))
                    .build();

            // Make sure all functions with the same name are aggregations or none of them are
            for (Map.Entry<QualifiedFunctionName, Collection<BuiltInFunction>> entry : this.functions.asMap().entrySet()) {
                Collection<BuiltInFunction> values = entry.getValue();
                long aggregations = values.stream()
                        .map(function -> function.getSignature().getKind())
                        .filter(kind -> kind == AGGREGATE)
                        .count();
                checkState(aggregations == 0 || aggregations == values.size(), "'%s' is both an aggregation and a scalar function", entry.getKey());
            }
        }

        public List<BuiltInFunction> list()
        {
            return ImmutableList.copyOf(functions.values());
        }

        public Collection<BuiltInFunction> get(QualifiedFunctionName name)
        {
            return functions.get(name);
        }
    }

    private static class MagicLiteralFunction
            extends SqlScalarFunction
    {
        private final BlockEncodingSerde blockEncodingSerde;

        MagicLiteralFunction(BlockEncodingSerde blockEncodingSerde)
        {
            super(new Signature(QualifiedFunctionName.of(DEFAULT_NAMESPACE, MAGIC_LITERAL_FUNCTION_PREFIX), SCALAR,
                    TypeSignature.parseTypeSignature("R"), TypeSignature.parseTypeSignature("T")));
            this.blockEncodingSerde = requireNonNull(blockEncodingSerde, "blockEncodingSerde is null");
        }

        @Override
        public final SqlFunctionVisibility getVisibility()
        {
            return HIDDEN;
        }

        @Override
        public boolean isDeterministic()
        {
            return true;
        }

        @Override
        public String getDescription()
        {
            return "magic literal";
        }

        @Override
        public BuiltInScalarFunctionImplementation specialize(BoundVariables boundVariables, int arity, TypeManager typeManager, FunctionManager functionManager)
        {
            Type parameterType = boundVariables.getTypeVariable("T");
            Type type = boundVariables.getTypeVariable("R");

            MethodHandle methodHandle = null;
            if (parameterType.getJavaType() == type.getJavaType()) {
                methodHandle = MethodHandles.identity(parameterType.getJavaType());
            }

            if (parameterType.getJavaType() == Slice.class) {
                if (type.getJavaType() == Block.class) {
                    methodHandle = BlockSerdeUtil.READ_BLOCK.bindTo(blockEncodingSerde);
                }
            }

            checkArgument(methodHandle != null,
                    "Expected type %s to use (or can be converted into) Java type %s, but Java type is %s",
                    type, parameterType.getJavaType(), type.getJavaType());

            return new BuiltInScalarFunctionImplementation(
                    false,
                    ImmutableList.of(valueTypeArgumentProperty(RETURN_NULL_ON_NULL)),
                    methodHandle);
        }
    }
}
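/*
 * A minimal, self-contained sketch (not part of the Presto sources above) of the
 * invariant that FunctionMap enforces in its constructor: every function registered
 * under one name must either be an aggregation or a scalar, never a mix. The names
 * here (FunctionKind, checkSameKind) are hypothetical stand-ins for the real Presto
 * types, using only the JDK so the sketch runs on its own.
 */
import java.util.List;
import java.util.Map;

class FunctionKindInvariantSketch {
    enum FunctionKind { SCALAR, AGGREGATE }

    /** Throws if any name maps to a mix of aggregate and non-aggregate kinds. */
    static void checkSameKind(Map<String, List<FunctionKind>> functionsByName) {
        for (Map.Entry<String, List<FunctionKind>> entry : functionsByName.entrySet()) {
            long aggregations = entry.getValue().stream().filter(k -> k == FunctionKind.AGGREGATE).count();
            if (aggregations != 0 && aggregations != entry.getValue().size()) {
                throw new IllegalStateException("'" + entry.getKey() + "' is both an aggregation and a scalar function");
            }
        }
    }

    public static void main(String[] args) {
        // OK: all "max" overloads are aggregates
        checkSameKind(Map.of("max", List.of(FunctionKind.AGGREGATE, FunctionKind.AGGREGATE)));
        // Throws: "f" is registered both as an aggregate and as a scalar
        checkSameKind(Map.of("f", List.of(FunctionKind.AGGREGATE, FunctionKind.SCALAR)));
    }
}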
/* * Copyright 2016 Google Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.schemaorg.core; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Multimap; import com.google.schemaorg.SchemaOrgTypeImpl; import com.google.schemaorg.ValueType; import com.google.schemaorg.core.datatype.Text; import com.google.schemaorg.core.datatype.URL; import com.google.schemaorg.goog.GoogConstants; import com.google.schemaorg.goog.PopularityScoreSpecification; /** Implementation of {@link Bone}. */ public class BoneImpl extends AnatomicalStructureImpl implements Bone { private static final ImmutableSet<String> PROPERTY_SET = initializePropertySet(); private static ImmutableSet<String> initializePropertySet() { ImmutableSet.Builder<String> builder = ImmutableSet.builder(); builder.add(CoreConstants.PROPERTY_ADDITIONAL_TYPE); builder.add(CoreConstants.PROPERTY_ALTERNATE_NAME); builder.add(CoreConstants.PROPERTY_ASSOCIATED_PATHOPHYSIOLOGY); builder.add(CoreConstants.PROPERTY_BODY_LOCATION); builder.add(CoreConstants.PROPERTY_CODE); builder.add(CoreConstants.PROPERTY_CONNECTED_TO); builder.add(CoreConstants.PROPERTY_DESCRIPTION); builder.add(CoreConstants.PROPERTY_DIAGRAM); builder.add(CoreConstants.PROPERTY_FUNCTION); builder.add(CoreConstants.PROPERTY_GUIDELINE); builder.add(CoreConstants.PROPERTY_IMAGE); builder.add(CoreConstants.PROPERTY_MAIN_ENTITY_OF_PAGE); builder.add(CoreConstants.PROPERTY_MEDICINE_SYSTEM); builder.add(CoreConstants.PROPERTY_NAME); builder.add(CoreConstants.PROPERTY_PART_OF_SYSTEM); builder.add(CoreConstants.PROPERTY_POTENTIAL_ACTION); builder.add(CoreConstants.PROPERTY_RECOGNIZING_AUTHORITY); builder.add(CoreConstants.PROPERTY_RELATED_CONDITION); builder.add(CoreConstants.PROPERTY_RELATED_THERAPY); builder.add(CoreConstants.PROPERTY_RELEVANT_SPECIALTY); builder.add(CoreConstants.PROPERTY_SAME_AS); builder.add(CoreConstants.PROPERTY_STUDY); builder.add(CoreConstants.PROPERTY_SUB_STRUCTURE); builder.add(CoreConstants.PROPERTY_URL); builder.add(GoogConstants.PROPERTY_DETAILED_DESCRIPTION); builder.add(GoogConstants.PROPERTY_POPULARITY_SCORE); return builder.build(); } static final class BuilderImpl extends SchemaOrgTypeImpl.BuilderImpl<Bone.Builder> implements Bone.Builder { @Override public Bone.Builder addAdditionalType(URL value) { return addProperty(CoreConstants.PROPERTY_ADDITIONAL_TYPE, value); } @Override public Bone.Builder addAdditionalType(String value) { return addProperty(CoreConstants.PROPERTY_ADDITIONAL_TYPE, Text.of(value)); } @Override public Bone.Builder addAlternateName(Text value) { return addProperty(CoreConstants.PROPERTY_ALTERNATE_NAME, value); } @Override public Bone.Builder addAlternateName(String value) { return addProperty(CoreConstants.PROPERTY_ALTERNATE_NAME, Text.of(value)); } @Override public Bone.Builder addAssociatedPathophysiology(Text value) { return addProperty(CoreConstants.PROPERTY_ASSOCIATED_PATHOPHYSIOLOGY, value); } @Override public Bone.Builder addAssociatedPathophysiology(String value) { 
return addProperty(CoreConstants.PROPERTY_ASSOCIATED_PATHOPHYSIOLOGY, Text.of(value)); } @Override public Bone.Builder addBodyLocation(Text value) { return addProperty(CoreConstants.PROPERTY_BODY_LOCATION, value); } @Override public Bone.Builder addBodyLocation(String value) { return addProperty(CoreConstants.PROPERTY_BODY_LOCATION, Text.of(value)); } @Override public Bone.Builder addCode(MedicalCode value) { return addProperty(CoreConstants.PROPERTY_CODE, value); } @Override public Bone.Builder addCode(MedicalCode.Builder value) { return addProperty(CoreConstants.PROPERTY_CODE, value.build()); } @Override public Bone.Builder addCode(String value) { return addProperty(CoreConstants.PROPERTY_CODE, Text.of(value)); } @Override public Bone.Builder addConnectedTo(AnatomicalStructure value) { return addProperty(CoreConstants.PROPERTY_CONNECTED_TO, value); } @Override public Bone.Builder addConnectedTo(AnatomicalStructure.Builder value) { return addProperty(CoreConstants.PROPERTY_CONNECTED_TO, value.build()); } @Override public Bone.Builder addConnectedTo(String value) { return addProperty(CoreConstants.PROPERTY_CONNECTED_TO, Text.of(value)); } @Override public Bone.Builder addDescription(Text value) { return addProperty(CoreConstants.PROPERTY_DESCRIPTION, value); } @Override public Bone.Builder addDescription(String value) { return addProperty(CoreConstants.PROPERTY_DESCRIPTION, Text.of(value)); } @Override public Bone.Builder addDiagram(ImageObject value) { return addProperty(CoreConstants.PROPERTY_DIAGRAM, value); } @Override public Bone.Builder addDiagram(ImageObject.Builder value) { return addProperty(CoreConstants.PROPERTY_DIAGRAM, value.build()); } @Override public Bone.Builder addDiagram(String value) { return addProperty(CoreConstants.PROPERTY_DIAGRAM, Text.of(value)); } @Override public Bone.Builder addFunction(Text value) { return addProperty(CoreConstants.PROPERTY_FUNCTION, value); } @Override public Bone.Builder addFunction(String value) { return addProperty(CoreConstants.PROPERTY_FUNCTION, Text.of(value)); } @Override public Bone.Builder addGuideline(MedicalGuideline value) { return addProperty(CoreConstants.PROPERTY_GUIDELINE, value); } @Override public Bone.Builder addGuideline(MedicalGuideline.Builder value) { return addProperty(CoreConstants.PROPERTY_GUIDELINE, value.build()); } @Override public Bone.Builder addGuideline(String value) { return addProperty(CoreConstants.PROPERTY_GUIDELINE, Text.of(value)); } @Override public Bone.Builder addImage(ImageObject value) { return addProperty(CoreConstants.PROPERTY_IMAGE, value); } @Override public Bone.Builder addImage(ImageObject.Builder value) { return addProperty(CoreConstants.PROPERTY_IMAGE, value.build()); } @Override public Bone.Builder addImage(URL value) { return addProperty(CoreConstants.PROPERTY_IMAGE, value); } @Override public Bone.Builder addImage(String value) { return addProperty(CoreConstants.PROPERTY_IMAGE, Text.of(value)); } @Override public Bone.Builder addMainEntityOfPage(CreativeWork value) { return addProperty(CoreConstants.PROPERTY_MAIN_ENTITY_OF_PAGE, value); } @Override public Bone.Builder addMainEntityOfPage(CreativeWork.Builder value) { return addProperty(CoreConstants.PROPERTY_MAIN_ENTITY_OF_PAGE, value.build()); } @Override public Bone.Builder addMainEntityOfPage(URL value) { return addProperty(CoreConstants.PROPERTY_MAIN_ENTITY_OF_PAGE, value); } @Override public Bone.Builder addMainEntityOfPage(String value) { return addProperty(CoreConstants.PROPERTY_MAIN_ENTITY_OF_PAGE, Text.of(value)); } @Override 
public Bone.Builder addMedicineSystem(MedicineSystem value) { return addProperty(CoreConstants.PROPERTY_MEDICINE_SYSTEM, value); } @Override public Bone.Builder addMedicineSystem(String value) { return addProperty(CoreConstants.PROPERTY_MEDICINE_SYSTEM, Text.of(value)); } @Override public Bone.Builder addName(Text value) { return addProperty(CoreConstants.PROPERTY_NAME, value); } @Override public Bone.Builder addName(String value) { return addProperty(CoreConstants.PROPERTY_NAME, Text.of(value)); } @Override public Bone.Builder addPartOfSystem(AnatomicalSystem value) { return addProperty(CoreConstants.PROPERTY_PART_OF_SYSTEM, value); } @Override public Bone.Builder addPartOfSystem(AnatomicalSystem.Builder value) { return addProperty(CoreConstants.PROPERTY_PART_OF_SYSTEM, value.build()); } @Override public Bone.Builder addPartOfSystem(String value) { return addProperty(CoreConstants.PROPERTY_PART_OF_SYSTEM, Text.of(value)); } @Override public Bone.Builder addPotentialAction(Action value) { return addProperty(CoreConstants.PROPERTY_POTENTIAL_ACTION, value); } @Override public Bone.Builder addPotentialAction(Action.Builder value) { return addProperty(CoreConstants.PROPERTY_POTENTIAL_ACTION, value.build()); } @Override public Bone.Builder addPotentialAction(String value) { return addProperty(CoreConstants.PROPERTY_POTENTIAL_ACTION, Text.of(value)); } @Override public Bone.Builder addRecognizingAuthority(Organization value) { return addProperty(CoreConstants.PROPERTY_RECOGNIZING_AUTHORITY, value); } @Override public Bone.Builder addRecognizingAuthority(Organization.Builder value) { return addProperty(CoreConstants.PROPERTY_RECOGNIZING_AUTHORITY, value.build()); } @Override public Bone.Builder addRecognizingAuthority(String value) { return addProperty(CoreConstants.PROPERTY_RECOGNIZING_AUTHORITY, Text.of(value)); } @Override public Bone.Builder addRelatedCondition(MedicalCondition value) { return addProperty(CoreConstants.PROPERTY_RELATED_CONDITION, value); } @Override public Bone.Builder addRelatedCondition(MedicalCondition.Builder value) { return addProperty(CoreConstants.PROPERTY_RELATED_CONDITION, value.build()); } @Override public Bone.Builder addRelatedCondition(String value) { return addProperty(CoreConstants.PROPERTY_RELATED_CONDITION, Text.of(value)); } @Override public Bone.Builder addRelatedTherapy(MedicalTherapy value) { return addProperty(CoreConstants.PROPERTY_RELATED_THERAPY, value); } @Override public Bone.Builder addRelatedTherapy(MedicalTherapy.Builder value) { return addProperty(CoreConstants.PROPERTY_RELATED_THERAPY, value.build()); } @Override public Bone.Builder addRelatedTherapy(String value) { return addProperty(CoreConstants.PROPERTY_RELATED_THERAPY, Text.of(value)); } @Override public Bone.Builder addRelevantSpecialty(MedicalSpecialty value) { return addProperty(CoreConstants.PROPERTY_RELEVANT_SPECIALTY, value); } @Override public Bone.Builder addRelevantSpecialty(String value) { return addProperty(CoreConstants.PROPERTY_RELEVANT_SPECIALTY, Text.of(value)); } @Override public Bone.Builder addSameAs(URL value) { return addProperty(CoreConstants.PROPERTY_SAME_AS, value); } @Override public Bone.Builder addSameAs(String value) { return addProperty(CoreConstants.PROPERTY_SAME_AS, Text.of(value)); } @Override public Bone.Builder addStudy(MedicalStudy value) { return addProperty(CoreConstants.PROPERTY_STUDY, value); } @Override public Bone.Builder addStudy(MedicalStudy.Builder value) { return addProperty(CoreConstants.PROPERTY_STUDY, value.build()); } @Override public 
Bone.Builder addStudy(String value) { return addProperty(CoreConstants.PROPERTY_STUDY, Text.of(value)); } @Override public Bone.Builder addSubStructure(AnatomicalStructure value) { return addProperty(CoreConstants.PROPERTY_SUB_STRUCTURE, value); } @Override public Bone.Builder addSubStructure(AnatomicalStructure.Builder value) { return addProperty(CoreConstants.PROPERTY_SUB_STRUCTURE, value.build()); } @Override public Bone.Builder addSubStructure(String value) { return addProperty(CoreConstants.PROPERTY_SUB_STRUCTURE, Text.of(value)); } @Override public Bone.Builder addUrl(URL value) { return addProperty(CoreConstants.PROPERTY_URL, value); } @Override public Bone.Builder addUrl(String value) { return addProperty(CoreConstants.PROPERTY_URL, Text.of(value)); } @Override public Bone.Builder addDetailedDescription(Article value) { return addProperty(GoogConstants.PROPERTY_DETAILED_DESCRIPTION, value); } @Override public Bone.Builder addDetailedDescription(Article.Builder value) { return addProperty(GoogConstants.PROPERTY_DETAILED_DESCRIPTION, value.build()); } @Override public Bone.Builder addDetailedDescription(String value) { return addProperty(GoogConstants.PROPERTY_DETAILED_DESCRIPTION, Text.of(value)); } @Override public Bone.Builder addPopularityScore(PopularityScoreSpecification value) { return addProperty(GoogConstants.PROPERTY_POPULARITY_SCORE, value); } @Override public Bone.Builder addPopularityScore(PopularityScoreSpecification.Builder value) { return addProperty(GoogConstants.PROPERTY_POPULARITY_SCORE, value.build()); } @Override public Bone.Builder addPopularityScore(String value) { return addProperty(GoogConstants.PROPERTY_POPULARITY_SCORE, Text.of(value)); } @Override public Bone build() { return new BoneImpl(properties, reverseMap); } } public BoneImpl(Multimap<String, ValueType> properties, Multimap<String, Thing> reverseMap) { super(properties, reverseMap); } @Override public String getFullTypeName() { return CoreConstants.TYPE_BONE; } @Override public boolean includesProperty(String property) { return PROPERTY_SET.contains(CoreConstants.NAMESPACE + property) || PROPERTY_SET.contains(GoogConstants.NAMESPACE + property) || PROPERTY_SET.contains(property); } }
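/*
 * A short usage sketch (not part of the generated file above) showing how the Bone
 * builder is meant to be driven. It assumes the library's CoreFactory entry point
 * exposes a newBoneBuilder() factory, as it does for other generated schema.org
 * types; adjust the factory name if it differs in your version.
 */
import com.google.schemaorg.core.Bone;
import com.google.schemaorg.core.CoreFactory;

class BoneBuilderSketch {
    public static void main(String[] args) {
        Bone femur = CoreFactory.newBoneBuilder()
                .addName("Femur")              // String overloads wrap the value via Text.of(value)
                .addBodyLocation("Lower limb") // maps to CoreConstants.PROPERTY_BODY_LOCATION
                .addConnectedTo("Patella")     // string overloads exist alongside the typed ones
                .build();
        // Prints the constant returned by getFullTypeName(), i.e. CoreConstants.TYPE_BONE.
        System.out.println(femur.getFullTypeName());
    }
}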
package org.broadinstitute.hellbender.tools.walkers.haplotypecaller;

import com.google.common.annotations.VisibleForTesting;
import htsjdk.samtools.Cigar;
import htsjdk.samtools.CigarElement;
import htsjdk.samtools.CigarOperator;
import htsjdk.samtools.SAMFileHeader;
import htsjdk.samtools.util.Locatable;
import htsjdk.variant.variantcontext.*;
import htsjdk.variant.vcf.VCFHeaderLine;
import htsjdk.variant.vcf.VCFSimpleHeaderLine;
import org.broadinstitute.hellbender.engine.AlignmentContext;
import org.broadinstitute.hellbender.engine.AssemblyRegion;
import org.broadinstitute.hellbender.tools.walkers.genotyper.PloidyModel;
import org.broadinstitute.hellbender.tools.walkers.variantutils.PosteriorProbabilitiesUtils;
import org.broadinstitute.hellbender.utils.*;
import org.broadinstitute.hellbender.utils.genotyper.ReadLikelihoods;
import org.broadinstitute.hellbender.utils.genotyper.SampleList;
import org.broadinstitute.hellbender.utils.haplotype.Haplotype;
import org.broadinstitute.hellbender.utils.locusiterator.LocusIteratorByState;
import org.broadinstitute.hellbender.utils.param.ParamUtils;
import org.broadinstitute.hellbender.utils.pileup.PileupElement;
import org.broadinstitute.hellbender.utils.pileup.ReadPileup;
import org.broadinstitute.hellbender.utils.read.AlignmentUtils;
import org.broadinstitute.hellbender.utils.read.GATKRead;
import org.broadinstitute.hellbender.utils.read.ReadCoordinateComparator;
import org.broadinstitute.hellbender.utils.variant.GATKVCFConstants;
import org.broadinstitute.hellbender.utils.variant.GATKVariantContextUtils;
import org.broadinstitute.hellbender.utils.variant.HomoSapiensConstants;

import java.util.*;
import java.util.stream.Collectors;
import java.util.stream.DoubleStream;

/**
 * Code for estimating the reference confidence.
 *
 * This code can estimate the probability that the data for a single sample is consistent with a
 * well-determined REF/REF diploid genotype.
 */
public final class ReferenceConfidenceModel {

    private final SampleList samples;
    private final int indelInformativeDepthIndelSize;
    private final int numRefSamplesForPrior;
    private final PosteriorProbabilitiesUtils.PosteriorProbabilitiesOptions options;

    /**
     * Surrogate quality score for no base calls.
     * <p>
     * This is the quality assigned to deletion (so without its own base-call quality) pile-up elements,
     * when assessing the confidence on the hom-ref call at that site.
     * </p>
     */
    private static final byte REF_MODEL_DELETION_QUAL = 30;

    /**
     * Base calls with quality lower than this threshold won't be considered when assessing the
     * confidence on the hom-ref call.
     */
    private static final byte BASE_QUAL_THRESHOLD = 6;

    /**
     * Only base calls with quality strictly greater than this constant
     * will be considered high quality if they are part of a soft-clip.
     */
    private static final byte HQ_BASE_QUALITY_SOFTCLIP_THRESHOLD = 28;

    //TODO change this: https://github.com/broadinstitute/gsa-unstable/issues/1108
    protected static final int MAX_N_INDEL_INFORMATIVE_READS = 40; // more than this is overkill because GQs are capped at 99 anyway

    private static final int INITIAL_INDEL_LK_CACHE_PLOIDY_CAPACITY = 20;
    private static GenotypeLikelihoods[][] indelPLCache = new GenotypeLikelihoods[INITIAL_INDEL_LK_CACHE_PLOIDY_CAPACITY + 1][];

    /**
     * Indel error rate for the indel model used to assess the confidence on the hom-ref call.
     */
    private static final double INDEL_ERROR_RATE = -4.5; // 10^-4.5 indel errors per bp

    /**
     * Phred scaled qual value that corresponds to the {@link #INDEL_ERROR_RATE indel error rate},
     * i.e. round(-4.5 * -10.0) = 45.
     */
    private static final byte INDEL_QUAL = (byte) Math.round(INDEL_ERROR_RATE * -10.0);

    /**
     * No indel likelihood (ref allele) used in the indel model to assess the confidence on the hom-ref call.
     */
    private static final double NO_INDEL_LIKELIHOOD = QualityUtils.qualToProbLog10(INDEL_QUAL);

    /**
     * Indel likelihood (alt. allele) used in the indel model to assess the confidence on the hom-ref call.
     */
    private static final double INDEL_LIKELIHOOD = QualityUtils.qualToErrorProbLog10(INDEL_QUAL);

    private static final int IDX_HOM_REF = 0;

    /**
     * Options related to posterior probability calcs
     */
    private static final boolean useInputSamplesAlleleCounts = false; // by definition ref-conf will be single-sample; inputs should get ignored, but let's be explicit
    private static final boolean useMLEAC = true;
    private static final boolean ignoreInputSamplesForMissingVariants = true;
    private static final boolean useFlatPriorsForIndels = false;

    /**
     * Holds information about a genotype call of a single sample reference vs. any non-ref event
     *
     * IMPORTANT PERFORMANCE NOTE!!! Allowing direct field access (within this class only) speeds up
     * the HaplotypeCaller by ~10% vs. accessing the fields indirectly via setters, as seen in a profiler.
     */
    @VisibleForTesting
    static final class RefVsAnyResult {
        /**
         * The genotype likelihoods for ref/ref ref/non-ref non-ref/non-ref
         *
         * Fields are visible because direct field access for this particular class has a major performance
         * impact on the HaplotypeCaller, as noted above, and the class itself is nested within
         * ReferenceConfidenceModel anyway.
         */
        final double[] genotypeLikelihoods;

        int refDepth = 0;
        int nonRefDepth = 0;

        /**
         * Creates a new ref-vs-alt result indicating the genotype likelihood vector capacity.
         * @param likelihoodCapacity the required capacity of the likelihood array, should match the possible number of
         *                           genotypes given the number of alleles (always 2), ploidy (arbitrary) less the genotyping
         *                           model non-sense genotype count if applies.
         * @throws IllegalArgumentException if {@code likelihoodCapacity} is negative.
         */
        public RefVsAnyResult(final int likelihoodCapacity) {
            ParamUtils.isPositiveOrZero(likelihoodCapacity, "likelihood capacity is negative");
            genotypeLikelihoods = new double[likelihoodCapacity];
        }

        /**
         * @return Get the DP (sum of AD values)
         */
        int getDP() {
            return refDepth + nonRefDepth;
        }

        /**
         * Return the AD fields. Returns a newly allocated array every time.
         */
        int[] getAD() {
            return new int[]{refDepth, nonRefDepth};
        }

        /**
         * Returns (a copy of) the array of genotype likelihoods.
         * Caps the het and hom var likelihood values by the hom ref likelihood.
         * The capping is done on the fly.
         */
        double[] getGenotypeLikelihoodsCappedByHomRefLikelihood() {
            return DoubleStream.of(genotypeLikelihoods).map(d -> Math.min(d, genotypeLikelihoods[0])).toArray();
        }
    }

    /**
     * Create a new ReferenceConfidenceModel
     *
     * @param samples the list of all samples we'll be considering with this model
     * @param header the SAMFileHeader describing the read information (used for debugging)
     * @param indelInformativeDepthIndelSize the max size of indels to consider when calculating indel informative depths
     * @param numRefForPrior the number of reference samples to use when computing genotype posteriors/priors
     */
    public ReferenceConfidenceModel(final SampleList samples,
                                    final SAMFileHeader header,
                                    final int indelInformativeDepthIndelSize,
                                    final int numRefForPrior) {
        Utils.nonNull(samples, "samples cannot be null");
        Utils.validateArg(samples.numberOfSamples() > 0, "samples cannot be empty");
        Utils.nonNull(header, "header cannot be null");
        Utils.validateArg(indelInformativeDepthIndelSize >= 0,
                () -> "indelInformativeDepthIndelSize must be >= 0 but got " + indelInformativeDepthIndelSize);
        this.samples = samples;
        this.indelInformativeDepthIndelSize = indelInformativeDepthIndelSize;
        this.numRefSamplesForPrior = numRefForPrior;
        this.options = new PosteriorProbabilitiesUtils.PosteriorProbabilitiesOptions(HomoSapiensConstants.SNP_HETEROZYGOSITY,
                HomoSapiensConstants.INDEL_HETEROZYGOSITY, useInputSamplesAlleleCounts, useMLEAC,
                ignoreInputSamplesForMissingVariants, useFlatPriorsForIndels);
    }

    /**
     * Get the VCF header lines to include when emitting reference confidence values via {@link #calculateRefConfidence}.
     * @return a non-null set of VCFHeaderLines
     */
    public Set<VCFHeaderLine> getVCFHeaderLines() {
        final Set<VCFHeaderLine> headerLines = new LinkedHashSet<>();
        headerLines.add(new VCFSimpleHeaderLine(GATKVCFConstants.SYMBOLIC_ALLELE_DEFINITION_HEADER_TAG,
                GATKVCFConstants.NON_REF_SYMBOLIC_ALLELE_NAME, "Represents any possible alternative allele at this location"));
        return headerLines;
    }

    public List<VariantContext> calculateRefConfidence(final Haplotype refHaplotype,
                                                       final Collection<Haplotype> calledHaplotypes,
                                                       final SimpleInterval paddedReferenceLoc,
                                                       final AssemblyRegion activeRegion,
                                                       final ReadLikelihoods<Haplotype> readLikelihoods,
                                                       final PloidyModel ploidyModel,
                                                       final List<VariantContext> variantCalls) {
        return calculateRefConfidence(refHaplotype, calledHaplotypes, paddedReferenceLoc, activeRegion, readLikelihoods,
                ploidyModel, variantCalls, false, Collections.emptyList());
    }

    /**
     * Calculate the reference confidence for a single sample given its read data.
     *
     * Returns a list of variant contexts, one for each position in the {@code activeRegion.getLoc()}, each containing
     * detailed information about the certainty that the sample is hom-ref for each base in the region.
     *
     * @param refHaplotype the reference haplotype, used to get the reference bases across activeRegion.getLoc()
     * @param calledHaplotypes a list of haplotypes that segregate in this region, for realignment of the reads in the
     *                         readLikelihoods, corresponding to each read's best haplotype. Must contain the refHaplotype.
     * @param paddedReferenceLoc the location of refHaplotype (which might be larger than activeRegion.getLoc())
     * @param activeRegion the active region we want to get the reference confidence over
     * @param readLikelihoods a map from a single sample to its PerReadAlleleLikelihoodMap for each haplotype in calledHaplotypes
     * @param ploidyModel indicates the ploidy of each sample in {@code stratifiedReadMap}.
     * @param variantCalls calls made in this region. The return result will contain any variant call in this list in the
     *                     correct order by genomic position, and any variant in this list will stop us emitting a ref confidence
     *                     under any position it covers (for SNPs and insertions that is 1 bp, but for deletions it's the entire ref span)
     * @return an ordered list of variant contexts that spans activeRegion.getLoc() and includes both reference confidence
     * contexts as well as calls from variantCalls if any were provided
     */
    public List<VariantContext> calculateRefConfidence(final Haplotype refHaplotype,
                                                       final Collection<Haplotype> calledHaplotypes,
                                                       final SimpleInterval paddedReferenceLoc,
                                                       final AssemblyRegion activeRegion,
                                                       final ReadLikelihoods<Haplotype> readLikelihoods,
                                                       final PloidyModel ploidyModel,
                                                       final List<VariantContext> variantCalls,
                                                       final boolean applyPriors,
                                                       final List<VariantContext> VCpriors) {
        Utils.nonNull(refHaplotype, "refHaplotype cannot be null");
        Utils.nonNull(calledHaplotypes, "calledHaplotypes cannot be null");
        Utils.validateArg(calledHaplotypes.contains(refHaplotype), "calledHaplotypes must contain the refHaplotype");
        Utils.nonNull(paddedReferenceLoc, "paddedReferenceLoc cannot be null");
        Utils.nonNull(activeRegion, "activeRegion cannot be null");
        Utils.nonNull(readLikelihoods, "readLikelihoods cannot be null");
        Utils.validateArg(readLikelihoods.numberOfSamples() == 1,
                () -> "readLikelihoods must contain exactly one sample but it contained " + readLikelihoods.numberOfSamples());
        Utils.validateArg(refHaplotype.length() == activeRegion.getExtendedSpan().size(),
                () -> "refHaplotype " + refHaplotype.length() + " and activeRegion extended span size " + activeRegion.getExtendedSpan().size() + " are different");
        Utils.nonNull(ploidyModel, "the ploidy model cannot be null");

        final int ploidy = ploidyModel.samplePloidy(0); // the first sample = the only sample in reference-confidence mode.

        final SimpleInterval refSpan = activeRegion.getSpan();
        final List<ReadPileup> refPileups = getPileupsOverReference(refHaplotype, calledHaplotypes, paddedReferenceLoc, activeRegion, refSpan, readLikelihoods);
        final byte[] ref = refHaplotype.getBases();
        final List<VariantContext> results = new ArrayList<>(refSpan.size());
        final String sampleName = readLikelihoods.getSample(0);

        final int globalRefOffset = refSpan.getStart() - activeRegion.getExtendedSpan().getStart();
        for ( final ReadPileup pileup : refPileups ) {
            final Locatable curPos = pileup.getLocation();
            final int offset = curPos.getStart() - refSpan.getStart();

            final VariantContext overlappingSite = getOverlappingVariantContext(curPos, variantCalls);
            final List<VariantContext> currentPriors = getMatchingPriors(curPos, overlappingSite, VCpriors);
            if ( overlappingSite != null && overlappingSite.getStart() == curPos.getStart() ) {
                if (applyPriors) {
                    results.add(PosteriorProbabilitiesUtils.calculatePosteriorProbs(overlappingSite, currentPriors,
                            numRefSamplesForPrior, options));
                } else {
                    results.add(overlappingSite);
                }
            } else { // otherwise emit a reference confidence variant context
                results.add(makeReferenceConfidenceVariantContext(ploidy, ref, sampleName, globalRefOffset, pileup, curPos, offset, applyPriors, currentPriors));
            }
        }

        return results;
    }

    private VariantContext makeReferenceConfidenceVariantContext(final int ploidy,
                                                                 final byte[] ref,
                                                                 final String sampleName,
                                                                 final int globalRefOffset,
                                                                 final ReadPileup pileup,
                                                                 final Locatable curPos,
                                                                 final int offset,
                                                                 final boolean applyPriors,
                                                                 final List<VariantContext> VCpriors) {
        // Assume infinite population on a single sample.
        final int refOffset = offset + globalRefOffset;
        final byte refBase = ref[refOffset];
        final RefVsAnyResult homRefCalc = calcGenotypeLikelihoodsOfRefVsAny(ploidy, pileup, refBase, BASE_QUAL_THRESHOLD, null, true);

        final Allele refAllele = Allele.create(refBase, true);
        final List<Allele> refSiteAlleles = Arrays.asList(refAllele, Allele.NON_REF_ALLELE);
        final VariantContextBuilder vcb = new VariantContextBuilder("HC", curPos.getContig(), curPos.getStart(), curPos.getStart(), refSiteAlleles);
        final GenotypeBuilder gb = new GenotypeBuilder(sampleName, GATKVariantContextUtils.homozygousAlleleList(refAllele, ploidy));
        gb.AD(homRefCalc.getAD());
        gb.DP(homRefCalc.getDP());

        // genotype likelihood calculation
        final GenotypeLikelihoods snpGLs = GenotypeLikelihoods.fromLog10Likelihoods(homRefCalc.getGenotypeLikelihoodsCappedByHomRefLikelihood());
        final int nIndelInformativeReads = calcNIndelInformativeReads(pileup, refOffset, ref, indelInformativeDepthIndelSize);
        final GenotypeLikelihoods indelGLs = getIndelPLs(ploidy, nIndelInformativeReads);

        // now that we have the SNP and indel GLs, we take the one with the least confidence,
        // as this is the most conservative estimate of our certainty that we are hom-ref.
        // For example, if the SNP PLs are 0,10,100 and the indel PLs are 0,100,1000
        // we are very certain that there's no indel here, but the SNP confidence implies that we are
        // far less confident that the ref base is actually the only thing here. So we take 0,10,100
        // as our GLs for the site.
        final GenotypeLikelihoods leastConfidenceGLs = getGLwithWorstGQ(indelGLs, snpGLs);
        final int[] leastConfidenceGLsAsPLs = leastConfidenceGLs.getAsPLs();
        gb.GQ(GATKVariantContextUtils.calculateGQFromPLs(leastConfidenceGLsAsPLs));
        gb.PL(leastConfidenceGLsAsPLs);

        if (!applyPriors) {
            return vcb.genotypes(gb.make()).make();
        } else {
            //TODO FIXME: after new-qual refactoring, these should be static calls to AF calculator
            return PosteriorProbabilitiesUtils.calculatePosteriorProbs(vcb.genotypes(gb.make()).make(), VCpriors, numRefSamplesForPrior, options);
        }
    }

    /**
     * Get the GenotypeLikelihoods with the least strong corresponding GQ value
     * @param gl1 first to consider (cannot be null)
     * @param gl2 second to consider (cannot be null)
     * @return gl1 or gl2, whichever has the worst GQ
     */
    @VisibleForTesting
    GenotypeLikelihoods getGLwithWorstGQ(final GenotypeLikelihoods gl1, final GenotypeLikelihoods gl2) {
        if (getGQForHomRef(gl1) > getGQForHomRef(gl2)) {
            return gl1;
        } else {
            return gl2;
        }
    }

    private double getGQForHomRef(final GenotypeLikelihoods gls) {
        return GenotypeLikelihoods.getGQLog10FromLikelihoods(IDX_HOM_REF, gls.getAsVector());
    }

    /**
     * Get indel PLs corresponding to seeing N indel-informative reads at this site
     *
     * @param ploidy the requested ploidy.
     * @param nInformativeReads the number of reads that inform us about being ref without an indel at this site
     * @return non-null GenotypeLikelihoods given N
     */
    @VisibleForTesting
    GenotypeLikelihoods getIndelPLs(final int ploidy, final int nInformativeReads) {
        return indelPLCache(ploidy, nInformativeReads > MAX_N_INDEL_INFORMATIVE_READS ? MAX_N_INDEL_INFORMATIVE_READS : nInformativeReads);
    }

    private GenotypeLikelihoods indelPLCache(final int ploidy, final int nInformativeReads) {
        return initializeIndelPLCache(ploidy)[nInformativeReads];
    }

    private GenotypeLikelihoods[] initializeIndelPLCache(final int ploidy) {
        if (indelPLCache.length <= ploidy) {
            indelPLCache = Arrays.copyOf(indelPLCache, ploidy << 1);
        }
        if (indelPLCache[ploidy] != null) {
            return indelPLCache[ploidy];
        }

        final double denominator = -MathUtils.log10(ploidy);
        final GenotypeLikelihoods[] result = new GenotypeLikelihoods[MAX_N_INDEL_INFORMATIVE_READS + 1];

        //Note: an array of zeros is the right answer for result[0].
        result[0] = GenotypeLikelihoods.fromLog10Likelihoods(new double[ploidy + 1]);
        for ( int nInformativeReads = 1; nInformativeReads <= MAX_N_INDEL_INFORMATIVE_READS; nInformativeReads++ ) {
            final double[] PLs = new double[ploidy + 1];
            PLs[0] = nInformativeReads * NO_INDEL_LIKELIHOOD;
            for (int altCount = 1; altCount <= ploidy; altCount++) {
                final double refLikelihoodAccum = NO_INDEL_LIKELIHOOD + MathUtils.log10(ploidy - altCount);
                final double altLikelihoodAccum = INDEL_LIKELIHOOD + MathUtils.log10(altCount);
                PLs[altCount] = nInformativeReads * (MathUtils.approximateLog10SumLog10(refLikelihoodAccum, altLikelihoodAccum) + denominator);
            }
            result[nInformativeReads] = GenotypeLikelihoods.fromLog10Likelihoods(PLs);
        }
        indelPLCache[ploidy] = result;
        return result;
    }

    /**
     * Calculate the genotype likelihoods for the sample in the pileup, contrasting hom-ref with ref vs. alt
     *
     * @param ploidy target sample ploidy.
     * @param pileup the read backed pileup containing the data we want to evaluate
     * @param refBase the reference base at this pileup position
     * @param minBaseQual the min base quality for a read in the pileup at the pileup position to be included in the calculation
     * @param hqSoftClips running average data structure (can be null) to collect information about the number of high quality soft clips
     * @param readsWereRealigned whether the reads in the pileup were realigned to their most likely haplotype
     * @return a RefVsAnyResult genotype call.
     */
    public RefVsAnyResult calcGenotypeLikelihoodsOfRefVsAny(final int ploidy,
                                                            final ReadPileup pileup,
                                                            final byte refBase,
                                                            final byte minBaseQual,
                                                            final MathUtils.RunningAverage hqSoftClips,
                                                            final boolean readsWereRealigned) {
        final int likelihoodCount = ploidy + 1;
        final double log10Ploidy = MathUtils.log10(ploidy);

        final RefVsAnyResult result = new RefVsAnyResult(likelihoodCount);
        int readCount = 0;
        for (final PileupElement p : pileup) {
            final byte qual = p.isDeletion() ? REF_MODEL_DELETION_QUAL : p.getQual();
            if (!p.isDeletion() && qual <= minBaseQual) {
                continue;
            }
            readCount++;
            applyPileupElementRefVsNonRefLikelihoodAndCount(refBase, likelihoodCount, log10Ploidy, result, p, qual, hqSoftClips, readsWereRealigned);
        }
        final double denominator = readCount * log10Ploidy;
        for (int i = 0; i < likelihoodCount; i++) {
            result.genotypeLikelihoods[i] -= denominator;
        }
        return result;
    }

    private void applyPileupElementRefVsNonRefLikelihoodAndCount(final byte refBase,
                                                                 final int likelihoodCount,
                                                                 final double log10Ploidy,
                                                                 final RefVsAnyResult result,
                                                                 final PileupElement element,
                                                                 final byte qual,
                                                                 final MathUtils.RunningAverage hqSoftClips,
                                                                 final boolean readsWereRealigned) {
isAltAfterAssembly(element, refBase) : isAltBeforeAssembly(element, refBase); final double referenceLikelihood; final double nonRefLikelihood; if (isAlt) { nonRefLikelihood = QualityUtils.qualToProbLog10(qual); referenceLikelihood = QualityUtils.qualToErrorProbLog10(qual) + MathUtils.LOG10_ONE_THIRD; result.nonRefDepth++; } else { referenceLikelihood = QualityUtils.qualToProbLog10(qual); nonRefLikelihood = QualityUtils.qualToErrorProbLog10(qual) + MathUtils.LOG10_ONE_THIRD; result.refDepth++; } // Homozygous likelihoods don't need the logSum trick. result.genotypeLikelihoods[0] += referenceLikelihood + log10Ploidy; result.genotypeLikelihoods[likelihoodCount - 1] += nonRefLikelihood + log10Ploidy; // Heterozygous likelihoods need the logSum trick: for (int i = 1, j = likelihoodCount - 2; i < likelihoodCount - 1; i++, j--) { result.genotypeLikelihoods[i] += MathUtils.approximateLog10SumLog10( referenceLikelihood + MathUtils.log10(j), nonRefLikelihood + MathUtils.log10(i)); } if (isAlt && hqSoftClips != null && element.isNextToSoftClip()) { hqSoftClips.add(AlignmentUtils.calcNumHighQualitySoftClips(element.getRead(), HQ_BASE_QUALITY_SOFTCLIP_THRESHOLD)); } } private boolean isAltBeforeAssembly(final PileupElement element, final byte refBase){ return element.getBase() != refBase || element.isDeletion() || element.isBeforeDeletionStart() || element.isAfterDeletionEnd() || element.isBeforeInsertion() || element.isAfterInsertion() || element.isNextToSoftClip(); } private boolean isAltAfterAssembly(final PileupElement element, final byte refBase){ return element.getBase() != refBase || element.isDeletion(); //we shouldn't have soft clips after assembly } /** * Get a list of pileups that span the entire active region span, in order, one for each position */ private List<ReadPileup> getPileupsOverReference(final Haplotype refHaplotype, final Collection<Haplotype> calledHaplotypes, final SimpleInterval paddedReferenceLoc, final AssemblyRegion activeRegion, final SimpleInterval activeRegionSpan, final ReadLikelihoods<Haplotype> readLikelihoods) { Utils.validateArg(calledHaplotypes.contains(refHaplotype), "calledHaplotypes must contain the refHaplotype"); Utils.validateArg(readLikelihoods.numberOfSamples() == 1, () -> "readLikelihoods must contain exactly one sample but it contained " + readLikelihoods.numberOfSamples()); final List<GATKRead> reads = new ArrayList<>(readLikelihoods.sampleReads(0)); reads.sort(new ReadCoordinateComparator(activeRegion.getHeader())); //because we updated the reads based on the local realignments we have to re-sort or the pileups will be... unpredictable final LocusIteratorByState libs = new LocusIteratorByState(reads.iterator(), LocusIteratorByState.NO_DOWNSAMPLING, false, samples.asSetOfSamples(), activeRegion.getHeader(), true); final int startPos = activeRegionSpan.getStart(); final List<ReadPileup> pileups = new ArrayList<>(activeRegionSpan.getEnd() - startPos); AlignmentContext next = libs.advanceToLocus(startPos, true); for ( int curPos = startPos; curPos <= activeRegionSpan.getEnd(); curPos++ ) { if ( next != null && next.getLocation().getStart() == curPos ) { pileups.add(next.getBasePileup()); next = libs.hasNext() ? 
libs.next() : null; } else { // no data, so we create empty pileups pileups.add(new ReadPileup(new SimpleInterval(activeRegionSpan.getContig(), curPos, curPos))); } } return pileups; } /** * Return the rightmost variant context in maybeOverlapping that overlaps curPos * * @param curPos non-null genome loc * @param maybeOverlapping a collection of variant contexts that might overlap curPos * @return a VariantContext, or null if none overlaps */ @VisibleForTesting VariantContext getOverlappingVariantContext(final Locatable curPos, final Collection<VariantContext> maybeOverlapping) { final SimpleInterval curPosSI = new SimpleInterval(curPos); VariantContext overlaps = null; for ( final VariantContext vc : maybeOverlapping ) { if ( curPosSI.overlaps(vc) ) { if ( overlaps == null || vc.getStart() > overlaps.getStart() ) { overlaps = vc; } } } return overlaps; } /** * Note that we don't have to match alleles because the PosteriorProbabilitesUtils will take care of that * @param curPos position of interest for genotyping * @param call (may be null) * @param priorList priors within the current ActiveRegion * @return prior VCs representing the same variant position as call */ List<VariantContext> getMatchingPriors(final Locatable curPos, final VariantContext call, final List<VariantContext> priorList) { final int position = call != null ? call.getStart() : curPos.getStart(); return priorList.stream().filter(vc -> position == vc.getStart()).collect(Collectors.toList()); } /** * Compute the sum of mismatching base qualities for readBases aligned to refBases at readStart / refStart * assuming no insertions or deletions in the read w.r.t. the reference * * @param readBases non-null bases of the read * @param readQuals non-null quals of the read * @param readStart the starting position of the read (i.e., that aligns it to a position in the reference) * @param refBases the reference bases * @param refStart the offset into refBases that aligns to the readStart position in readBases * @param maxSum if the sum goes over this value, return immediately * @return the sum of quality scores for readBases that mismatch their corresponding ref bases */ @VisibleForTesting int sumMismatchingQualities(final byte[] readBases, final byte[] readQuals, final int readStart, final byte[] refBases, final int refStart, final int maxSum) { final int n = Math.min(readBases.length - readStart, refBases.length - refStart); int sum = 0; for ( int i = 0; i < n; i++ ) { final byte readBase = readBases[readStart + i]; final byte refBase = refBases[refStart + i]; if ( !Nucleotide.intersect(readBase, refBase) && !(readBase == AlignmentUtils.GAP_CHARACTER)) { sum += readQuals[readStart + i]; if ( sum > maxSum ) { // abort early return sum; } } } return sum; } /** * Compute whether a read is informative to eliminate an indel of size <= maxIndelSize segregating at readStart/refStart * * @param read the read * @param readStart the index with respect to @{param}refBases where the read starts * @param refBases the reference bases * @param refStart the offset into refBases that aligns to the readStart position in readBases * @param maxIndelSize the max indel size to consider for the read to be informative * @return true if read can eliminate the possibility that there's an indel of size <= maxIndelSize segregating at refStart */ @VisibleForTesting boolean isReadInformativeAboutIndelsOfSize(final GATKRead read, final int readStart, final byte[] refBases, final int refStart, final int maxIndelSize) { // fast exit when n bases left < 
maxIndelSize if( read.getLength() - readStart < maxIndelSize || refBases.length - refStart < maxIndelSize ) { return false; } // We are safe to use the faster no-copy versions of getBases and getBaseQualities here, // since we're not modifying the returned arrays in any way. This makes a small difference // in the HaplotypeCaller profile, since this method is a major hotspot. final byte[] readBases = AlignmentUtils.getBasesAlignedOneToOne(read); //calls getBasesNoCopy if CIGAR is all match final byte[] readQuals = AlignmentUtils.getBaseQualsAlignedOneToOne(read); final int baselineMMSum = sumMismatchingQualities(readBases, readQuals, readStart, refBases, refStart, Integer.MAX_VALUE); // consider each indel size up to max in term, checking if an indel that deletes either the ref bases (deletion // or read bases (insertion) would fit as well as the origin baseline sum of mismatching quality scores for ( int indelSize = 1; indelSize <= maxIndelSize; indelSize++ ) { // check insertions: if (sumMismatchingQualities(readBases, readQuals, readStart + indelSize, refBases, refStart, baselineMMSum) <= baselineMMSum) { return false; } // check deletions: if (sumMismatchingQualities(readBases, readQuals, readStart, refBases, refStart + indelSize, baselineMMSum) <= baselineMMSum) { return false; } } return true; } /** * Calculate the number of indel informative reads at pileup * * @param pileup a pileup * @param pileupOffsetIntoRef index along the reference corresponding to the pileup * @param ref the ref bases * @param maxIndelSize maximum indel size to consider in the informativeness calculation * @return an integer >= 0 */ @VisibleForTesting int calcNIndelInformativeReads(final ReadPileup pileup, final int pileupOffsetIntoRef, final byte[] ref, final int maxIndelSize) { int nInformative = 0; for ( final PileupElement p : pileup ) { // doesn't count as evidence if ( p.isBeforeDeletionStart() || p.isBeforeInsertion() || p.isDeletion() ) { continue; } final int offset = getCigarModifiedOffset(p); if ( isReadInformativeAboutIndelsOfSize(p.getRead(), offset, ref, pileupOffsetIntoRef, maxIndelSize) ) { nInformative++; if( nInformative > MAX_N_INDEL_INFORMATIVE_READS ) { return MAX_N_INDEL_INFORMATIVE_READS; } } } return nInformative; } /** * Calculate the index of the current pileup position against the reference-aligned read * This offset should be representative of the "IGV view" for the read where insertions are collapsed and deletions * are padded so that we can easily count the mismatches against the reference * @param p the PileupElement containing the offset as an index into the read base sequence * @return the new reference-aligned index/offset */ @VisibleForTesting protected int getCigarModifiedOffset (final PileupElement p){ final GATKRead read = p.getRead(); int offset = (p.getCurrentCigarElement().getOperator().consumesReferenceBases() || p.getCurrentCigarElement().getOperator() == CigarOperator.S)? 
p.getOffsetInCurrentCigar() : 0; for (final CigarElement elem : read.getCigar().getCigarElements().subList(0, p.getCurrentCigarOffset())) { if (elem.getOperator().consumesReferenceBases() || elem.getOperator() == CigarOperator.S) { offset += elem.getLength(); } } return offset; } /** * Create a reference haplotype for an active region * * @param activeRegion the active region * @param refBases the ref bases * @param paddedReferenceLoc the location spanned by the refBases -- can be longer than activeRegion.getLocation() * @return a reference haplotype */ public static Haplotype createReferenceHaplotype(final AssemblyRegion activeRegion, final byte[] refBases, final SimpleInterval paddedReferenceLoc) { Utils.nonNull(activeRegion, "null region"); Utils.nonNull(refBases, "null refBases"); Utils.nonNull(paddedReferenceLoc, "null paddedReferenceLoc"); final int alignmentStart = activeRegion.getExtendedSpan().getStart() - paddedReferenceLoc.getStart(); if ( alignmentStart < 0 ) { throw new IllegalStateException("Bad alignment start in createReferenceHaplotype " + alignmentStart); } final Haplotype refHaplotype = new Haplotype(refBases, true); refHaplotype.setAlignmentStartHapwrtRef(alignmentStart); final Cigar c = new Cigar(); c.add(new CigarElement(refHaplotype.getBases().length, CigarOperator.M)); refHaplotype.setCigar(c); return refHaplotype; } }
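// A minimal standalone sketch (not GATK code) of the shift-and-compare idea behind
// isReadInformativeAboutIndelsOfSize above: a read rules out indels of size <= k when no
// k-shifted alignment of read vs. reference fits as well as the unshifted baseline.
// All names here (MismatchShiftDemo, sumMismatch) are hypothetical illustrations.
class MismatchShiftDemo {
    // Sum of qualities at positions where read and ref bases differ; mirrors the spirit of
    // sumMismatchingQualities above, minus the early-abort maxSum optimization.
    static int sumMismatch(byte[] read, byte[] quals, int readStart, byte[] ref, int refStart) {
        int n = Math.min(read.length - readStart, ref.length - refStart);
        int sum = 0;
        for (int i = 0; i < n; i++) {
            if (read[readStart + i] != ref[refStart + i]) {
                sum += quals[readStart + i];
            }
        }
        return sum;
    }

    public static void main(String[] args) {
        byte[] ref = "ACGTACGTAC".getBytes();
        byte[] read = "ACGTACGTAC".getBytes(); // read matches the reference exactly
        byte[] quals = new byte[read.length];
        java.util.Arrays.fill(quals, (byte) 30);

        int baseline = sumMismatch(read, quals, 0, ref, 0); // 0 for a perfect match
        boolean informative = true;
        for (int k = 1; k <= 3; k++) {
            // Simulate an insertion by skipping k read bases, a deletion by skipping k ref
            // bases; if either fits as well as the baseline, the read is not informative.
            if (sumMismatch(read, quals, k, ref, 0) <= baseline
                    || sumMismatch(read, quals, 0, ref, k) <= baseline) {
                informative = false;
                break;
            }
        }
        System.out.println("informative about indels up to size 3: " + informative); // true

        // Contrast: a homopolymer matches equally well at every shift, so it can never rule
        // an indel out -- the shifted mismatch sum stays at the baseline of 0.
        byte[] hom = "AAAAAAAAAA".getBytes();
        System.out.println("homopolymer shift-1 sum: " + sumMismatch(hom, quals, 1, hom, 0)); // 0
    }
}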
package com.github.badoualy.telegram.tl.api; import com.github.badoualy.telegram.tl.TLContext; import com.github.badoualy.telegram.tl.core.TLVector; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import static com.github.badoualy.telegram.tl.StreamUtils.readInt; import static com.github.badoualy.telegram.tl.StreamUtils.readTLObject; import static com.github.badoualy.telegram.tl.StreamUtils.readTLString; import static com.github.badoualy.telegram.tl.StreamUtils.readTLVector; import static com.github.badoualy.telegram.tl.StreamUtils.writeInt; import static com.github.badoualy.telegram.tl.StreamUtils.writeString; import static com.github.badoualy.telegram.tl.StreamUtils.writeTLObject; import static com.github.badoualy.telegram.tl.StreamUtils.writeTLVector; import static com.github.badoualy.telegram.tl.TLObjectUtils.SIZE_CONSTRUCTOR_ID; import static com.github.badoualy.telegram.tl.TLObjectUtils.SIZE_INT32; import static com.github.badoualy.telegram.tl.TLObjectUtils.computeTLStringSerializedSize; /** * @author Yannick Badoual yann.badoual@gmail.com * @see <a href="http://github.com/badoualy/kotlogram">http://github.com/badoualy/kotlogram</a> */ public class TLMessage extends TLAbsMessage { public static final int CONSTRUCTOR_ID = 0xc09be45f; protected int flags; protected boolean out; protected boolean mentioned; protected boolean mediaUnread; protected boolean silent; protected boolean post; protected Integer fromId; protected TLAbsPeer toId; protected TLMessageFwdHeader fwdFrom; protected Integer viaBotId; protected Integer replyToMsgId; protected int date; protected String message; protected TLAbsMessageMedia media; protected TLAbsReplyMarkup replyMarkup; protected TLVector<TLAbsMessageEntity> entities; protected Integer views; protected Integer editDate; private final String _constructor = "message#c09be45f"; public TLMessage() { } public TLMessage(boolean out, boolean mentioned, boolean mediaUnread, boolean silent, boolean post, int id, Integer fromId, TLAbsPeer toId, TLMessageFwdHeader fwdFrom, Integer viaBotId, Integer replyToMsgId, int date, String message, TLAbsMessageMedia media, TLAbsReplyMarkup replyMarkup, TLVector<TLAbsMessageEntity> entities, Integer views, Integer editDate) { this.out = out; this.mentioned = mentioned; this.mediaUnread = mediaUnread; this.silent = silent; this.post = post; this.id = id; this.fromId = fromId; this.toId = toId; this.fwdFrom = fwdFrom; this.viaBotId = viaBotId; this.replyToMsgId = replyToMsgId; this.date = date; this.message = message; this.media = media; this.replyMarkup = replyMarkup; this.entities = entities; this.views = views; this.editDate = editDate; } private void computeFlags() { flags = 0; flags = out ? (flags | 2) : (flags & ~2); flags = mentioned ? (flags | 16) : (flags & ~16); flags = mediaUnread ? (flags | 32) : (flags & ~32); flags = silent ? (flags | 8192) : (flags & ~8192); flags = post ? (flags | 16384) : (flags & ~16384); flags = fromId != null ? (flags | 256) : (flags & ~256); flags = fwdFrom != null ? (flags | 4) : (flags & ~4); flags = viaBotId != null ? (flags | 2048) : (flags & ~2048); flags = replyToMsgId != null ? (flags | 8) : (flags & ~8); flags = media != null ? (flags | 512) : (flags & ~512); flags = replyMarkup != null ? (flags | 64) : (flags & ~64); flags = entities != null ? (flags | 128) : (flags & ~128); flags = views != null ? (flags | 1024) : (flags & ~1024); flags = editDate != null ? 
(flags | 32768) : (flags & ~32768); } @Override public void serializeBody(OutputStream stream) throws IOException { computeFlags(); writeInt(flags, stream); writeInt(id, stream); if ((flags & 256) != 0) { if (fromId == null) throwNullFieldException("fromId", flags); writeInt(fromId, stream); } writeTLObject(toId, stream); if ((flags & 4) != 0) { if (fwdFrom == null) throwNullFieldException("fwdFrom", flags); writeTLObject(fwdFrom, stream); } if ((flags & 2048) != 0) { if (viaBotId == null) throwNullFieldException("viaBotId", flags); writeInt(viaBotId, stream); } if ((flags & 8) != 0) { if (replyToMsgId == null) throwNullFieldException("replyToMsgId", flags); writeInt(replyToMsgId, stream); } writeInt(date, stream); writeString(message, stream); if ((flags & 512) != 0) { if (media == null) throwNullFieldException("media", flags); writeTLObject(media, stream); } if ((flags & 64) != 0) { if (replyMarkup == null) throwNullFieldException("replyMarkup", flags); writeTLObject(replyMarkup, stream); } if ((flags & 128) != 0) { if (entities == null) throwNullFieldException("entities", flags); writeTLVector(entities, stream); } if ((flags & 1024) != 0) { if (views == null) throwNullFieldException("views", flags); writeInt(views, stream); } if ((flags & 32768) != 0) { if (editDate == null) throwNullFieldException("editDate", flags); writeInt(editDate, stream); } } @Override @SuppressWarnings({"unchecked", "SimplifiableConditionalExpression"}) public void deserializeBody(InputStream stream, TLContext context) throws IOException { flags = readInt(stream); out = (flags & 2) != 0; mentioned = (flags & 16) != 0; mediaUnread = (flags & 32) != 0; silent = (flags & 8192) != 0; post = (flags & 16384) != 0; id = readInt(stream); fromId = (flags & 256) != 0 ? readInt(stream) : null; toId = readTLObject(stream, context, TLAbsPeer.class, -1); fwdFrom = (flags & 4) != 0 ? readTLObject(stream, context, TLMessageFwdHeader.class, TLMessageFwdHeader.CONSTRUCTOR_ID) : null; viaBotId = (flags & 2048) != 0 ? readInt(stream) : null; replyToMsgId = (flags & 8) != 0 ? readInt(stream) : null; date = readInt(stream); message = readTLString(stream); media = (flags & 512) != 0 ? readTLObject(stream, context, TLAbsMessageMedia.class, -1) : null; replyMarkup = (flags & 64) != 0 ? readTLObject(stream, context, TLAbsReplyMarkup.class, -1) : null; entities = (flags & 128) != 0 ? readTLVector(stream, context) : null; views = (flags & 1024) != 0 ? readInt(stream) : null; editDate = (flags & 32768) != 0 ? 
readInt(stream) : null; } @Override public int computeSerializedSize() { computeFlags(); int size = SIZE_CONSTRUCTOR_ID; size += SIZE_INT32; size += SIZE_INT32; if ((flags & 256) != 0) { if (fromId == null) throwNullFieldException("fromId", flags); size += SIZE_INT32; } size += toId.computeSerializedSize(); if ((flags & 4) != 0) { if (fwdFrom == null) throwNullFieldException("fwdFrom", flags); size += fwdFrom.computeSerializedSize(); } if ((flags & 2048) != 0) { if (viaBotId == null) throwNullFieldException("viaBotId", flags); size += SIZE_INT32; } if ((flags & 8) != 0) { if (replyToMsgId == null) throwNullFieldException("replyToMsgId", flags); size += SIZE_INT32; } size += SIZE_INT32; size += computeTLStringSerializedSize(message); if ((flags & 512) != 0) { if (media == null) throwNullFieldException("media", flags); size += media.computeSerializedSize(); } if ((flags & 64) != 0) { if (replyMarkup == null) throwNullFieldException("replyMarkup", flags); size += replyMarkup.computeSerializedSize(); } if ((flags & 128) != 0) { if (entities == null) throwNullFieldException("entities", flags); size += entities.computeSerializedSize(); } if ((flags & 1024) != 0) { if (views == null) throwNullFieldException("views", flags); size += SIZE_INT32; } if ((flags & 32768) != 0) { if (editDate == null) throwNullFieldException("editDate", flags); size += SIZE_INT32; } return size; } @Override public String toString() { return _constructor; } @Override public int getConstructorId() { return CONSTRUCTOR_ID; } public boolean getOut() { return out; } public void setOut(boolean out) { this.out = out; } public boolean getMentioned() { return mentioned; } public void setMentioned(boolean mentioned) { this.mentioned = mentioned; } public boolean getMediaUnread() { return mediaUnread; } public void setMediaUnread(boolean mediaUnread) { this.mediaUnread = mediaUnread; } public boolean getSilent() { return silent; } public void setSilent(boolean silent) { this.silent = silent; } public boolean getPost() { return post; } public void setPost(boolean post) { this.post = post; } public int getId() { return id; } public void setId(int id) { this.id = id; } public Integer getFromId() { return fromId; } public void setFromId(Integer fromId) { this.fromId = fromId; } public TLAbsPeer getToId() { return toId; } public void setToId(TLAbsPeer toId) { this.toId = toId; } public TLMessageFwdHeader getFwdFrom() { return fwdFrom; } public void setFwdFrom(TLMessageFwdHeader fwdFrom) { this.fwdFrom = fwdFrom; } public Integer getViaBotId() { return viaBotId; } public void setViaBotId(Integer viaBotId) { this.viaBotId = viaBotId; } public Integer getReplyToMsgId() { return replyToMsgId; } public void setReplyToMsgId(Integer replyToMsgId) { this.replyToMsgId = replyToMsgId; } public int getDate() { return date; } public void setDate(int date) { this.date = date; } public String getMessage() { return message; } public void setMessage(String message) { this.message = message; } public TLAbsMessageMedia getMedia() { return media; } public void setMedia(TLAbsMessageMedia media) { this.media = media; } public TLAbsReplyMarkup getReplyMarkup() { return replyMarkup; } public void setReplyMarkup(TLAbsReplyMarkup replyMarkup) { this.replyMarkup = replyMarkup; } public TLVector<TLAbsMessageEntity> getEntities() { return entities; } public void setEntities(TLVector<TLAbsMessageEntity> entities) { this.entities = entities; } public Integer getViews() { return views; } public void setViews(Integer views) { this.views = views; } public Integer 
getEditDate() { return editDate; } public void setEditDate(Integer editDate) { this.editDate = editDate; } }
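// A minimal standalone sketch (not part of kotlogram) of the flag-bit pattern used by
// TLMessage.computeFlags()/serializeBody()/deserializeBody() above: each optional field
// owns one bit in an int bitmask, the bit is set iff the field is present, and the field
// body is only written/read when its bit is set. Names below (FlagDemo, FLAG_FROM_ID,
// FLAG_VIEWS) are hypothetical; the bit values mirror fromId (256) and views (1024) above.
import java.io.*;

class FlagDemo {
    static final int FLAG_FROM_ID = 1 << 8;  // 256
    static final int FLAG_VIEWS = 1 << 10;   // 1024

    public static void main(String[] args) throws IOException {
        Integer fromId = 42;  // present
        Integer views = null; // absent

        int flags = 0;
        flags = fromId != null ? (flags | FLAG_FROM_ID) : (flags & ~FLAG_FROM_ID);
        flags = views != null ? (flags | FLAG_VIEWS) : (flags & ~FLAG_VIEWS);

        // Serialize: flags first, then only the fields whose bits are set.
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        DataOutputStream dos = new DataOutputStream(out);
        dos.writeInt(flags);
        if ((flags & FLAG_FROM_ID) != 0) dos.writeInt(fromId);
        if ((flags & FLAG_VIEWS) != 0) dos.writeInt(views);

        // Deserialize symmetrically: read flags, then conditionally read each field.
        DataInputStream dis = new DataInputStream(new ByteArrayInputStream(out.toByteArray()));
        int readFlags = dis.readInt();
        Integer readFromId = (readFlags & FLAG_FROM_ID) != 0 ? dis.readInt() : null;
        Integer readViews = (readFlags & FLAG_VIEWS) != 0 ? dis.readInt() : null;
        System.out.println("fromId=" + readFromId + ", views=" + readViews); // fromId=42, views=null
    }
}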
/* * Copyright (C) 2011 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package libcore.java.net; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNotSame; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import java.net.Inet4Address; import java.net.Inet6Address; import java.net.InetAddress; import java.net.InetSocketAddress; import java.net.NetworkInterface; import java.net.UnknownHostException; import java.util.Arrays; import java.util.Collections; import java.util.HashSet; import java.util.Set; /* J2ObjC removed. import junitparams.JUnitParamsRunner; import junitparams.Parameters; */ import libcore.libcore.util.SerializationTester; import libcore.net.InetAddressUtils; import org.junit.Test; import org.junit.runner.RunWith; /* J2ObjC removed. @RunWith(JUnitParamsRunner.class) */ public class InetAddressTest { private static final byte[] LOOPBACK4_BYTES = new byte[] { 127, 0, 0, 1 }; private static final byte[] LOOPBACK6_BYTES = { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1 }; private static final String[] INVALID_IPv4_AND_6_NUMERIC_ADDRESSES = new String[] { // IPv4 addresses may not be surrounded by square brackets. "[127.0.0.1]", // Trailing dots are not allowed. "1.2.3.4.", // Nor is any kind of trailing junk. "1.2.3.4hello", // Out of range. "256.2.3.4", "1.256.3.4", "1.2.256.4", "1.2.3.256", // Deprecated. "1.2.3", "1.2", "1", "1234", "0", // Single out the deprecated form of the ANY address. // Older Harmony tests expected this to be resolved to 255.255.255.255. "4294967295", // 0xffffffffL, // Hex. Not supported by Android but supported by the RI. "0x1.0x2.0x3.0x4", "0x7f.0x00.0x00.0x01", "7f.0.0.1", // Octal. Not supported by Android but supported by the RI. In the RI, if any of the numbers // cannot be treated as a decimal the entire IP is interpreted differently, leading to // "0177.00.00.01" -> 177.0.0.1, but "0177.0x0.00.01" -> 127.0.0.1. // Android does not do this. "0256.00.00.01", // Historically, this could have been interpreted as 174.0.0.1. // Negative numbers. "-1.0.0.1", "1.-1.0.1", "1.0.-1.1", "1.0.0.-1", // Invalid IPv6 addresses "FFFF:FFFF", }; private static final String VALID_IPv6_ADDRESSES[] = { "::1.2.3.4", "::", "::", "1::0", "1::", "::1", "FFFF:FFFF:FFFF:FFFF:FFFF:FFFF:FFFF:FFFF", "FFFF:FFFF:FFFF:FFFF:FFFF:FFFF:255.255.255.255", "0:0:0:0:0:0:0:0", "0:0:0:0:0:0:0.0.0.0", "::255.255.255.255", "::FFFF:0.0.0.0", "F:F:F:F:F:F:F:F", }; private static Inet6Address loopback6() throws Exception { return (Inet6Address) InetAddress.getByAddress(LOOPBACK6_BYTES); } private static Inet6Address localhost6() throws Exception { return (Inet6Address) InetAddress.getByAddress("ip6-localhost", LOOPBACK6_BYTES); } public static String[][] validNumericAddressesAndStringRepresentation() { return new String[][]{ // Regular IPv4. { "1.2.3.4", "/1.2.3.4" }, // Regular IPv6. 
{ "2001:4860:800d::68", "/2001:4860:800d::68" }, // Mapped IPv4 { "::ffff:127.0.0.1", "/127.0.0.1" }, // Optional square brackets around IPv6 addresses, including mapped IPv4. { "[2001:4860:800d::68]", "/2001:4860:800d::68" }, { "[::ffff:127.0.0.1]", "/127.0.0.1" }, // Android does not recognize Octal (leading 0) cases: they are treated as decimal. { "0177.00.00.01", "/177.0.0.1" }, }; } /* J2ObjC removed: does not support JUnitParamsRunner. @Parameters(method = "validNumericAddressesAndStringRepresentation") @Test public void test_parseNumericAddress(String address, String expectedString) throws Exception { assertEquals(expectedString, InetAddress.parseNumericAddress(address).toString()); } */ @Test public void test_parseNumericAddress_notNumeric() throws Exception { try { InetAddress.parseNumericAddress("example.com"); // Not numeric. fail(); } catch (IllegalArgumentException expected) { } // Strange special cases, for compatibility with InetAddress.getByName. assertTrue(InetAddress.parseNumericAddress(null).isLoopbackAddress()); assertTrue(InetAddress.parseNumericAddress("").isLoopbackAddress()); } /* J2ObjC removed: does not support JUnitParamsRunner. @Parameters(method = "invalidNumericAddresses") @Test public void test_parseNumericAddress_invalid(String invalid) throws Exception { try { InetAddress.parseNumericAddress(invalid); fail(invalid); } catch (IllegalArgumentException expected) { } } */ public static String[] validNumericAddresses() { return new String[] { // IPv4 "1.2.3.4", "127.0.0.1", // IPv6 "::1", "2001:4860:800d::68", // Mapped IPv4 "::ffff:127.0.0.1", // Optional square brackets around IPv6 addresses, including mapped IPv4. "[2001:4860:800d::68]", "[::ffff:127.0.0.1]", // Android does not handle Octal (leading 0) cases: they are treated as decimal. "0177.00.00.01", }; } /* J2ObjC removed: does not support JUnitParamsRunner. @Parameters(method = "validNumericAddresses") @Test public void test_isNumeric(String valid) throws Exception { assertTrue(InetAddress.isNumeric(valid)); } */ @Test public void test_isNumeric_notNumeric_null() throws Exception { try { boolean result = InetAddress.isNumeric(null); fail("Expected isNumeric(null) to throw a NPE but instead returned " + result); } catch (NullPointerException expected) { } } @Test public void test_isNumeric_notNumeric_empty() throws Exception { assertFalse(InetAddress.isNumeric("")); } @Test public void test_isNumeric_notNumeric() throws Exception { // Negative test assertFalse(InetAddress.isNumeric("example.com")); } /* J2ObjC removed: does not support JUnitParamsRunner. 
@Parameters(method = "invalidNumericAddresses") @Test public void test_isNumeric_invalid(String invalid) { assertFalse(invalid, InetAddress.isNumeric(invalid)); } */ @Test public void test_isLinkLocalAddress() throws Exception { assertFalse(InetAddress.getByName("127.0.0.1").isLinkLocalAddress()); assertFalse(InetAddress.getByName("::ffff:127.0.0.1").isLinkLocalAddress()); assertTrue(InetAddress.getByName("169.254.1.2").isLinkLocalAddress()); assertFalse(InetAddress.getByName("fec0::").isLinkLocalAddress()); assertTrue(InetAddress.getByName("fe80::").isLinkLocalAddress()); } @Test public void test_isMCSiteLocalAddress() throws Exception { assertFalse(InetAddress.getByName("239.254.255.255").isMCSiteLocal()); assertTrue(InetAddress.getByName("239.255.0.0").isMCSiteLocal()); assertTrue(InetAddress.getByName("239.255.255.255").isMCSiteLocal()); assertFalse(InetAddress.getByName("240.0.0.0").isMCSiteLocal()); assertFalse(InetAddress.getByName("ff06::").isMCSiteLocal()); assertTrue(InetAddress.getByName("ff05::").isMCSiteLocal()); assertTrue(InetAddress.getByName("ff15::").isMCSiteLocal()); } @Test public void test_isReachable() throws Exception { // http://code.google.com/p/android/issues/detail?id=20203 String s = "aced0005737200146a6176612e6e65742e496e6574416464726573732d9b57af" + "9fe3ebdb0200034900076164647265737349000666616d696c794c0008686f737" + "44e616d657400124c6a6176612f6c616e672f537472696e673b78704a7d9d6300" + "00000274000e7777772e676f6f676c652e636f6d"; InetAddress inetAddress = InetAddress.getByName("www.google.com"); new SerializationTester<InetAddress>(inetAddress, s) { @Override protected void verify(InetAddress deserialized) throws Exception { deserialized.isReachable(500); for (NetworkInterface nif : Collections.list(NetworkInterface.getNetworkInterfaces())) { deserialized.isReachable(nif, 20, 500); } } @Override protected boolean equals(InetAddress a, InetAddress b) { return a.getHostName().equals(b.getHostName()); } }.test(); } @Test public void test_isReachable_neverThrows() throws Exception { InetAddress inetAddress = InetAddress.getByName("www.google.com"); final NetworkInterface netIf = NetworkInterface.getByName("dummy0"); if (netIf == null) { System.logI("Skipping test_isReachable_neverThrows because dummy0 isn't available"); return; } assertFalse(inetAddress.isReachable(netIf, 256, 500)); } // IPPROTO_ICMP socket kind requires setting ping_group_range. This is set on boot on Android. // When running on host, make sure you run the command: // sudo sysctl -w net.ipv4.ping_group_range="0 65535" @Test public void test_isReachable_by_ICMP() throws Exception { InetAddress[] inetAddresses = InetAddress.getAllByName("www.google.com"); /* J2ObjC removed. for (InetAddress ia : inetAddresses) { // ICMP is not reliable, allow 5 attempts to each IP address before failing. // If any address is reachable then that's sufficient. if (ia.isReachableByICMP(5 * 1000 /* ICMP timeout *)) { return; } } fail(); */ } @Test public void test_inUnreachable() throws Exception { // IPv6 discard prefix. RFC 6666. 
final InetAddress blackholeAddress = InetAddress.getByName("100::1"); assertFalse(blackholeAddress.isReachable(1000)); } @Test public void test_isSiteLocalAddress() throws Exception { assertFalse(InetAddress.getByName("144.32.32.1").isSiteLocalAddress()); assertTrue(InetAddress.getByName("10.0.0.1").isSiteLocalAddress()); assertTrue(InetAddress.getByName("172.16.0.1").isSiteLocalAddress()); assertFalse(InetAddress.getByName("172.32.0.1").isSiteLocalAddress()); assertTrue(InetAddress.getByName("192.168.0.1").isSiteLocalAddress()); assertFalse(InetAddress.getByName("fc00::").isSiteLocalAddress()); assertTrue(InetAddress.getByName("fec0::").isSiteLocalAddress()); } public static String[] invalidNumericAddresses() { return INVALID_IPv4_AND_6_NUMERIC_ADDRESSES; } /* J2ObjC removed: does not support JUnitParamsRunner. @SuppressWarnings("ResultOfMethodCallIgnored") @Parameters(method = "invalidNumericAddresses") @Test public void test_getByName_invalid(String invalid) throws Exception { try { InetAddress.getByName(invalid); String msg = "Invalid IP address incorrectly recognized as valid: \"" + invalid + "\""; if (InetAddressUtils.parseNumericAddressNoThrowStripOptionalBrackets(invalid) == null) { msg += " (it was probably unexpectedly resolved by this network's DNS)"; } msg += "."; fail(msg); } catch (UnknownHostException expected) { } // exercise negative cache try { InetAddress.getByName(invalid); fail("Invalid IP address incorrectly recognized as valid: " + invalid); } catch (Exception expected) { } } */ public static String[] validIPv6Addresses() { return VALID_IPv6_ADDRESSES; } /* J2ObjC removed: does not support JUnitParamsRunner. @Parameters(method = "validIPv6Addresses") @Test public void test_getByName_valid(String valid) throws Exception { InetAddress.getByName(valid); // exercise positive cache InetAddress.getByName(valid); // when wrapped in [..] String tempIPAddress = "[" + valid + "]"; InetAddress.getByName(tempIPAddress); } */ @Test public void test_getLoopbackAddress() throws Exception { assertTrue(InetAddress.getLoopbackAddress().isLoopbackAddress()); } @Test public void test_equals() throws Exception { InetAddress addr = InetAddress.getByName("239.191.255.255"); assertTrue(addr.equals(addr)); assertTrue(loopback6().equals(localhost6())); assertFalse(addr.equals(loopback6())); assertTrue(Inet4Address.LOOPBACK.equals(Inet4Address.LOOPBACK)); // http://b/4328294 - the scope id isn't included when comparing Inet6Address instances. byte[] bs = new byte[16]; assertEquals(Inet6Address.getByAddress("1", bs, 1), Inet6Address.getByAddress("2", bs, 2)); } @Test public void test_getHostAddress() throws Exception { assertEquals("::1", localhost6().getHostAddress()); assertEquals("::1", InetAddress.getByName("::1").getHostAddress()); assertEquals("127.0.0.1", Inet4Address.LOOPBACK.getHostAddress()); // IPv4 mapped address assertEquals("127.0.0.1", InetAddress.getByName("::ffff:127.0.0.1").getHostAddress()); InetAddress aAddr = InetAddress.getByName("224.0.0.0"); assertEquals("224.0.0.0", aAddr.getHostAddress()); /* J2ObjC removed. 
try { InetAddress.getByName("1"); fail(); } catch (UnknownHostException expected) { } */ byte[] bAddr = { (byte) 0xFE, (byte) 0x80, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x02, (byte) 0x11, (byte) 0x25, (byte) 0xFF, (byte) 0xFE, (byte) 0xF8, (byte) 0x7C, (byte) 0xB2 }; aAddr = Inet6Address.getByAddress(bAddr); String aString = aAddr.getHostAddress(); assertTrue(aString.equals("fe80:0:0:0:211:25ff:fef8:7cb2") || aString.equals("fe80::211:25ff:fef8:7cb2")); byte[] cAddr = { (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF }; aAddr = Inet6Address.getByAddress(cAddr); assertEquals("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff", aAddr.getHostAddress()); byte[] dAddr = { (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00 }; aAddr = Inet6Address.getByAddress(dAddr); aString = aAddr.getHostAddress(); assertTrue(aString.equals("0:0:0:0:0:0:0:0") || aString.equals("::")); byte[] eAddr = { (byte) 0x00, (byte) 0x01, (byte) 0x02, (byte) 0x03, (byte) 0x04, (byte) 0x05, (byte) 0x06, (byte) 0x07, (byte) 0x08, (byte) 0x09, (byte) 0x0a, (byte) 0x0b, (byte) 0x0c, (byte) 0x0d, (byte) 0x0e, (byte) 0x0f }; aAddr = Inet6Address.getByAddress(eAddr); assertEquals("1:203:405:607:809:a0b:c0d:e0f", aAddr.getHostAddress()); byte[] fAddr = { (byte) 0x00, (byte) 0x10, (byte) 0x20, (byte) 0x30, (byte) 0x40, (byte) 0x50, (byte) 0x60, (byte) 0x70, (byte) 0x80, (byte) 0x90, (byte) 0xa0, (byte) 0xb0, (byte) 0xc0, (byte) 0xd0, (byte) 0xe0, (byte) 0xf0 }; aAddr = Inet6Address.getByAddress(fAddr); assertEquals("10:2030:4050:6070:8090:a0b0:c0d0:e0f0", aAddr.getHostAddress()); } @Test public void test_hashCode() throws Exception { InetAddress addr1 = InetAddress.getByName("1.0.0.1"); InetAddress addr2 = InetAddress.getByName("1.0.0.1"); assertTrue(addr1.hashCode() == addr2.hashCode()); assertTrue(loopback6().hashCode() == localhost6().hashCode()); } public static String[][] validAddressesAndStringRepresentation() { return new String[][] { { "::1.2.3.4", "/::1.2.3.4" }, { "::", "/::" }, { "1::0", "/1::" }, { "1::", "/1::" }, { "::1", "/::1" }, { "FFFF:FFFF:FFFF:FFFF:FFFF:FFFF:FFFF:FFFF", "/ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff" }, { "FFFF:FFFF:FFFF:FFFF:FFFF:FFFF:255.255.255.255", "/ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff" }, { "0:0:0:0:0:0:0:0", "/::" }, { "0:0:0:0:0:0:0.0.0.0", "/::" }, }; } /* J2ObjC removed: does not support JUnitParamsRunner. @Parameters(method = "validAddressesAndStringRepresentation") @Test public void test_toString(String address, String expectedString) throws Exception { InetAddress ia = InetAddress.getByName(address); String result = ia.toString(); assertNotNull(result); assertEquals(expectedString, result); } */ @Test public void test_getHostNameCaches() throws Exception { InetAddress inetAddress = InetAddress.getByAddress(LOOPBACK6_BYTES); // There should be no cached name. assertEquals("::1", getHostStringWithoutReverseDns(inetAddress)); /* J2ObjC: getnameinfo() in Inet6AddressImpl.m returns "localhost". // Force the reverse-DNS lookup. assertEquals("ip6-localhost", inetAddress.getHostName()); // The cached name should now be different. 
assertEquals("ip6-localhost", getHostStringWithoutReverseDns(inetAddress)); */ } @Test public void test_getByAddress_loopbackIpv4() throws Exception { InetAddress inetAddress = InetAddress.getByAddress(LOOPBACK4_BYTES); checkInetAddress(LOOPBACK4_BYTES, "localhost", inetAddress); assertTrue(inetAddress.isLoopbackAddress()); } @Test public void test_getByAddress_loopbackIpv6() throws Exception { InetAddress inetAddress = InetAddress.getByAddress(LOOPBACK6_BYTES); /* J2ObjC: getnameinfo() in Inet6AddressImpl.m returns "localhost". checkInetAddress(LOOPBACK6_BYTES, "ip6-localhost", inetAddress); */ assertTrue(inetAddress.isLoopbackAddress()); } @Test public void test_getByName_loopbackIpv4() throws Exception { InetAddress inetAddress = InetAddress.getByName("127.0.0.1"); checkInetAddress(LOOPBACK4_BYTES, "localhost", inetAddress); assertTrue(inetAddress.isLoopbackAddress()); } @Test public void test_getByName_loopbackIpv6() throws Exception { InetAddress inetAddress = InetAddress.getByName("::1"); /* J2ObjC: getnameinfo() in Inet6AddressImpl.m returns "localhost". checkInetAddress(LOOPBACK6_BYTES, "ip6-localhost", inetAddress); */ assertTrue(inetAddress.isLoopbackAddress()); } @Test public void test_getByName_empty() throws Exception { InetAddress inetAddress = InetAddress.getByName(""); checkInetAddress(LOOPBACK6_BYTES, "ip6-localhost", inetAddress); assertTrue(inetAddress.isLoopbackAddress()); } /* TODO(zgao): b/65289980. @Test public void test_getAllByName_localhost() throws Exception { InetAddress[] inetAddresses = InetAddress.getAllByName("localhost"); assertEquals(1, inetAddresses.length); InetAddress inetAddress = inetAddresses[0]; checkInetAddress(LOOPBACK4_BYTES, "localhost", inetAddress); assertTrue(inetAddress.isLoopbackAddress()); } */ /* TODO(zgao): b/65289980. @Test public void test_getAllByName_ip6_localhost() throws Exception { InetAddress[] inetAddresses = InetAddress.getAllByName("ip6-localhost"); assertEquals(1, inetAddresses.length); InetAddress inetAddress = inetAddresses[0]; checkInetAddress(LOOPBACK6_BYTES, "ip6-localhost", inetAddress); assertTrue(inetAddress.isLoopbackAddress()); } */ @Test public void test_getByName_v6loopback() throws Exception { InetAddress inetAddress = InetAddress.getByName("::1"); Set<InetAddress> expectedLoopbackAddresses = createSet(Inet4Address.LOOPBACK, Inet6Address.LOOPBACK); assertTrue(expectedLoopbackAddresses.contains(inetAddress)); } @Test public void test_getByName_cloning() throws Exception { InetAddress[] addresses = InetAddress.getAllByName(null); InetAddress[] addresses2 = InetAddress.getAllByName(null); assertNotNull(addresses[0]); assertNotNull(addresses[1]); assertNotSame(addresses, addresses2); // Also assert that changes to the return value do not affect the cache // etc. i.e, that we return a copy. addresses[0] = null; addresses2 = InetAddress.getAllByName(null); assertNotNull(addresses2[0]); assertNotNull(addresses2[1]); } @Test public void test_getAllByName_null() throws Exception { InetAddress[] inetAddresses = InetAddress.getAllByName(null); assertEquals(2, inetAddresses.length); Set<InetAddress> expectedLoopbackAddresses = createSet(Inet4Address.LOOPBACK, Inet6Address.LOOPBACK); assertEquals(expectedLoopbackAddresses, createSet(inetAddresses)); } // http://b/29311351 @Test public void test_loopbackConstantsPreInitializedNames() { // Note: Inet6Address / Inet4Address equals() does not check host name. 
assertEquals("ip6-localhost", getHostStringWithoutReverseDns(Inet6Address.LOOPBACK)); assertEquals("localhost", getHostStringWithoutReverseDns(Inet4Address.LOOPBACK)); } private static void checkInetAddress( byte[] expectedAddressBytes, String expectedHostname, InetAddress actual) { assertArrayEquals(expectedAddressBytes, actual.getAddress()); assertEquals(expectedHostname, actual.getHostName()); } private static void assertArrayEquals(byte[] expected, byte[] actual) { assertTrue("Expected=" + Arrays.toString(expected) + ", actual=" + Arrays.toString(actual), Arrays.equals(expected, actual)); } private static Set<InetAddress> createSet(InetAddress... members) { return new HashSet<InetAddress>(Arrays.asList(members)); } private static String getHostStringWithoutReverseDns(InetAddress inetAddress) { // The InetAddress API provides no way of avoiding a DNS lookup, but InetSocketAddress // does via InetSocketAddress.getHostString(). InetSocketAddress inetSocketAddress = new InetSocketAddress(inetAddress, 9999); return inetSocketAddress.getHostString(); } }
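// A minimal standalone sketch of the behaviors the tests above exercise: numeric literals
// are parsed without any DNS lookup, an IPv4-mapped IPv6 literal ("::ffff:x.x.x.x")
// collapses to a plain IPv4 address, and InetSocketAddress.getHostString() reports the
// cached/literal name without the reverse-DNS lookup that getHostName() can trigger.
// Printed forms are platform-dependent (e.g. "::1" on Android vs "0:0:0:0:0:0:0:1" on
// OpenJDK); the class name LoopbackDemo is a hypothetical illustration.
import java.net.InetAddress;
import java.net.InetSocketAddress;

class LoopbackDemo {
    public static void main(String[] args) throws Exception {
        // getByAddress never does DNS: it builds the address straight from the bytes.
        InetAddress v4 = InetAddress.getByAddress(new byte[] {127, 0, 0, 1});
        InetAddress v6 = InetAddress.getByAddress(
                new byte[] {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1});
        System.out.println(v4.isLoopbackAddress() + " " + v4.getHostAddress()); // true 127.0.0.1
        System.out.println(v6.isLoopbackAddress() + " " + v6.getHostAddress()); // true, ::1 form varies

        // Mapped IPv4: the textual IPv6 form comes back as an IPv4 address.
        InetAddress mapped = InetAddress.getByName("::ffff:127.0.0.1");
        System.out.println(mapped.getHostAddress()); // 127.0.0.1

        // Same trick as getHostStringWithoutReverseDns() above: no reverse-DNS lookup.
        System.out.println(new InetSocketAddress(v6, 9999).getHostString());
    }
}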
/* * * Derby - Class org.apache.derbyTesting.functionTests.util.SecurityManagerSetup * * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, * either express or implied. See the License for the specific * language governing permissions and limitations under the License. */ package org.apache.derbyTesting.junit; import java.io.File; import java.net.URL; import java.security.AccessController; import java.security.Policy; import java.util.Enumeration; import java.util.Properties; import junit.extensions.TestSetup; import junit.framework.Test; import junit.framework.TestSuite; /** * Setup for running Derby JUnit tests with the SecurityManager * which is the default for tests. * */ public final class SecurityManagerSetup extends TestSetup { /** * Does the JVM support Subjects for * authorization through the Java security manager. * J2ME/CDC/Foundation 1.1 does not support Subjects. */ public static final boolean JVM_HAS_SUBJECT_AUTHORIZATION; static { JVM_HAS_SUBJECT_AUTHORIZATION = JDBC.haveClass("javax.security.auth.Subject"); } private static final Properties classPathSet = new Properties(); /** * True if the classes are loaded from jars. */ static boolean isJars; /** * True if a security manager was installed outside of the * control of this class and BaseTestCase. */ private static final boolean externalSecurityManagerInstalled; static { // Determine what the set of properties // describing the environment is. externalSecurityManagerInstalled = determineClasspath(); } private final String decoratorPolicyResource; public SecurityManagerSetup(Test test, String policyResource) { super(test); this.decoratorPolicyResource = policyResource; } /** * Get a decorator that will ensure no security manager * is installed to run a test. Not supported for suites. * <BR> * An empty suite is returned if a security manager was installed * externally, i.e. not under the control of the BaseTestCase * and this code. In this case the code cannot support the * mode of no security manager as it may not have enough information * to re-install the security manager. So the passed-in test * will be skipped. * * @param test Test to run without a security manager. Note that * this must be an instance of BaseTestCase as this call depends * on setup code in that class. Arbitrary Test instances cannot be passed in. */ public static Test noSecurityManager(Test test) { if (externalSecurityManagerInstalled) return new TestSuite("skipped due to external security manager " + test.toString()); return new SecurityManagerSetup(test, "<NONE>"); } /** * "Install" no security manager. * */ static void noSecurityManager() { installSecurityManager("<NONE>"); } /** * Install a specific policy file with the security manager, * including the special case of no security manager.
*/ protected void setUp() { installSecurityManager(decoratorPolicyResource); } protected void tearDown() throws Exception { if ("<NONE>".equals(decoratorPolicyResource)) BaseTestCase.setSystemProperty("java.security.policy", ""); else if ( !externalSecurityManagerInstalled ) { uninstallSecurityManager(); } } /** * Return the name of the default policy. */ public static String getDefaultPolicy() { return "org/apache/derbyTesting/functionTests/util/derby_tests.policy"; } /** * Install a SecurityManager with the default test policy * file: * org/apache/derbyTesting/functionTests/util/derby_tests.policy * */ static void installSecurityManager() { installSecurityManager( getDefaultPolicy() ); } private static void installSecurityManager(String policyFile) { if (externalSecurityManagerInstalled) return; Properties set = new Properties(classPathSet); setSecurityPolicy(set, policyFile); SecurityManager sm = System.getSecurityManager(); if (sm != null) { // SecurityManager installed, see if it has the same settings. String newPolicyProperty = set.getProperty("java.security.policy" ); if ( newPolicyProperty == null ) { newPolicyProperty = ""; } String oldPolicyProperty = BaseTestCase.getSystemProperty("java.security.policy"); if ( oldPolicyProperty == null ) { oldPolicyProperty = ""; } if ( newPolicyProperty.equals( oldPolicyProperty ) ) { return; } // Uninstall the current manager. uninstallSecurityManager(); } // Set the system properties from the desired set. for (Enumeration e = set.propertyNames(); e.hasMoreElements();) { String key = (String) e.nextElement(); BaseTestCase.setSystemProperty(key, set.getProperty(key)); } // Check indicator for no security manager if ("<NONE>".equals(set.getProperty("java.security.policy"))) return; // and install AccessController.doPrivileged(new java.security.PrivilegedAction() { public Object run() { SecurityManager sm = new SecurityManager(); System.setSecurityManager(sm); Policy.getPolicy().refresh(); return null; } }); } private static void setSecurityPolicy(Properties set, String policyResource) { if ("<NONE>".equals(policyResource)) { set.setProperty("java.security.policy", policyResource); return; } URL policyURL = BaseTestCase.getTestResource(policyResource); // maybe the passed in resource was an URL to begin with if ( policyURL == null ) { try { policyURL = new URL( policyResource ); } catch (Exception e) { System.out.println( "Unreadable url: " + policyResource ); } } if (policyURL != null) { set.setProperty("java.security.policy", policyURL.toExternalForm()); } } /** * Determine the settings of the classpath in order to configure * the variables used in the testing policy files. * Looks for three items: * * Location of derbyTesting.jar via this class * Location of gemfirexd.jar via com.pivotal.gemfirexd.internal.jdbc.EmbeddedSimpleDataSource * Location of gemfirexd-client.jar via com.pivotal.gemfirexd.internal.jdbc.ClientDataSource * * Two options are supported, either all are in jar files or * all are on the classpath. Properties are set as follows: * * <P> * Classpath: * <BR> * derbyTesting.codeclasses set to URL of classes folder * <P> * Jar files: * <BR> * derbyTesting.codejar - URL of gemfirexd.jar, * gemfirexd.jar and gemfirexd-tools.jar, all assumed to be in the * same location. * <BR> * derbyTesting.clientjar - URL of gemfirexd-client.jar * <BR> * derbyTesting.testjar - URL of derbyTesting.jar * <BR> * derbyTesting.testjarpath - File system path to derbyTesting.jar * if the jar has a URL with a file protocol. 
* */ private static boolean determineClasspath() { // Security manager already installed, assume that // it is set up correctly. if (System.getSecurityManager() != null) { return true; } //We need the junit classes to instantiate this class, so the //following should not cause runtime errors. URL junit = getURL(junit.framework.Test.class); if (junit != null) classPathSet.setProperty("derbyTesting.junit", junit.toExternalForm()); // Load indirectly so we don't need ant-junit.jar at compile time. URL antjunit = getURL("org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner"); if (antjunit != null) classPathSet.setProperty("derbyTesting.antjunit", antjunit.toExternalForm()); // Load indirectly, normally no EMMA jars in the classpath. // This property is needed to set correct permissions in policy files. URL emma = getURL("com.vladium.emma.EMMAException"); if (emma != null) { classPathSet.setProperty("emma.active", ""); classPathSet.setProperty("derbyTesting.emma", emma.toExternalForm()); } /* When inserting XML values that use external DTD's, the JAXP * parser needs permission to read the DTD files. So here we set * a property to hold the location of the JAXP implementation * jar file. We can then grant the JAXP impl the permissions * needed for reading the DTD files. */ String jaxp = XML.getJAXPParserLocation(); if (jaxp != null) classPathSet.setProperty("derbyTesting.jaxpjar", jaxp); URL testing = getURL(SecurityManagerSetup.class); boolean isClasspath = testing.toExternalForm().endsWith("/"); if (isClasspath) { classPathSet.setProperty("derbyTesting.codeclasses", testing.toExternalForm()); isJars = false; return false; } classPathSet.setProperty("derbyTesting.testjar", stripJar(testing)); if (testing.getProtocol().equals("file")) { File f = new File(testing.getPath()); classPathSet.setProperty("derbyTesting.testjarpath", f.getAbsolutePath()); } isJars = true; URL derby = getURL("com.pivotal.gemfirexd.internal.jdbc.EmbeddedSimpleDataSource"); if (derby != null) classPathSet.setProperty("derbyTesting.codejar", stripJar(derby)); // if we attempt to check on availability of the ClientDataSource with // JSR169, attempts will be made to load classes not supported in // that environment, such as javax.naming.Referenceable. See DERBY-2269. if (!JDBC.vmSupportsJSR169()) { URL client = getURL("com.pivotal.gemfirexd.internal.jdbc.ClientDataSource"); if(client != null) classPathSet.setProperty("derbyTesting.clientjar", stripJar(client)); } return false; } /** * Return the policy file system properties for use * by the old test harness. This ensures a consistent * approach to setting the properties. There are the * properties used to define the jar file location in * any policy files. */ public static Properties getPolicyFilePropertiesForOldHarness() { return classPathSet; } /** * Strip off the last token which will be the jar name. * The returned string includes the trailing slash. * @param url * @return the jar name from the URL as a String */ private static String stripJar(URL url) { String ef = url.toExternalForm(); return ef.substring(0, ef.lastIndexOf('/') + 1); } /** * Get the URL of the code base from a class name. * If the class cannot be loaded, null is returned. */ public static URL getURL(String className) { try { return getURL(Class.forName(className)); } catch (ClassNotFoundException e) { return null; } } /** * Get the URL of the code base from a class. 
*/ static URL getURL(final Class cl) { return (URL) AccessController.doPrivileged(new java.security.PrivilegedAction() { public Object run() { /* It's possible that the class does not have a "codeSource" * associated with it (ex. if it is embedded within the JVM, * as can happen with Xalan and/or a JAXP parser), so in that * case we just return null. */ if (cl.getProtectionDomain().getCodeSource() == null) return null; return cl.getProtectionDomain().getCodeSource().getLocation(); } }); } /** * Remove the security manager. */ private static void uninstallSecurityManager() { AccessController.doPrivileged ( new java.security.PrivilegedAction() { public Object run() { System.setSecurityManager(null); return null; } } ); } }
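// A minimal standalone sketch of the code-source probing technique used by
// SecurityManagerSetup.getURL()/determineClasspath() above: ask a class's ProtectionDomain
// where it was loaded from, and distinguish a classes directory (URL ending in "/") from a
// jar. The class name CodeSourceDemo is a hypothetical illustration, not Derby code.
import java.net.URL;

class CodeSourceDemo {
    public static void main(String[] args) {
        URL here = codeSource(CodeSourceDemo.class);
        if (here == null) {
            System.out.println("no code source (e.g. a JVM-embedded class)");
        } else if (here.toExternalForm().endsWith("/")) {
            System.out.println("running from a classes directory: " + here);
        } else {
            // stripJar-style: keep everything up to and including the last '/'
            String ef = here.toExternalForm();
            System.out.println("running from a jar in: " + ef.substring(0, ef.lastIndexOf('/') + 1));
        }
    }

    // Returns null when the class has no CodeSource, as can happen for bootstrap classes.
    static URL codeSource(Class<?> cl) {
        if (cl.getProtectionDomain().getCodeSource() == null) {
            return null;
        }
        return cl.getProtectionDomain().getCodeSource().getLocation();
    }
}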
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.sql.planner.optimizations; import com.facebook.presto.Session; import com.facebook.presto.spi.ColumnHandle; import com.facebook.presto.spi.type.Type; import com.facebook.presto.sql.planner.DependencyExtractor; import com.facebook.presto.sql.planner.PartitioningScheme; import com.facebook.presto.sql.planner.PlanNodeIdAllocator; import com.facebook.presto.sql.planner.Symbol; import com.facebook.presto.sql.planner.SymbolAllocator; import com.facebook.presto.sql.planner.plan.AggregationNode; import com.facebook.presto.sql.planner.plan.AggregationNode.Aggregation; import com.facebook.presto.sql.planner.plan.ApplyNode; import com.facebook.presto.sql.planner.plan.AssignUniqueId; import com.facebook.presto.sql.planner.plan.Assignments; import com.facebook.presto.sql.planner.plan.DeleteNode; import com.facebook.presto.sql.planner.plan.DistinctLimitNode; import com.facebook.presto.sql.planner.plan.ExceptNode; import com.facebook.presto.sql.planner.plan.ExchangeNode; import com.facebook.presto.sql.planner.plan.ExplainAnalyzeNode; import com.facebook.presto.sql.planner.plan.FilterNode; import com.facebook.presto.sql.planner.plan.GroupIdNode; import com.facebook.presto.sql.planner.plan.IndexJoinNode; import com.facebook.presto.sql.planner.plan.IndexSourceNode; import com.facebook.presto.sql.planner.plan.IntersectNode; import com.facebook.presto.sql.planner.plan.JoinNode; import com.facebook.presto.sql.planner.plan.LateralJoinNode; import com.facebook.presto.sql.planner.plan.LimitNode; import com.facebook.presto.sql.planner.plan.MarkDistinctNode; import com.facebook.presto.sql.planner.plan.OutputNode; import com.facebook.presto.sql.planner.plan.PlanNode; import com.facebook.presto.sql.planner.plan.ProjectNode; import com.facebook.presto.sql.planner.plan.RowNumberNode; import com.facebook.presto.sql.planner.plan.SemiJoinNode; import com.facebook.presto.sql.planner.plan.SetOperationNode; import com.facebook.presto.sql.planner.plan.SimplePlanRewriter; import com.facebook.presto.sql.planner.plan.SortNode; import com.facebook.presto.sql.planner.plan.TableFinishNode; import com.facebook.presto.sql.planner.plan.TableScanNode; import com.facebook.presto.sql.planner.plan.TableWriterNode; import com.facebook.presto.sql.planner.plan.TopNNode; import com.facebook.presto.sql.planner.plan.TopNRowNumberNode; import com.facebook.presto.sql.planner.plan.UnionNode; import com.facebook.presto.sql.planner.plan.UnnestNode; import com.facebook.presto.sql.planner.plan.ValuesNode; import com.facebook.presto.sql.planner.plan.WindowNode; import com.facebook.presto.sql.tree.Expression; import com.facebook.presto.sql.tree.FunctionCall; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableListMultimap; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import com.google.common.collect.ListMultimap; import com.google.common.collect.Maps; import 
com.google.common.collect.Sets; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; import static com.google.common.base.Predicates.in; import static com.google.common.collect.ImmutableList.toImmutableList; import static com.google.common.collect.ImmutableSet.toImmutableSet; import static com.google.common.collect.Iterables.concat; import static com.google.common.collect.Sets.intersection; import static java.util.Objects.requireNonNull; /** * Removes all computation that is not referenced transitively from the root of the plan * <p> * E.g., * <p> * {@code Output[$0] -> Project[$0 := $1 + $2, $3 := $4 / $5] -> ...} * <p> * gets rewritten as * <p> * {@code Output[$0] -> Project[$0 := $1 + $2] -> ...} */ public class PruneUnreferencedOutputs implements PlanOptimizer { @Override public PlanNode optimize(PlanNode plan, Session session, Map<Symbol, Type> types, SymbolAllocator symbolAllocator, PlanNodeIdAllocator idAllocator) { requireNonNull(plan, "plan is null"); requireNonNull(session, "session is null"); requireNonNull(types, "types is null"); requireNonNull(symbolAllocator, "symbolAllocator is null"); requireNonNull(idAllocator, "idAllocator is null"); return SimplePlanRewriter.rewriteWith(new Rewriter(), plan, ImmutableSet.of()); } private static class Rewriter extends SimplePlanRewriter<Set<Symbol>> { @Override public PlanNode visitExplainAnalyze(ExplainAnalyzeNode node, RewriteContext<Set<Symbol>> context) { return context.defaultRewrite(node, ImmutableSet.copyOf(node.getSource().getOutputSymbols())); } @Override public PlanNode visitExchange(ExchangeNode node, RewriteContext<Set<Symbol>> context) { Set<Symbol> expectedOutputSymbols = Sets.newHashSet(context.get()); node.getPartitioningScheme().getHashColumn().ifPresent(expectedOutputSymbols::add); node.getPartitioningScheme().getPartitioning().getColumns().stream() .forEach(expectedOutputSymbols::add); List<List<Symbol>> inputsBySource = new ArrayList<>(node.getInputs().size()); for (int i = 0; i < node.getInputs().size(); i++) { inputsBySource.add(new ArrayList<>()); } List<Symbol> newOutputSymbols = new ArrayList<>(node.getOutputSymbols().size()); for (int i = 0; i < node.getOutputSymbols().size(); i++) { Symbol outputSymbol = node.getOutputSymbols().get(i); if (expectedOutputSymbols.contains(outputSymbol)) { newOutputSymbols.add(outputSymbol); for (int source = 0; source < node.getInputs().size(); source++) { inputsBySource.get(source).add(node.getInputs().get(source).get(i)); } } } // newOutputSymbols contains all partition and hash symbols so simply swap the output layout PartitioningScheme partitioningScheme = new PartitioningScheme( node.getPartitioningScheme().getPartitioning(), newOutputSymbols, node.getPartitioningScheme().getHashColumn(), node.getPartitioningScheme().isReplicateNullsAndAny(), node.getPartitioningScheme().getBucketToPartition()); ImmutableList.Builder<PlanNode> rewrittenSources = ImmutableList.builder(); for (int i = 0; i < node.getSources().size(); i++) { ImmutableSet.Builder<Symbol> expectedInputs = ImmutableSet.<Symbol>builder() .addAll(inputsBySource.get(i)); rewrittenSources.add(context.rewrite( node.getSources().get(i), expectedInputs.build())); } return new ExchangeNode( node.getId(), node.getType(), node.getScope(), partitioningScheme, rewrittenSources.build(), inputsBySource); } @Override public PlanNode
visitJoin(JoinNode node, RewriteContext<Set<Symbol>> context) { Set<Symbol> expectedFilterInputs = new HashSet<>(); if (node.getFilter().isPresent()) { expectedFilterInputs = ImmutableSet.<Symbol>builder() .addAll(DependencyExtractor.extractUnique(node.getFilter().get())) .addAll(context.get()) .build(); } ImmutableSet.Builder<Symbol> leftInputsBuilder = ImmutableSet.builder(); leftInputsBuilder.addAll(context.get()).addAll(Iterables.transform(node.getCriteria(), JoinNode.EquiJoinClause::getLeft)); if (node.getLeftHashSymbol().isPresent()) { leftInputsBuilder.add(node.getLeftHashSymbol().get()); } leftInputsBuilder.addAll(expectedFilterInputs); Set<Symbol> leftInputs = leftInputsBuilder.build(); ImmutableSet.Builder<Symbol> rightInputsBuilder = ImmutableSet.builder(); rightInputsBuilder.addAll(context.get()).addAll(Iterables.transform(node.getCriteria(), JoinNode.EquiJoinClause::getRight)); if (node.getRightHashSymbol().isPresent()) { rightInputsBuilder.add(node.getRightHashSymbol().get()); } rightInputsBuilder.addAll(expectedFilterInputs); Set<Symbol> rightInputs = rightInputsBuilder.build(); PlanNode left = context.rewrite(node.getLeft(), leftInputs); PlanNode right = context.rewrite(node.getRight(), rightInputs); List<Symbol> outputSymbols; if (node.isCrossJoin()) { // do not prune nested joins output since it is not supported // TODO: remove this "if" branch when output symbols selection is supported by nested loop join outputSymbols = ImmutableList.<Symbol>builder() .addAll(left.getOutputSymbols()) .addAll(right.getOutputSymbols()) .build(); } else { Set<Symbol> seenSymbol = new HashSet<>(); outputSymbols = node.getOutputSymbols().stream() .filter(context.get()::contains) .filter(seenSymbol::add) .collect(toImmutableList()); } return new JoinNode(node.getId(), node.getType(), left, right, node.getCriteria(), outputSymbols, node.getFilter(), node.getLeftHashSymbol(), node.getRightHashSymbol(), node.getDistributionType()); } @Override public PlanNode visitSemiJoin(SemiJoinNode node, RewriteContext<Set<Symbol>> context) { ImmutableSet.Builder<Symbol> sourceInputsBuilder = ImmutableSet.builder(); sourceInputsBuilder.addAll(context.get()).add(node.getSourceJoinSymbol()); if (node.getSourceHashSymbol().isPresent()) { sourceInputsBuilder.add(node.getSourceHashSymbol().get()); } Set<Symbol> sourceInputs = sourceInputsBuilder.build(); ImmutableSet.Builder<Symbol> filteringSourceInputBuilder = ImmutableSet.builder(); filteringSourceInputBuilder.add(node.getFilteringSourceJoinSymbol()); if (node.getFilteringSourceHashSymbol().isPresent()) { filteringSourceInputBuilder.add(node.getFilteringSourceHashSymbol().get()); } Set<Symbol> filteringSourceInputs = filteringSourceInputBuilder.build(); PlanNode source = context.rewrite(node.getSource(), sourceInputs); PlanNode filteringSource = context.rewrite(node.getFilteringSource(), filteringSourceInputs); return new SemiJoinNode(node.getId(), source, filteringSource, node.getSourceJoinSymbol(), node.getFilteringSourceJoinSymbol(), node.getSemiJoinOutput(), node.getSourceHashSymbol(), node.getFilteringSourceHashSymbol(), node.getDistributionType()); } @Override public PlanNode visitIndexJoin(IndexJoinNode node, RewriteContext<Set<Symbol>> context) { ImmutableSet.Builder<Symbol> probeInputsBuilder = ImmutableSet.builder(); probeInputsBuilder.addAll(context.get()) .addAll(Iterables.transform(node.getCriteria(), IndexJoinNode.EquiJoinClause::getProbe)); if (node.getProbeHashSymbol().isPresent()) { probeInputsBuilder.add(node.getProbeHashSymbol().get()); } 
Set<Symbol> probeInputs = probeInputsBuilder.build(); ImmutableSet.Builder<Symbol> indexInputBuilder = ImmutableSet.builder(); indexInputBuilder.addAll(context.get()) .addAll(Iterables.transform(node.getCriteria(), IndexJoinNode.EquiJoinClause::getIndex)); if (node.getIndexHashSymbol().isPresent()) { indexInputBuilder.add(node.getIndexHashSymbol().get()); } Set<Symbol> indexInputs = indexInputBuilder.build(); PlanNode probeSource = context.rewrite(node.getProbeSource(), probeInputs); PlanNode indexSource = context.rewrite(node.getIndexSource(), indexInputs); return new IndexJoinNode(node.getId(), node.getType(), probeSource, indexSource, node.getCriteria(), node.getProbeHashSymbol(), node.getIndexHashSymbol()); } @Override public PlanNode visitIndexSource(IndexSourceNode node, RewriteContext<Set<Symbol>> context) { List<Symbol> newOutputSymbols = node.getOutputSymbols().stream() .filter(context.get()::contains) .collect(toImmutableList()); Set<Symbol> newLookupSymbols = node.getLookupSymbols().stream() .filter(context.get()::contains) .collect(toImmutableSet()); Set<Symbol> requiredAssignmentSymbols = context.get(); if (!node.getEffectiveTupleDomain().isNone()) { Set<Symbol> requiredSymbols = Maps.filterValues(node.getAssignments(), in(node.getEffectiveTupleDomain().getDomains().get().keySet())).keySet(); requiredAssignmentSymbols = Sets.union(context.get(), requiredSymbols); } Map<Symbol, ColumnHandle> newAssignments = Maps.filterKeys(node.getAssignments(), in(requiredAssignmentSymbols)); return new IndexSourceNode(node.getId(), node.getIndexHandle(), node.getTableHandle(), node.getLayout(), newLookupSymbols, newOutputSymbols, newAssignments, node.getEffectiveTupleDomain()); } @Override public PlanNode visitAggregation(AggregationNode node, RewriteContext<Set<Symbol>> context) { ImmutableSet.Builder<Symbol> expectedInputs = ImmutableSet.<Symbol>builder() .addAll(node.getGroupingKeys()); if (node.getHashSymbol().isPresent()) { expectedInputs.add(node.getHashSymbol().get()); } ImmutableMap.Builder<Symbol, Aggregation> aggregations = ImmutableMap.builder(); for (Map.Entry<Symbol, Aggregation> entry : node.getAggregations().entrySet()) { Symbol symbol = entry.getKey(); if (context.get().contains(symbol)) { Aggregation aggregation = entry.getValue(); FunctionCall call = aggregation.getCall(); expectedInputs.addAll(DependencyExtractor.extractUnique(call)); if (aggregation.getMask().isPresent()) { expectedInputs.add(aggregation.getMask().get()); } aggregations.put(symbol, new Aggregation(call, aggregation.getSignature(), aggregation.getMask())); } } PlanNode source = context.rewrite(node.getSource(), expectedInputs.build()); return new AggregationNode(node.getId(), source, aggregations.build(), node.getGroupingSets(), node.getStep(), node.getHashSymbol(), node.getGroupIdSymbol()); } @Override public PlanNode visitWindow(WindowNode node, RewriteContext<Set<Symbol>> context) { ImmutableSet.Builder<Symbol> expectedInputs = ImmutableSet.<Symbol>builder() .addAll(context.get()) .addAll(node.getPartitionBy()) .addAll(node.getOrderBy()); for (WindowNode.Frame frame : node.getFrames()) { if (frame.getStartValue().isPresent()) { expectedInputs.add(frame.getStartValue().get()); } if (frame.getEndValue().isPresent()) { expectedInputs.add(frame.getEndValue().get()); } } if (node.getHashSymbol().isPresent()) { expectedInputs.add(node.getHashSymbol().get()); } ImmutableMap.Builder<Symbol, WindowNode.Function> functionsBuilder = ImmutableMap.builder(); for (Map.Entry<Symbol, WindowNode.Function> entry : 
node.getWindowFunctions().entrySet()) { Symbol symbol = entry.getKey(); WindowNode.Function function = entry.getValue(); if (context.get().contains(symbol)) { FunctionCall call = function.getFunctionCall(); expectedInputs.addAll(DependencyExtractor.extractUnique(call)); functionsBuilder.put(symbol, entry.getValue()); } } PlanNode source = context.rewrite(node.getSource(), expectedInputs.build()); Map<Symbol, WindowNode.Function> functions = functionsBuilder.build(); if (functions.size() == 0) { return source; } return new WindowNode( node.getId(), source, node.getSpecification(), functions, node.getHashSymbol(), node.getPrePartitionedInputs(), node.getPreSortedOrderPrefix()); } @Override public PlanNode visitTableScan(TableScanNode node, RewriteContext<Set<Symbol>> context) { Set<Symbol> requiredTableScanOutputs = context.get().stream() .filter(node.getOutputSymbols()::contains) .collect(toImmutableSet()); List<Symbol> newOutputSymbols = node.getOutputSymbols().stream() .filter(requiredTableScanOutputs::contains) .collect(toImmutableList()); Map<Symbol, ColumnHandle> newAssignments = Maps.filterKeys(node.getAssignments(), in(requiredTableScanOutputs)); return new TableScanNode( node.getId(), node.getTable(), newOutputSymbols, newAssignments, node.getLayout(), node.getCurrentConstraint(), node.getOriginalConstraint()); } @Override public PlanNode visitFilter(FilterNode node, RewriteContext<Set<Symbol>> context) { Set<Symbol> expectedInputs = ImmutableSet.<Symbol>builder() .addAll(DependencyExtractor.extractUnique(node.getPredicate())) .addAll(context.get()) .build(); PlanNode source = context.rewrite(node.getSource(), expectedInputs); return new FilterNode(node.getId(), source, node.getPredicate()); } @Override public PlanNode visitGroupId(GroupIdNode node, RewriteContext<Set<Symbol>> context) { ImmutableSet.Builder<Symbol> expectedInputs = ImmutableSet.builder(); Map<Symbol, Symbol> newArgumentMappings = node.getArgumentMappings().entrySet().stream() .filter(entry -> context.get().contains(entry.getKey())) .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); expectedInputs.addAll(newArgumentMappings.values()); ImmutableList.Builder<List<Symbol>> newGroupingSets = ImmutableList.builder(); Map<Symbol, Symbol> newGroupingMapping = new HashMap<>(); for (List<Symbol> groupingSet : node.getGroupingSets()) { ImmutableList.Builder<Symbol> newGroupingSet = ImmutableList.builder(); for (Symbol output : groupingSet) { if (context.get().contains(output)) { newGroupingSet.add(output); newGroupingMapping.putIfAbsent(output, node.getGroupingSetMappings().get(output)); expectedInputs.add(node.getGroupingSetMappings().get(output)); } } newGroupingSets.add(newGroupingSet.build()); } PlanNode source = context.rewrite(node.getSource(), expectedInputs.build()); return new GroupIdNode(node.getId(), source, newGroupingSets.build(), newGroupingMapping, newArgumentMappings, node.getGroupIdSymbol()); } @Override public PlanNode visitMarkDistinct(MarkDistinctNode node, RewriteContext<Set<Symbol>> context) { if (!context.get().contains(node.getMarkerSymbol())) { return context.rewrite(node.getSource(), context.get()); } ImmutableSet.Builder<Symbol> expectedInputs = ImmutableSet.<Symbol>builder() .addAll(node.getDistinctSymbols()) .addAll(context.get().stream() .filter(symbol -> !symbol.equals(node.getMarkerSymbol())) .collect(toImmutableList())); if (node.getHashSymbol().isPresent()) { expectedInputs.add(node.getHashSymbol().get()); } PlanNode source = context.rewrite(node.getSource(), 
expectedInputs.build()); return new MarkDistinctNode(node.getId(), source, node.getMarkerSymbol(), node.getDistinctSymbols(), node.getHashSymbol()); } @Override public PlanNode visitUnnest(UnnestNode node, RewriteContext<Set<Symbol>> context) { List<Symbol> replicateSymbols = node.getReplicateSymbols().stream() .filter(context.get()::contains) .collect(toImmutableList()); Optional<Symbol> ordinalitySymbol = node.getOrdinalitySymbol(); if (ordinalitySymbol.isPresent() && !context.get().contains(ordinalitySymbol.get())) { ordinalitySymbol = Optional.empty(); } Map<Symbol, List<Symbol>> unnestSymbols = node.getUnnestSymbols(); ImmutableSet.Builder<Symbol> expectedInputs = ImmutableSet.<Symbol>builder() .addAll(replicateSymbols) .addAll(unnestSymbols.keySet()); PlanNode source = context.rewrite(node.getSource(), expectedInputs.build()); return new UnnestNode(node.getId(), source, replicateSymbols, unnestSymbols, ordinalitySymbol); } @Override public PlanNode visitProject(ProjectNode node, RewriteContext<Set<Symbol>> context) { ImmutableSet.Builder<Symbol> expectedInputs = ImmutableSet.builder(); Assignments.Builder builder = Assignments.builder(); for (int i = 0; i < node.getOutputSymbols().size(); i++) { Symbol output = node.getOutputSymbols().get(i); Expression expression = node.getAssignments().get(output); if (context.get().contains(output)) { expectedInputs.addAll(DependencyExtractor.extractUnique(expression)); builder.put(output, expression); } } PlanNode source = context.rewrite(node.getSource(), expectedInputs.build()); return new ProjectNode(node.getId(), source, builder.build()); } @Override public PlanNode visitOutput(OutputNode node, RewriteContext<Set<Symbol>> context) { Set<Symbol> expectedInputs = ImmutableSet.copyOf(node.getOutputSymbols()); PlanNode source = context.rewrite(node.getSource(), expectedInputs); return new OutputNode(node.getId(), source, node.getColumnNames(), node.getOutputSymbols()); } @Override public PlanNode visitLimit(LimitNode node, RewriteContext<Set<Symbol>> context) { ImmutableSet.Builder<Symbol> expectedInputs = ImmutableSet.<Symbol>builder() .addAll(context.get()); PlanNode source = context.rewrite(node.getSource(), expectedInputs.build()); return new LimitNode(node.getId(), source, node.getCount(), node.isPartial()); } @Override public PlanNode visitDistinctLimit(DistinctLimitNode node, RewriteContext<Set<Symbol>> context) { Set<Symbol> expectedInputs; if (node.getHashSymbol().isPresent()) { expectedInputs = ImmutableSet.copyOf(concat(node.getDistinctSymbols(), ImmutableList.of(node.getHashSymbol().get()))); } else { expectedInputs = ImmutableSet.copyOf(node.getDistinctSymbols()); } PlanNode source = context.rewrite(node.getSource(), expectedInputs); return new DistinctLimitNode(node.getId(), source, node.getLimit(), node.isPartial(), node.getDistinctSymbols(), node.getHashSymbol()); } @Override public PlanNode visitTopN(TopNNode node, RewriteContext<Set<Symbol>> context) { ImmutableSet.Builder<Symbol> expectedInputs = ImmutableSet.<Symbol>builder() .addAll(context.get()) .addAll(node.getOrderBy()); PlanNode source = context.rewrite(node.getSource(), expectedInputs.build()); return new TopNNode(node.getId(), source, node.getCount(), node.getOrderBy(), node.getOrderings(), node.getStep()); } @Override public PlanNode visitRowNumber(RowNumberNode node, RewriteContext<Set<Symbol>> context) { ImmutableSet.Builder<Symbol> inputsBuilder = ImmutableSet.builder(); ImmutableSet.Builder<Symbol> expectedInputs = inputsBuilder .addAll(context.get()) 
.addAll(node.getPartitionBy()); if (node.getHashSymbol().isPresent()) { inputsBuilder.add(node.getHashSymbol().get()); } PlanNode source = context.rewrite(node.getSource(), expectedInputs.build()); return new RowNumberNode(node.getId(), source, node.getPartitionBy(), node.getRowNumberSymbol(), node.getMaxRowCountPerPartition(), node.getHashSymbol()); } @Override public PlanNode visitTopNRowNumber(TopNRowNumberNode node, RewriteContext<Set<Symbol>> context) { ImmutableSet.Builder<Symbol> expectedInputs = ImmutableSet.<Symbol>builder() .addAll(context.get()) .addAll(node.getPartitionBy()) .addAll(node.getOrderBy()); if (node.getHashSymbol().isPresent()) { expectedInputs.add(node.getHashSymbol().get()); } PlanNode source = context.rewrite(node.getSource(), expectedInputs.build()); return new TopNRowNumberNode(node.getId(), source, node.getSpecification(), node.getRowNumberSymbol(), node.getMaxRowCountPerPartition(), node.isPartial(), node.getHashSymbol()); } @Override public PlanNode visitSort(SortNode node, RewriteContext<Set<Symbol>> context) { Set<Symbol> expectedInputs = ImmutableSet.copyOf(concat(context.get(), node.getOrderBy())); PlanNode source = context.rewrite(node.getSource(), expectedInputs); return new SortNode(node.getId(), source, node.getOrderBy(), node.getOrderings()); } @Override public PlanNode visitTableWriter(TableWriterNode node, RewriteContext<Set<Symbol>> context) { ImmutableSet.Builder<Symbol> expectedInputs = ImmutableSet.<Symbol>builder() .addAll(node.getColumns()); if (node.getPartitioningScheme().isPresent()) { PartitioningScheme partitioningScheme = node.getPartitioningScheme().get(); partitioningScheme.getPartitioning().getColumns().stream() .forEach(expectedInputs::add); partitioningScheme.getHashColumn().ifPresent(expectedInputs::add); } PlanNode source = context.rewrite(node.getSource(), expectedInputs.build()); return new TableWriterNode( node.getId(), source, node.getTarget(), node.getColumns(), node.getColumnNames(), node.getOutputSymbols(), node.getPartitioningScheme()); } @Override public PlanNode visitTableFinish(TableFinishNode node, RewriteContext<Set<Symbol>> context) { // Maintain the existing inputs needed for TableCommitNode PlanNode source = context.rewrite(node.getSource(), ImmutableSet.copyOf(node.getSource().getOutputSymbols())); return new TableFinishNode(node.getId(), source, node.getTarget(), node.getOutputSymbols()); } @Override public PlanNode visitDelete(DeleteNode node, RewriteContext<Set<Symbol>> context) { PlanNode source = context.rewrite(node.getSource(), ImmutableSet.of(node.getRowId())); return new DeleteNode(node.getId(), source, node.getTarget(), node.getRowId(), node.getOutputSymbols()); } @Override public PlanNode visitUnion(UnionNode node, RewriteContext<Set<Symbol>> context) { ListMultimap<Symbol, Symbol> rewrittenSymbolMapping = rewriteSetOperationSymbolMapping(node, context); ImmutableList<PlanNode> rewrittenSubPlans = rewriteSetOperationSubPlans(node, context, rewrittenSymbolMapping); return new UnionNode(node.getId(), rewrittenSubPlans, rewrittenSymbolMapping, ImmutableList.copyOf(rewrittenSymbolMapping.keySet())); } @Override public PlanNode visitIntersect(IntersectNode node, RewriteContext<Set<Symbol>> context) { ListMultimap<Symbol, Symbol> rewrittenSymbolMapping = rewriteSetOperationSymbolMapping(node, context); ImmutableList<PlanNode> rewrittenSubPlans = rewriteSetOperationSubPlans(node, context, rewrittenSymbolMapping); return new IntersectNode(node.getId(), rewrittenSubPlans, rewrittenSymbolMapping, 
ImmutableList.copyOf(rewrittenSymbolMapping.keySet())); } @Override public PlanNode visitExcept(ExceptNode node, RewriteContext<Set<Symbol>> context) { ListMultimap<Symbol, Symbol> rewrittenSymbolMapping = rewriteSetOperationSymbolMapping(node, context); ImmutableList<PlanNode> rewrittenSubPlans = rewriteSetOperationSubPlans(node, context, rewrittenSymbolMapping); return new ExceptNode(node.getId(), rewrittenSubPlans, rewrittenSymbolMapping, ImmutableList.copyOf(rewrittenSymbolMapping.keySet())); } private ListMultimap<Symbol, Symbol> rewriteSetOperationSymbolMapping(SetOperationNode node, RewriteContext<Set<Symbol>> context) { // Find out which output symbols we need to keep ImmutableListMultimap.Builder<Symbol, Symbol> rewrittenSymbolMappingBuilder = ImmutableListMultimap.builder(); for (Symbol symbol : node.getOutputSymbols()) { if (context.get().contains(symbol)) { rewrittenSymbolMappingBuilder.putAll(symbol, node.getSymbolMapping().get(symbol)); } } return rewrittenSymbolMappingBuilder.build(); } private ImmutableList<PlanNode> rewriteSetOperationSubPlans(SetOperationNode node, RewriteContext<Set<Symbol>> context, ListMultimap<Symbol, Symbol> rewrittenSymbolMapping) { // Find the corresponding input symbol to the remaining output symbols and prune the subplans ImmutableList.Builder<PlanNode> rewrittenSubPlans = ImmutableList.builder(); for (int i = 0; i < node.getSources().size(); i++) { ImmutableSet.Builder<Symbol> expectedInputSymbols = ImmutableSet.builder(); for (Collection<Symbol> symbols : rewrittenSymbolMapping.asMap().values()) { expectedInputSymbols.add(Iterables.get(symbols, i)); } rewrittenSubPlans.add(context.rewrite(node.getSources().get(i), expectedInputSymbols.build())); } return rewrittenSubPlans.build(); } @Override public PlanNode visitValues(ValuesNode node, RewriteContext<Set<Symbol>> context) { ImmutableList.Builder<Symbol> rewrittenOutputSymbolsBuilder = ImmutableList.builder(); ImmutableList.Builder<ImmutableList.Builder<Expression>> rowBuildersBuilder = ImmutableList.builder(); // Initialize builder for each row for (int i = 0; i < node.getRows().size(); i++) { rowBuildersBuilder.add(ImmutableList.builder()); } ImmutableList<ImmutableList.Builder<Expression>> rowBuilders = rowBuildersBuilder.build(); for (int i = 0; i < node.getOutputSymbols().size(); i++) { Symbol outputSymbol = node.getOutputSymbols().get(i); // If output symbol is used if (context.get().contains(outputSymbol)) { rewrittenOutputSymbolsBuilder.add(outputSymbol); // Add the value of the output symbol for each row for (int j = 0; j < node.getRows().size(); j++) { rowBuilders.get(j).add(node.getRows().get(j).get(i)); } } } List<List<Expression>> rewrittenRows = rowBuilders.stream() .map((rowBuilder) -> rowBuilder.build()) .collect(toImmutableList()); return new ValuesNode(node.getId(), rewrittenOutputSymbolsBuilder.build(), rewrittenRows); } @Override public PlanNode visitApply(ApplyNode node, RewriteContext<Set<Symbol>> context) { // remove unused apply nodes if (intersection(node.getSubqueryAssignments().getSymbols(), context.get()).isEmpty()) { return context.rewrite(node.getInput(), context.get()); } // extract symbols required subquery plan ImmutableSet.Builder<Symbol> subqueryAssignmentsSymbolsBuilder = ImmutableSet.builder(); Assignments.Builder subqueryAssignments = Assignments.builder(); for (Map.Entry<Symbol, Expression> entry : node.getSubqueryAssignments().getMap().entrySet()) { Symbol output = entry.getKey(); Expression expression = entry.getValue(); if 
(context.get().contains(output)) { subqueryAssignmentsSymbolsBuilder.addAll(DependencyExtractor.extractUnique(expression)); subqueryAssignments.put(output, expression); } } Set<Symbol> subqueryAssignmentsSymbols = subqueryAssignmentsSymbolsBuilder.build(); PlanNode subquery = context.rewrite(node.getSubquery(), subqueryAssignmentsSymbols); // prune not used correlation symbols Set<Symbol> subquerySymbols = DependencyExtractor.extractUnique(subquery); List<Symbol> newCorrelation = node.getCorrelation().stream() .filter(subquerySymbols::contains) .collect(toImmutableList()); Set<Symbol> inputContext = ImmutableSet.<Symbol>builder() .addAll(context.get()) .addAll(newCorrelation) .addAll(subqueryAssignmentsSymbols) // need to include those: e.g: "expr" from "expr IN (SELECT 1)" .build(); PlanNode input = context.rewrite(node.getInput(), inputContext); return new ApplyNode(node.getId(), input, subquery, subqueryAssignments.build(), newCorrelation); } @Override public PlanNode visitAssignUniqueId(AssignUniqueId node, RewriteContext<Set<Symbol>> context) { if (!context.get().contains(node.getIdColumn())) { return context.rewrite(node.getSource(), context.get()); } return context.defaultRewrite(node, context.get()); } @Override public PlanNode visitLateralJoin(LateralJoinNode node, RewriteContext<Set<Symbol>> context) { PlanNode subquery = context.rewrite(node.getSubquery(), context.get()); // remove unused lateral nodes if (subquery.getOutputSymbols().isEmpty() && node.getType() == LateralJoinNode.Type.INNER) { return context.rewrite(node.getInput(), context.get()); } // prune not used correlation symbols Set<Symbol> subquerySymbols = DependencyExtractor.extractUnique(subquery); List<Symbol> newCorrelation = node.getCorrelation().stream() .filter(subquerySymbols::contains) .collect(toImmutableList()); Set<Symbol> inputContext = ImmutableSet.<Symbol>builder() .addAll(context.get()) .addAll(newCorrelation) .build(); PlanNode input = context.rewrite(node.getInput(), inputContext); return new LateralJoinNode(node.getId(), input, subquery, newCorrelation, node.getType()); } } }
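/*
 * A minimal sketch of the pattern the visitors above implement, using simplified
 * stand-in types rather than Presto's planner API (the class and method names here
 * are hypothetical): each visitor receives the set of symbols its parent requires,
 * keeps only the outputs in that set, adds the symbols the node itself consumes,
 * and recurses into its source with the enlarged set, so unreferenced outputs are
 * pruned bottom-up.
 */
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;

final class ProjectionPruningSketch {
    // Hypothetical stand-in for ProjectNode assignments: output symbol -> expression text.
    static Map<String, String> pruneAssignments(Map<String, String> assignments, Set<String> requiredOutputs) {
        Map<String, String> pruned = new LinkedHashMap<>();
        for (Map.Entry<String, String> entry : assignments.entrySet()) {
            // Keep an assignment only if a downstream consumer references its output symbol.
            if (requiredOutputs.contains(entry.getKey())) {
                pruned.put(entry.getKey(), entry.getValue());
            }
        }
        return pruned;
    }
}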
/* * Copyright 2015 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.rabbitmq.http.client.domain; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.annotation.JsonProperty; // TODO: clarify the meaning of these and support them with sensible field names @JsonIgnoreProperties({"recv_cnt", "send_cnt", "send_pend"}) @SuppressWarnings("unused") public class ConnectionInfo { private String name; private String node; private String type; private int channels; private String state; private String user; private String vhost; private String protocol; private int port; @JsonProperty("peer_port") private int peerPort; private String host; @JsonProperty("peer_host") private String peerHost; @JsonProperty("frame_max") private int frameMax; @JsonProperty("channel_max") private int channelMax; @JsonProperty("timeout") private int heartbeatTimeout; @JsonProperty("recv_oct") private long octetsReceived; @JsonProperty("recv_oct_details") private RateDetails octetsReceivedDetails; @JsonProperty("send_oct") private long octetsSent; @JsonProperty("send_oct_details") private RateDetails octetsSentDetails; @JsonProperty("ssl") private boolean usesTLS; @JsonProperty("peer_cert_subject") private String peerCertificateSubject; @JsonProperty("peer_cert_issuer") private String peerCertificateIssuer; @JsonProperty("peer_cert_validity") private String peerCertificateValidity; @JsonProperty("auth_mechanism") private String authMechanism; @JsonProperty("ssl_protocol") private String sslProtocol; @JsonProperty("ssl_key_exchange") private String sslKeyExchange; @JsonProperty("ssl_cipher") private String sslCipher; @JsonProperty("ssl_hash") private String sslHash; @JsonProperty("client_properties") private ClientProperties clientProperties; @Override public String toString() { return "ConnectionInfo{" + "name='" + name + '\'' + ", node='" + node + '\'' + ", type='" + type + '\'' + ", channels=" + channels + ", state='" + state + '\'' + ", user='" + user + '\'' + ", vhost='" + vhost + '\'' + ", protocol='" + protocol + '\'' + ", port=" + port + ", peerPort=" + peerPort + ", host='" + host + '\'' + ", peerHost='" + peerHost + '\'' + ", frameMax=" + frameMax + ", channelMax=" + channelMax + ", heartbeatTimeout=" + heartbeatTimeout + ", octetsReceived=" + octetsReceived + ", octetsReceivedDetails=" + octetsReceivedDetails + ", octetsSent=" + octetsSent + ", octetsSentDetails=" + octetsSentDetails + ", usesTLS=" + usesTLS + ", peerCertificateSubject='" + peerCertificateSubject + '\'' + ", peerCertificateIssuer='" + peerCertificateIssuer + '\'' + ", peerCertificateValidity='" + peerCertificateValidity + '\'' + ", authMechanism='" + authMechanism + '\'' + ", sslProtocol='" + sslProtocol + '\'' + ", sslKeyExchange='" + sslKeyExchange + '\'' + ", sslCipher='" + sslCipher + '\'' + ", sslHash='" + sslHash + '\'' + ", clientProperties=" + clientProperties + ", connectedAt=" + connectedAt + '}'; } @JsonProperty("connected_at") private long connectedAt; public String 
getName() { return name; } public void setName(String name) { this.name = name; } public String getNode() { return node; } public void setNode(String node) { this.node = node; } public String getType() { return type; } public void setType(String type) { this.type = type; } public int getChannels() { return channels; } public void setChannels(int channels) { this.channels = channels; } public String getState() { return state; } public void setState(String state) { this.state = state; } public String getUser() { return user; } public void setUser(String user) { this.user = user; } public String getVhost() { return vhost; } public void setVhost(String vhost) { this.vhost = vhost; } public String getProtocol() { return protocol; } public void setProtocol(String protocol) { this.protocol = protocol; } public int getPort() { return port; } public void setPort(int port) { this.port = port; } public int getPeerPort() { return peerPort; } public void setPeerPort(int peerPort) { this.peerPort = peerPort; } public String getHost() { return host; } public void setHost(String host) { this.host = host; } public String getPeerHost() { return peerHost; } public void setPeerHost(String peerHost) { this.peerHost = peerHost; } public int getFrameMax() { return frameMax; } public void setFrameMax(int frameMax) { this.frameMax = frameMax; } public int getChannelMax() { return channelMax; } public void setChannelMax(int channelMax) { this.channelMax = channelMax; } public long getOctetsReceived() { return octetsReceived; } public void setOctetsReceived(long octetsReceived) { this.octetsReceived = octetsReceived; } public RateDetails getOctetsReceivedDetails() { return octetsReceivedDetails; } public void setOctetsReceivedDetails(RateDetails octetsReceivedDetails) { this.octetsReceivedDetails = octetsReceivedDetails; } public long getOctetsSent() { return octetsSent; } public void setOctetsSent(long octetsSent) { this.octetsSent = octetsSent; } public RateDetails getOctetsSentDetails() { return octetsSentDetails; } public void setOctetsSentDetails(RateDetails octetsSentDetails) { this.octetsSentDetails = octetsSentDetails; } public boolean isUsesTLS() { return usesTLS; } public void setUsesTLS(boolean usesTLS) { this.usesTLS = usesTLS; } public String getPeerCertificateSubject() { return peerCertificateSubject; } public void setPeerCertificateSubject(String peerCertificateSubject) { this.peerCertificateSubject = peerCertificateSubject; } public String getPeerCertificateIssuer() { return peerCertificateIssuer; } public void setPeerCertificateIssuer(String peerCertificateIssuer) { this.peerCertificateIssuer = peerCertificateIssuer; } public String getPeerCertificateValidity() { return peerCertificateValidity; } public void setPeerCertificateValidity(String peerCertificateValidity) { this.peerCertificateValidity = peerCertificateValidity; } public String getAuthMechanism() { return authMechanism; } public void setAuthMechanism(String authMechanism) { this.authMechanism = authMechanism; } public String getSslProtocol() { return sslProtocol; } public void setSslProtocol(String sslProtocol) { this.sslProtocol = sslProtocol; } public String getSslKeyExchange() { return sslKeyExchange; } public void setSslKeyExchange(String sslKeyExchange) { this.sslKeyExchange = sslKeyExchange; } public String getSslCipher() { return sslCipher; } public void setSslCipher(String sslCipher) { this.sslCipher = sslCipher; } public String getSslHash() { return sslHash; } public void setSslHash(String sslHash) { this.sslHash = sslHash; } 
public ClientProperties getClientProperties() { return clientProperties; } public void setClientProperties(ClientProperties clientProperties) { this.clientProperties = clientProperties; } public int getHeartbeatTimeout() { return heartbeatTimeout; } public void setHeartbeatTimeout(int heartbeatTimeout) { this.heartbeatTimeout = heartbeatTimeout; } public long getConnectedAt() { return connectedAt; } public void setConnectedAt(long connectedAt) { this.connectedAt = connectedAt; } }
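/*
 * Usage sketch: ConnectionInfo is a plain Jackson binding, so a payload from the
 * RabbitMQ HTTP API can be mapped with a stock ObjectMapper. The JSON literal below
 * is a trimmed, illustrative payload, not a complete API response.
 */
import com.fasterxml.jackson.databind.ObjectMapper;

class ConnectionInfoUsageSketch {
    public static void main(String[] args) throws Exception {
        String json = "{\"name\":\"127.0.0.1:57610 -> 127.0.0.1:5672\",\"peer_port\":57610,\"recv_oct\":4096,\"ssl\":false}";
        ConnectionInfo info = new ObjectMapper().readValue(json, ConnectionInfo.class);
        // @JsonProperty maps the snake_case API fields to the camelCase bean properties.
        System.out.println(info.getName() + " received " + info.getOctetsReceived() + " octets");
    }
}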
package edu.mayo.mprc.sequest; import com.google.common.base.Charsets; import com.google.common.io.Files; import edu.mayo.mprc.MprcException; import edu.mayo.mprc.config.DaemonConfig; import edu.mayo.mprc.config.DependencyResolver; import edu.mayo.mprc.config.ResourceConfig; import edu.mayo.mprc.config.ui.ResourceConfigBase; import edu.mayo.mprc.config.ui.ServiceUiFactory; import edu.mayo.mprc.config.ui.UiBuilder; import edu.mayo.mprc.daemon.worker.Worker; import edu.mayo.mprc.daemon.worker.WorkerFactoryBase; import edu.mayo.mprc.enginedeployment.DeploymentRequest; import edu.mayo.mprc.enginedeployment.DeploymentService; import edu.mayo.mprc.utilities.FileUtilities; import edu.mayo.mprc.utilities.GZipUtilities; import edu.mayo.mprc.utilities.ProcessCaller; import edu.mayo.mprc.utilities.progress.ProgressReporter; import org.apache.log4j.Logger; import org.springframework.stereotype.Component; import javax.annotation.Resource; import java.io.*; import java.util.*; /** * This is the daemon that deploys a sequest indexed database using makedb.exe through wine. */ public final class SequestDeploymentService extends DeploymentService<SequestDeploymentResult> { private static final Logger LOGGER = Logger.getLogger(SequestDeploymentService.class); public static final int MAX_SEQUEST_INDEX_LENGTH = 20; public static final String SEQUEST_PARAMS_FILE = "sequest.params_file"; private static final String DEPLOYABLE_DB_FOLDER = "deployableDbFolder"; private static final String ENGINE_ROOT_FOLDER = "engineRootFolder"; private static final String WINE_WRAPPER_SCRIPT = "wineWrapperScript"; public static final String TYPE = "sequestDeployer"; public static final String NAME = "Sequest DB Deployer"; public static final String DESC = "Indexes FASTA databases for Sequest. You need this to run Sequest efficiently - on non-indexed databases the performance suffers."; private File makeDBExe; private File sortExe; private File cmdExe; private String wineWrapperScript; private SequestMappingFactory sequestMappingFactory; private SequestToMakeDBConverter converter; public SequestDeploymentService() { } public String getWineWrapperScript() { return wineWrapperScript; } public void setWineWrapperScript(final String wineWrapperScript) { this.wineWrapperScript = wineWrapperScript; } public boolean isUseWine() { return wineWrapperScript != null && !wineWrapperScript.isEmpty(); } public SequestMappingFactory getSequestMappingFactory() { return sequestMappingFactory; } public void setSequestMappingFactory(final SequestMappingFactory sequestMappingFactory) { this.sequestMappingFactory = sequestMappingFactory; } @Override public void setEngineRootFolder(final File engineRootFolder) { super.setEngineRootFolder(engineRootFolder); makeDBExe = new File(getEngineRootFolder(), "makedb4.exe"); sortExe = new File(getEngineRootFolder(), "sort.exe"); cmdExe = new File(getEngineRootFolder(), "cmd.exe"); } /** * does a number of steps that will deploy the database to Sequest. It is assumed that Sequest is installed on the * system where this process is running. 
 * <p/>
 * Given:
 * <dl>
 * <dt>ParamSet name</dt>
 * <dd>Orbitrap_SprotRev_Latest_CabC_OxM</dd>
 * <dt>Deployment folder where database indices should reside finally.</dt>
 * <dd>dbcurator/SprotRev_20071105/</dd>
 * <dt>Fasta file, located in the deployment folder.</dt>
 * <dd>dbcurator/SprotRev_20071105/SprotRev_20071105.fasta</dd>
 * <dt>makedb.params, copied into temporary folder.</dt>
 * <dd>shared/Test_2007111501/params/makedb.params</dd>
 * </dl>
 * <p/>
 * This method creates a directory tree:
 * <ul>
 * <li>dbcurator/SprotRev_20071105/SprotRev_20071105_Orbitrap_SprotRev_Latest_CabC_OxM.makedb.params</li>
 * <li>temporary directory: tmp&lt;timestamp&gt;</li>
 * <li>dbcurator/SprotRev_20071105/tmp&lt;timestamp&gt;/makedb.params -> ../SprotRev_20071105_Orbitrap_SprotRev_Latest_CabC_OxM.makedb.params</li>
 * <li>sort and cmd executables, eg:<br/>
 * dbcurator/SprotRev_20071105/tmp/sort.exe -> .../</li>
 * </ul>
 * <p/>
 * It invokes makedb.exe like:
 * <ul>
 * <li>wine ${TERMO_TOP}/makedb4.exe -D${db}.fasta -O${uniqueName}_${paramsName}.hdr</li>
 * </ul>
 * and parses its output.
 * <p/>
 * Outputs:
 * <ul>
 * <li>HDR, DGT, IDX files (collectively the sequest index); atomically rename to ../&lt;X&gt;.hdr, etc.</li>
 * <li>dbcurator/SprotRev_20071105/SprotRev_20071105_Orbitrap_SprotRev_Latest_CabC_OxM.hdr, etc.</li>
 * <li>makedb.params</li>
 * <li>dbcurator/SprotRev_20071105/SprotRev_20071105_Orbitrap_SprotRev_Latest_CabC_OxM.makedb.params</li>
 * </ul>
 *
 * @param request the deployment request that we want to perform
 */
@Override
public SequestDeploymentResult performDeployment(final DeploymentRequest request) {
    final SequestDeploymentResult reportInto = new SequestDeploymentResult();
    if (isNoDeploymentNecessary(request, reportInto)) {
        reportInto.setDeployedFile(request.getCurationFile());
        return reportInto;
    }
    // key files generated as a result of the deployment; the caller may be interested in these
    final List<File> generatedFiles = new ArrayList<File>();
    // a directory for holding temporary files while the deployment is happening; this will be cleaned up at the end
    final File tempExecFolder = getExecutionFolder(request);
    // the directory where the deployment will reside at the end
    final File deploymentDir = getCurrentDeploymentFolder(request);
    // the name that should be used to represent the curation and parameter set combination
    File decompressedFile = null;
    try {
        FileUtilities.ensureFolderExists(tempExecFolder);

        // 1. set up the deployment folder
        final File deploymentFolder = getCurrentDeploymentFolder(request);
        FileUtilities.ensureFolderExists(deploymentFolder);
        generatedFiles.add(deploymentFolder);

        // 2. map the fasta file to the deployment folder
        final File fastaFile = super.getCurationFile(request);
        final File movedFasta = getDeployedFastaFile(request);
        // TODO: Look into this, simplify, fix
        FileUtilities.linkOrCopy(fastaFile, movedFasta, true, true);
        File toDeploy = movedFasta;
        reportInto.setDeployedFile(movedFasta);
        if (GZipUtilities.isGZipped(toDeploy)) {
            final File decompDest = new File(toDeploy.getAbsolutePath() + "_decomp");
            GZipUtilities.decompressFile(toDeploy, decompDest);
            toDeploy = decompDest;
            decompressedFile = decompDest;
        }
        generatedFiles.add(movedFasta);

        // 3. create a makedb.params in the same folder using the sequest.params file
        final File mkdbFile = new File(tempExecFolder, tempExecFolder.getName() + ".makedb.params");
        final File sequestParamsFile = (File) request.getProperty(SEQUEST_PARAMS_FILE);
        generateMakeDbFile(sequestParamsFile, toDeploy, mkdbFile);
        if (!mkdbFile.exists()) {
            throw new MprcException("makedb parameter file " + mkdbFile.getAbsolutePath() + " is missing.");
        }

        // 4. link or copy the cmd.exe and sort.exe into the deployment folder
        LOGGER.debug("Linking the executable files needed to run makedb:\n"
                + "\t" + cmdExe.getAbsolutePath() + "\n"
                + "\t" + sortExe.getAbsolutePath() + "\n");
        FileUtilities.linkOrCopy(cmdExe, new File(tempExecFolder, "cmd.exe"), /*allowOverwrite*/false, /*symbolic*/true);
        FileUtilities.linkOrCopy(sortExe, new File(tempExecFolder, "sort.exe"), false, /*symbolic*/true);
        FileUtilities.copyFile(mkdbFile, new File(tempExecFolder, "makedb.params"), true);

        // 5. execute makedb4.exe, passing in the fasta file and the output .fasta.hdr file
        final File hdrFile = new File(tempExecFolder, tempExecFolder.getName() + ".fasta.hdr");
        final List<String> cmd = new ArrayList<String>();
        if (isUseWine()) {
            cmd.add(wineWrapperScript);
        }
        cmd.add(makeDBExe.getAbsolutePath());
        cmd.add("-D" + toDeploy.getAbsolutePath());
        cmd.add("-O" + hdrFile.getAbsolutePath());
        final ProcessBuilder pb = new ProcessBuilder(cmd);
        pb.directory(tempExecFolder);
        final ProcessCaller caller = new ProcessCaller(pb);
        caller.runAndCheck("Sequest database indexing (makedb)");

        // 6. Ensure .fasta.hdr was indeed produced
        if (!hdrFile.exists()) {
            throw new MprcException("HDR file " + hdrFile.getAbsolutePath() + " was not produced.");
        } else if (hdrFile.length() == 0) {
            throw new MprcException("HDR file " + hdrFile.getAbsolutePath() + " has zero length.");
        }
        final File newHdr = new File(deploymentDir, hdrFile.getName());
        FileUtilities.rename(hdrFile, newHdr); // move it to the permanent location
        reportInto.setFileToSearchAgainst(newHdr);
        generatedFiles.add(newHdr);

        // 7. Ensure the .dgt file produced is larger than the original fasta file
        final File dgtFile = new File(tempExecFolder, tempExecFolder.getName() + ".fasta.dgt");
        if (!dgtFile.exists()) {
            throw new MprcException("The digest file " + dgtFile.getAbsolutePath() + " that was to be created by makedb.exe does not exist.");
        }
        if (dgtFile.length() < fastaFile.length()) {
            throw new MprcException("The digest file " + dgtFile.getAbsolutePath() + " is shorter than corresponding fasta file " + fastaFile.getAbsolutePath()
                    + " (" + dgtFile.length() + " < " + fastaFile.length() + ")");
        }

        // 8. Move all of the desired generated files to the directory where they will stay
        final File newDgt = new File(deploymentDir, dgtFile.getName());
        FileUtilities.rename(dgtFile, newDgt);
        generatedFiles.add(newDgt);
        final File logFile = new File(tempExecFolder, "makedb.log");
        final File newLog = new File(deploymentDir, tempExecFolder.getName() + ".makedb.log");
        FileUtilities.rename(logFile, newLog);
        generatedFiles.add(newLog);
        final File idxFile = new File(tempExecFolder, tempExecFolder.getName() + ".fasta.idx");
        final File newIdx = new File(deploymentDir, idxFile.getName());
        FileUtilities.rename(idxFile, newIdx);
        generatedFiles.add(newIdx);
        // move makedb file up one level
        final File mkdbUpOne = new File(deploymentDir, mkdbFile.getName());
        FileUtilities.rename(mkdbFile, mkdbUpOne);
        generatedFiles.add(mkdbUpOne);
        final File infoFile = new File(FileUtilities.stripExtension(FileUtilities.stripExtension(newIdx.getAbsolutePath())) + ".info");
        final StringBuilder info = new StringBuilder(200);
        info.append("Paramset used: ").append(getParamSetName(request)).append("\n").append("\n")
                .append("database name: ").append(request.getShortName()).append("\n")
                .append("database description: ").append(request.getTitle())
                .append("\n");
        FileUtilities.writeStringToFile(infoFile, info.toString(), true);
        generatedFiles.add(infoFile);
        reportInto.setGeneratedFiles(generatedFiles);
    } catch (IOException e) {
        throw new MprcException("Failed to transfer files into the deployment folder.", e);
    } finally {
        if (decompressedFile != null) {
            FileUtilities.quietDelete(decompressedFile);
        }
        FileUtilities.deleteNow(tempExecFolder);
    }
    return reportInto;
}

@Override
public SequestDeploymentResult performUndeployment(final DeploymentRequest request) {
    final SequestDeploymentResult reportResult = new SequestDeploymentResult();
    final File deployedFile = getDeployedFastaFile(request);
    cleanUpDeployedFiles(deployedFile, reportResult);
    return reportResult;
}

@Override
protected void validateAndDeleteDeploymentRelatedFiles(final File deployedFastaFile, final File deploymentFolder, final List<File> deletedFiles, final List<File> notDeletedFiles) {
    if (FileUtilities.deleteNow(deploymentFolder)) {
        deletedFiles.add(deploymentFolder);
    } else {
        notDeletedFiles.add(deploymentFolder);
    }
}

public SequestToMakeDBConverter getConverter() {
    return converter;
}

public void setConverter(final SequestToMakeDBConverter converter) {
    this.converter = converter;
}

private File getDeployedFastaFile(final DeploymentRequest request) {
    return new File(getCurrentDeploymentFolder(request), request.getShortName() + ".fasta");
}

/**
 * Creates a makedb.params file from the sequest.params file.
 *
 * @param sequestFile the sequest.params file to convert
 * @param fastaFile   a fasta file we want to use in the makedb.params file
 * @param makeDbFile  the path to the makedb.params file
 */
private void generateMakeDbFile(final File sequestFile, final File fastaFile, final File makeDbFile) {
    try {
        converter.writeMakedbParams(
                converter.convertSequestParamsFileIntoMakeDBPIC(sequestFile, fastaFile, sequestMappingFactory).toString(),
                makeDbFile);
    } catch (IOException e) {
        throw new MprcException("Could not generate the makedb.params file", e);
    }
}

/**
 * Goes through and finds any hdr files that match the given request. If one is found, returns true after
 * adding the hdr and other pertinent information to the reportInto variable.
 * <p/>
 * If the sequest params file indicates a non-specific search, true is returned and the fasta file
 * is just passed back to the caller.
 *
 * @param request    the request for which we want to find a matching previous deployment.
 * @param reportInto a result we can add any previous matching deployments to.
 * @return true if there is an existing deployment that matches
 */
public boolean isNoDeploymentNecessary(final DeploymentRequest request, final SequestDeploymentResult reportInto) {
    final File sequestParams = (File) request.getProperty(SEQUEST_PARAMS_FILE);
    if (specifiesNoEnzyme(sequestParams)) {
        LOGGER.debug("It has been determined that the params file indicates a non-specific enzyme search so skipping deployment. "
                + "Search should just be performed against the raw fasta file.");
        reportInto.setFileToSearchAgainst(request.getCurationFile());
        return true;
    }
    final File fastaFile = getDeployedFastaFile(request);
    try {
        final String want = converter.convertSequestParamsFileIntoMakeDBPIC(sequestParams, fastaFile, sequestMappingFactory).toString();
        final File depdir = getCurrentDeploymentFolder(request);
        if (depdir.exists()) {
            final FilenameFilter makedbFileFilter = new FilenameFilter() {
                @Override
                public boolean accept(final File dir, final String name) {
                    return name.endsWith("makedb.params");
                }
            };
            // look for all makedb.params files in this folder; if one would produce the same index as the given
            // params file would, find the hdr file that was created from that makedb.params file - it will be
            // a hdr file with the same name
            for (final File file : depdir.listFiles(makedbFileFilter)) {
                final String found = Files.toString(file, Charsets.US_ASCII);
                if (found.equals(want)) {
                    final String commonName = file.getAbsolutePath().replace(".makedb.params", "");
                    final File hdr = new File(commonName + ".fasta.hdr");
                    if (hdr.exists()) {
                        reportInto.setFileToSearchAgainst(hdr);
                        reportInto.addMessage("No deployment necessary, it was previously deployed.");
                        return true;
                    } else {
                        throw new FileNotFoundException("A makedb file was found but we could not find an associated .fasta.hdr file: " + file.getAbsolutePath());
                    }
                }
            }
        }
        return false;
    } catch (Exception t) {
        throw new MprcException(t);
    }
}

protected String getParamSetName(final DeploymentRequest request) {
    final File originalSequestParamsFile;
    final Object givenProperty = request.getProperty(SEQUEST_PARAMS_FILE);
    if (givenProperty != null) {
        originalSequestParamsFile = (File) givenProperty;
    } else {
        throw new MprcException("Could not find the key 'sequestParamsFile' which signifies a lack of a params file being given.");
    }
    return originalSequestParamsFile.getParentFile().getName();
}

/**
 * Finds a folder that we can use for execution. The folder name is also used as the unique prefix
 * for the files that are generated. This should really only be called once, but just in case, it stores
 * the execution folder it returns on the request and checks there first.
 *
 * @param request a request that contains a unique name
 * @return an existing folder where we can execute the deployment
 */
protected synchronized File getExecutionFolder(final DeploymentRequest request) {
    File executionFolder = (File) request.getProperty("executionFolder");
    if (executionFolder != null) {
        return executionFolder;
    }
    String compositeName = request.getShortName() + "_" + getParamSetName(request);
    if (compositeName.length() > MAX_SEQUEST_INDEX_LENGTH - 2) {
        compositeName = compositeName.substring(0, MAX_SEQUEST_INDEX_LENGTH - 2);
    }
    char incrementer = 'A';
    // we need to check for execution folder existence before checking for previous deployment result files
    // to make sure that we are not currently moving files
    do {
        executionFolder = new File(getCurrentDeploymentFolder(request), compositeName + "_" + incrementer++);
    }
    while (executionFolder.exists() || !checkForPreviousValidDeployment(getCurrentDeploymentFolder(request), executionFolder.getName()));
    request.addProperty("executionFolder", executionFolder);
    LOGGER.debug("Creating execution folder: " + executionFolder.getAbsolutePath());
    try {
        FileUtilities.ensureFolderExists(executionFolder);
    } catch (Exception t) {
        throw new MprcException("Could not create an execution folder for Sequest deployment at " + executionFolder.getAbsolutePath(), t);
    }
    return executionFolder;
}

/**
 * Takes a folder and a prefix for the key Sequest files to look for in the given folder.
 * <p/>
 * If all necessary files exist, false is returned, indicating that we should not use that name.
 * <p/>
 * If not all files exist, any that do will be deleted. If any could not be deleted, none will be. If they
 * could be deleted, true is returned, meaning that the prefix is now available. If they could not be deleted,
 * false is returned, indicating that the prefix should not be used.
 *
 * @param deploymentFolder the folder to search for files in
 * @param prefix           the prefix for the filenames, which should be unique for a sequest deployment
 * @return true if the prefix is available for use, else false
 */
protected synchronized boolean checkForPreviousValidDeployment(final File deploymentFolder, final String prefix) {
    final Set<File> necessaryFiles = new HashSet<File>();
    necessaryFiles.add(new File(deploymentFolder, prefix + ".fasta.dgt"));
    necessaryFiles.add(new File(deploymentFolder, prefix + ".fasta.hdr"));
    necessaryFiles.add(new File(deploymentFolder, prefix + ".fasta.idx"));
    necessaryFiles.add(new File(deploymentFolder, prefix + ".makedb.log"));
    necessaryFiles.add(new File(deploymentFolder, prefix + ".makedb.params"));
    final Set<File> existingFiles = new HashSet<File>();
    for (final File necessary : necessaryFiles) {
        if (necessary.exists()) {
            existingFiles.add(necessary);
        }
    }
    // if none of the files existed then the prefix is free to use
    if (existingFiles.isEmpty()) {
        return true;
    } else if (existingFiles.size() == necessaryFiles.size()) {
        return false;
    } else if (existingFiles.size() < necessaryFiles.size()) {
        // if we cannot delete all of the files then we will not delete any and just return false
        for (final File existing : existingFiles) {
            if (!existing.canWrite()) {
                return false;
            }
        }
        // if we can delete them all then do it
        for (final File existing : existingFiles) {
            LOGGER.info("Deleting file: " + existing.getAbsolutePath());
            FileUtilities.quietDelete(existing);
        }
        return true;
    } else {
        return false;
    }
}

boolean specifiesNoEnzyme(final File sequestParamsFile) {
    InputStream is = null;
    try {
        is = new FileInputStream(sequestParamsFile);
        // co-opting the Properties functionality, somewhat nefariously
        final Properties p = new Properties();
        p.load(is);
        return (p.getProperty("enzyme_info").equals("Non-Specific 0 0 - -"));
    } catch (FileNotFoundException e) {
        throw new MprcException("Could not find the params file specified at " + sequestParamsFile.getAbsolutePath(), e);
    } catch (IOException e) {
        throw new MprcException("Could not read the sequest params file specified at " + sequestParamsFile.getAbsolutePath(), e);
    } finally {
        FileUtilities.closeQuietly(is);
    }
}

/**
 * Determines the name of the deployment folder we should use.
 *
 * @param request the request we can find a unique name from
 * @return the directory we want to have the created files reside in.
*/ protected File getCurrentDeploymentFolder(final DeploymentRequest request) { return new File(getDeployableDbFolder(), request.getShortName()); } private static final Map<String, List<ProgressReporter>> CO_DEPLOYMENTS = new HashMap<String, List<ProgressReporter>>(); @Override public Map<String, List<ProgressReporter>> getCoDeployments() { return CO_DEPLOYMENTS; } @Override public String check() { return null; } /** * Configuration for the factory */ public static final class Config extends ResourceConfigBase { public Config() { } public Config(final String deployableDbFolder, final String engineRootFolder, final String wineWrapperScript) { put(DEPLOYABLE_DB_FOLDER, deployableDbFolder); put(ENGINE_ROOT_FOLDER, engineRootFolder); put(WINE_WRAPPER_SCRIPT, wineWrapperScript); } } @Component("sequestDeployerFactory") public static final class Factory extends WorkerFactoryBase<Config> { private SequestMappingFactory sequestMappingFactory; private SequestToMakeDBConverter converter; public Factory() { } public SequestMappingFactory getSequestMappingFactory() { return sequestMappingFactory; } @Resource(name = "sequestMappingFactory") public void setSequestMappingFactory(final SequestMappingFactory sequestMappingFactory) { this.sequestMappingFactory = sequestMappingFactory; } public SequestToMakeDBConverter getConverter() { return converter; } @Resource(name = "sequestToMakeDbConverter") public void setConverter(final SequestToMakeDBConverter converter) { this.converter = converter; } @Override public Worker create(final Config config, final DependencyResolver dependencies) { final SequestDeploymentService worker = new SequestDeploymentService(); worker.setConverter(getConverter()); worker.setSequestMappingFactory(getSequestMappingFactory()); worker.setEngineRootFolder(getFile(config, ENGINE_ROOT_FOLDER)); worker.setDeployableDbFolder(getFile(config, DEPLOYABLE_DB_FOLDER)); worker.setWineWrapperScript(config.get(WINE_WRAPPER_SCRIPT)); return worker; } } public static final class Ui implements ServiceUiFactory { @Override public void createUI(final DaemonConfig daemon, final ResourceConfig resource, final UiBuilder builder) { builder .property(DEPLOYABLE_DB_FOLDER, "Database Folder", "Sequest .fasta index files will be put here.<br/>" + "Warning: Sequest is sensitive to path length to database index. If you are getting Sequest errors, check that the database index is placed " + "at a reasonably short path.") .required() .existingDirectory() .defaultValue("var/sequest_index") .property(ENGINE_ROOT_FOLDER, "Makedb Folder", "Path to the makedb package which can be found in the Swift installation directory:" + "<br/><tt>bin/makedb/</tt>") .required() .existingDirectory() .property(WINE_WRAPPER_SCRIPT, "Wine Wrapper Script", "Sequest deployer executable wine wrapper script, for example, wine and wineconsole." + " The wine executables can be found at <a href=\"http://www.winehq.org/\">http://www.winehq.org</a>") .executable(Arrays.asList("-v")) .defaultValue(daemon.getWrapperScript()); } } }
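/*
 * A minimal sketch of the makedb invocation performed in step 5 above, using only
 * java.lang.ProcessBuilder. All paths and the wrapper script name are placeholders;
 * the point is that the wine wrapper, when configured, is simply prepended to the
 * command line in front of makedb4.exe and its -D/-O arguments.
 */
import java.io.File;
import java.util.ArrayList;
import java.util.List;

class MakeDbInvocationSketch {
    public static void main(String[] args) throws Exception {
        List<String> cmd = new ArrayList<String>();
        cmd.add("/opt/swift/bin/wine_wrapper.sh"); // placeholder wine wrapper script
        cmd.add("/opt/sequest/makedb4.exe");       // placeholder engine root + makedb4.exe
        cmd.add("-D/data/dbcurator/SprotRev/SprotRev.fasta");
        cmd.add("-O/data/dbcurator/SprotRev/tmpA/SprotRev_params.fasta.hdr");
        ProcessBuilder pb = new ProcessBuilder(cmd);
        pb.directory(new File("/data/dbcurator/SprotRev/tmpA")); // run inside the temp execution folder
        pb.inheritIO();
        int exitCode = pb.start().waitFor();
        if (exitCode != 0) {
            throw new IllegalStateException("makedb failed with exit code " + exitCode);
        }
    }
}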
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.indices.settings; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.indices.settings.get.GetSettingsResponse; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.Priority; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.engine.VersionConflictEngineException; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.List; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_BLOCKS_METADATA; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_BLOCKS_READ; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_BLOCKS_WRITE; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_READ_ONLY; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertBlocked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertThrows; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.nullValue; public class UpdateSettingsIT extends ESIntegTestCase { public void testInvalidDynamicUpdate() { createIndex("test"); IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> client() .admin() .indices() .prepareUpdateSettings("test") .setSettings(Settings.builder().put("index.dummy", "boom")) .execute() .actionGet()); assertEquals(exception.getCause().getMessage(), "this setting goes boom"); IndexMetaData indexMetaData = client().admin().cluster().prepareState().execute().actionGet().getState().metaData().index("test"); assertNotEquals(indexMetaData.getSettings().get("index.dummy"), "invalid dynamic value"); } @Override protected Collection<Class<? 
extends Plugin>> nodePlugins() { return Arrays.asList(DummySettingPlugin.class); } public static class DummySettingPlugin extends Plugin { public static final Setting<String> DUMMY_SETTING = Setting.simpleString("index.dummy", Setting.Property.IndexScope, Setting.Property.Dynamic); @Override public void onIndexModule(IndexModule indexModule) { indexModule.addSettingsUpdateConsumer(DUMMY_SETTING, (s) -> {}, (s) -> { if (s.equals("boom")) throw new IllegalArgumentException("this setting goes boom"); }); } @Override public List<Setting<?>> getSettings() { return Collections.singletonList(DUMMY_SETTING); } } public void testResetDefault() { createIndex("test"); client() .admin() .indices() .prepareUpdateSettings("test") .setSettings(Settings.builder().put("index.refresh_interval", -1).put("index.translog.flush_threshold_size", "1024b")) .execute() .actionGet(); IndexMetaData indexMetaData = client().admin().cluster().prepareState().execute().actionGet().getState().metaData().index("test"); assertEquals(indexMetaData.getSettings().get("index.refresh_interval"), "-1"); for (IndicesService service : internalCluster().getInstances(IndicesService.class)) { IndexService indexService = service.indexService(resolveIndex("test")); if (indexService != null) { assertEquals(indexService.getIndexSettings().getRefreshInterval().millis(), -1); assertEquals(indexService.getIndexSettings().getFlushThresholdSize().getBytes(), 1024); } } client() .admin() .indices() .prepareUpdateSettings("test") .setSettings(Settings.builder().putNull("index.refresh_interval")) .execute() .actionGet(); indexMetaData = client().admin().cluster().prepareState().execute().actionGet().getState().metaData().index("test"); assertNull(indexMetaData.getSettings().get("index.refresh_interval")); for (IndicesService service : internalCluster().getInstances(IndicesService.class)) { IndexService indexService = service.indexService(resolveIndex("test")); if (indexService != null) { assertEquals(indexService.getIndexSettings().getRefreshInterval().millis(), 1000); assertEquals(indexService.getIndexSettings().getFlushThresholdSize().getBytes(), 1024); } } } public void testOpenCloseUpdateSettings() throws Exception { createIndex("test"); try { client() .admin() .indices() .prepareUpdateSettings("test") .setSettings(Settings.builder() .put("index.refresh_interval", -1) // this one can change .put("index.fielddata.cache", "none")) // this one can't .execute() .actionGet(); fail(); } catch (IllegalArgumentException e) { // all is well } IndexMetaData indexMetaData = client().admin().cluster().prepareState().execute().actionGet().getState().metaData().index("test"); assertThat(indexMetaData.getSettings().get("index.refresh_interval"), nullValue()); assertThat(indexMetaData.getSettings().get("index.fielddata.cache"), nullValue()); // Now verify via dedicated get settings api: GetSettingsResponse getSettingsResponse = client().admin().indices().prepareGetSettings("test").get(); assertThat(getSettingsResponse.getSetting("test", "index.refresh_interval"), nullValue()); assertThat(getSettingsResponse.getSetting("test", "index.fielddata.cache"), nullValue()); client() .admin() .indices() .prepareUpdateSettings("test") .setSettings(Settings.builder().put("index.refresh_interval", -1)) // this one can change .execute() .actionGet(); indexMetaData = client().admin().cluster().prepareState().execute().actionGet().getState().metaData().index("test"); assertThat(indexMetaData.getSettings().get("index.refresh_interval"), equalTo("-1")); // Now verify via 
dedicated get settings api:
getSettingsResponse = client().admin().indices().prepareGetSettings("test").get();
assertThat(getSettingsResponse.getSetting("test", "index.refresh_interval"), equalTo("-1"));

// now close the index, change the non-dynamic setting, and see that it applies
// Wait for the index to turn green before attempting to close it
ClusterHealthResponse health = client()
    .admin()
    .cluster()
    .prepareHealth()
    .setTimeout("30s")
    .setWaitForEvents(Priority.LANGUID)
    .setWaitForGreenStatus()
    .execute()
    .actionGet();
assertThat(health.isTimedOut(), equalTo(false));
client().admin().indices().prepareClose("test").execute().actionGet();
client()
    .admin()
    .indices()
    .prepareUpdateSettings("test")
    .setSettings(Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1))
    .execute()
    .actionGet();
indexMetaData = client().admin().cluster().prepareState().execute().actionGet().getState().metaData().index("test");
assertThat(indexMetaData.getNumberOfReplicas(), equalTo(1));
client()
    .admin()
    .indices()
    .prepareUpdateSettings("test")
    .setSettings(Settings.builder()
        .put("index.refresh_interval", "1s") // this one can change
        .put("index.fielddata.cache", "none")) // this one can't
    .execute()
    .actionGet();
indexMetaData = client().admin().cluster().prepareState().execute().actionGet().getState().metaData().index("test");
assertThat(indexMetaData.getSettings().get("index.refresh_interval"), equalTo("1s"));
assertThat(indexMetaData.getSettings().get("index.fielddata.cache"), equalTo("none"));
// Now verify via dedicated get settings api:
getSettingsResponse = client().admin().indices().prepareGetSettings("test").get();
assertThat(getSettingsResponse.getSetting("test", "index.refresh_interval"), equalTo("1s"));
assertThat(getSettingsResponse.getSetting("test", "index.fielddata.cache"), equalTo("none"));
}

public void testEngineGCDeletesSetting() throws InterruptedException {
    createIndex("test");
    client().prepareIndex("test", "type", "1").setSource("f", 1).get(); // set version to 1
    client().prepareDelete("test", "type", "1").get(); // sets version to 2
    // the delete is still in cache; this should work and set version to 3
    client().prepareIndex("test", "type", "1").setSource("f", 2).setVersion(2).get();
    client().admin().indices().prepareUpdateSettings("test").setSettings(Settings.builder().put("index.gc_deletes", 0)).get();
    client().prepareDelete("test", "type", "1").get(); // sets version to 4
    Thread.sleep(300); // wait for cache time to change TODO: this needs to be solved better. To be discussed.
    // the delete should no longer be in cache
    assertThrows(client().prepareIndex("test", "type", "1").setSource("f", 3).setVersion(4), VersionConflictEngineException.class);
}

public void testUpdateSettingsWithBlocks() {
    createIndex("test");
    ensureGreen("test");
    Settings.Builder builder = Settings.builder().put("index.refresh_interval", -1);
    for (String blockSetting : Arrays.asList(SETTING_BLOCKS_READ, SETTING_BLOCKS_WRITE)) {
        try {
            enableIndexBlock("test", blockSetting);
            assertAcked(client().admin().indices().prepareUpdateSettings("test").setSettings(builder));
        } finally {
            disableIndexBlock("test", blockSetting);
        }
    }
    // Updating settings is blocked when the index has a read_only or metadata block
    for (String blockSetting : Arrays.asList(SETTING_READ_ONLY, SETTING_BLOCKS_METADATA)) {
        try {
            enableIndexBlock("test", blockSetting);
            assertBlocked(client().admin().indices().prepareUpdateSettings("test").setSettings(builder));
        } finally {
            disableIndexBlock("test", blockSetting);
        }
    }
}
}
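/*
 * Sketch of the plugin hook the tests above exercise: a dynamic, index-scoped
 * setting whose update validator rejects bad values before they reach the cluster
 * state. The setting name and accepted values here are illustrative only; the
 * registration calls mirror DummySettingPlugin above.
 */
import java.util.Collections;
import java.util.List;

import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.index.IndexModule;
import org.elasticsearch.plugins.Plugin;

public class ValidatedSettingPlugin extends Plugin {
    static final Setting<String> MODE =
            Setting.simpleString("index.example.mode", Setting.Property.IndexScope, Setting.Property.Dynamic);

    @Override
    public void onIndexModule(IndexModule indexModule) {
        // The consumer is a no-op; the validator runs first and aborts the update by throwing.
        indexModule.addSettingsUpdateConsumer(MODE, value -> {}, value -> {
            if (!value.isEmpty() && !"fast".equals(value) && !"safe".equals(value)) {
                throw new IllegalArgumentException("unsupported mode: " + value);
            }
        });
    }

    @Override
    public List<Setting<?>> getSettings() {
        return Collections.singletonList(MODE);
    }
}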
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.raptor.metadata; import com.facebook.presto.metadata.InMemoryNodeManager; import com.facebook.presto.metadata.MetadataUtil.TableMetadataBuilder; import com.facebook.presto.metadata.NodeVersion; import com.facebook.presto.metadata.PrestoNode; import com.facebook.presto.raptor.RaptorColumnHandle; import com.facebook.presto.raptor.RaptorConnectorId; import com.facebook.presto.raptor.RaptorMetadata; import com.facebook.presto.raptor.RaptorSplitManager; import com.facebook.presto.raptor.RaptorTableHandle; import com.facebook.presto.raptor.backup.BackupStore; import com.facebook.presto.raptor.backup.FileBackupStore; import com.facebook.presto.raptor.storage.FileStorageService; import com.facebook.presto.raptor.storage.ShardRecoveryManager; import com.facebook.presto.raptor.storage.StorageManager; import com.facebook.presto.raptor.storage.StorageService; import com.facebook.presto.spi.ColumnHandle; import com.facebook.presto.spi.ConnectorPartition; import com.facebook.presto.spi.ConnectorPartitionResult; import com.facebook.presto.spi.ConnectorSession; import com.facebook.presto.spi.ConnectorSplit; import com.facebook.presto.spi.ConnectorSplitSource; import com.facebook.presto.spi.ConnectorTableHandle; import com.facebook.presto.spi.ConnectorTableMetadata; import com.facebook.presto.spi.PrestoException; import com.facebook.presto.spi.TupleDomain; import com.facebook.presto.spi.type.BigintType; import com.facebook.presto.type.TypeRegistry; import com.google.common.collect.ImmutableList; import io.airlift.json.JsonCodec; import io.airlift.units.Duration; import org.skife.jdbi.v2.DBI; import org.skife.jdbi.v2.Handle; import org.testng.annotations.AfterMethod; import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; import java.io.File; import java.net.URI; import java.net.URISyntaxException; import java.util.List; import java.util.Optional; import java.util.UUID; import static com.facebook.presto.raptor.metadata.DatabaseShardManager.shardIndexTable; import static com.facebook.presto.raptor.metadata.TestDatabaseShardManager.shardInfo; import static com.facebook.presto.raptor.storage.TestOrcStorageManager.createOrcStorageManager; import static com.facebook.presto.raptor.util.Types.checkType; import static com.facebook.presto.spi.type.TimeZoneKey.UTC_KEY; import static com.facebook.presto.spi.type.VarcharType.VARCHAR; import static com.google.common.collect.Iterables.getOnlyElement; import static com.google.common.io.Files.createTempDir; import static io.airlift.concurrent.MoreFutures.getFutureValue; import static io.airlift.json.JsonCodec.jsonCodec; import static io.airlift.testing.FileUtils.deleteRecursively; import static java.lang.String.format; import static java.util.Locale.ENGLISH; import static java.util.concurrent.TimeUnit.MINUTES; import static java.util.stream.Collectors.toList; import static org.testng.Assert.assertEquals; import static org.testng.Assert.assertTrue; @Test(singleThreaded = true) public 
class TestRaptorSplitManager { private static final JsonCodec<ShardInfo> SHARD_INFO_CODEC = jsonCodec(ShardInfo.class); private static final JsonCodec<ShardDelta> SHARD_DELTA_CODEC = jsonCodec(ShardDelta.class); private static final ConnectorSession SESSION = new ConnectorSession("user", UTC_KEY, ENGLISH, System.currentTimeMillis(), null); private static final ConnectorTableMetadata TEST_TABLE = TableMetadataBuilder.tableMetadataBuilder("demo", "test_table") .partitionKeyColumn("ds", VARCHAR) .column("foo", VARCHAR) .column("bar", BigintType.BIGINT) .build(); private Handle dummyHandle; private File temporary; private RaptorSplitManager raptorSplitManager; private ConnectorTableHandle tableHandle; private ShardManager shardManager; private StorageManager storageManagerWithBackup; private long tableId; @BeforeMethod public void setup() throws Exception { TypeRegistry typeRegistry = new TypeRegistry(); DBI dbi = new DBI("jdbc:h2:mem:test" + System.nanoTime()); dbi.registerMapper(new TableColumn.Mapper(typeRegistry)); dummyHandle = dbi.open(); temporary = createTempDir(); shardManager = new DatabaseShardManager(dbi); InMemoryNodeManager nodeManager = new InMemoryNodeManager(); File dataDir = new File(temporary, "data"); File backupDir = new File(temporary, "backup"); FileBackupStore fileBackupStore = new FileBackupStore(backupDir); fileBackupStore.start(); Optional<BackupStore> backupStore = Optional.of(fileBackupStore); StorageService storageService = new FileStorageService(dataDir); ShardRecoveryManager recoveryManager = new ShardRecoveryManager(storageService, Optional.empty(), new InMemoryNodeManager(), shardManager, new Duration(5, MINUTES), 10); StorageManager storageManager = createOrcStorageManager(storageService, Optional.empty(), recoveryManager); storageManagerWithBackup = createOrcStorageManager(storageService, backupStore, recoveryManager); String nodeName = UUID.randomUUID().toString(); nodeManager.addNode("raptor", new PrestoNode(nodeName, new URI("http://127.0.0.1/"), NodeVersion.UNKNOWN)); RaptorConnectorId connectorId = new RaptorConnectorId("raptor"); RaptorMetadata metadata = new RaptorMetadata(connectorId, dbi, shardManager, SHARD_INFO_CODEC, SHARD_DELTA_CODEC); metadata.createTable(SESSION, TEST_TABLE); tableHandle = metadata.getTableHandle(SESSION, TEST_TABLE.getTable()); List<ShardInfo> shards = ImmutableList.<ShardInfo>builder() .add(shardInfo(UUID.randomUUID(), nodeName)) .add(shardInfo(UUID.randomUUID(), nodeName)) .add(shardInfo(UUID.randomUUID(), nodeName)) .add(shardInfo(UUID.randomUUID(), nodeName)) .build(); tableId = checkType(tableHandle, RaptorTableHandle.class, "tableHandle").getTableId(); List<ColumnInfo> columns = metadata.getColumnHandles(tableHandle).values().stream() .map(handle -> checkType(handle, RaptorColumnHandle.class, "columnHandle")) .map(ColumnInfo::fromHandle) .collect(toList()); shardManager.commitShards(tableId, columns, shards, Optional.empty()); raptorSplitManager = new RaptorSplitManager(connectorId, nodeManager, shardManager, storageManager); } @AfterMethod public void teardown() { dummyHandle.close(); deleteRecursively(temporary); } @Test public void testSanity() throws InterruptedException { ConnectorPartitionResult partitionResult = raptorSplitManager.getPartitions(tableHandle, TupleDomain.<ColumnHandle>all()); assertEquals(partitionResult.getPartitions().size(), 1); assertTrue(partitionResult.getUndeterminedTupleDomain().isAll()); List<ConnectorPartition> partitions = partitionResult.getPartitions(); ConnectorPartition partition = 
getOnlyElement(partitions); TupleDomain<ColumnHandle> columnUnionedTupleDomain = TupleDomain.columnWiseUnion(partition.getTupleDomain(), partition.getTupleDomain()); assertEquals(columnUnionedTupleDomain, TupleDomain.<ColumnHandle>all()); ConnectorSplitSource splitSource = raptorSplitManager.getPartitionSplits(tableHandle, partitions); int splitCount = 0; while (!splitSource.isFinished()) { splitCount += getFutureValue(splitSource.getNextBatch(1000)).size(); } assertEquals(splitCount, 4); } @Test(expectedExceptions = PrestoException.class, expectedExceptionsMessageRegExp = "No host for shard .* found: \\[\\]") public void testNoHostForShard() throws InterruptedException { deleteShardNodes(); ConnectorPartitionResult result = raptorSplitManager.getPartitions(tableHandle, TupleDomain.<ColumnHandle>all()); ConnectorSplitSource splitSource = raptorSplitManager.getPartitionSplits(tableHandle, result.getPartitions()); getFutureValue(splitSource.getNextBatch(1000)); } @Test public void testAssignRandomNodeWhenBackupAvailable() throws InterruptedException, URISyntaxException { InMemoryNodeManager nodeManager = new InMemoryNodeManager(); PrestoNode node = new PrestoNode(UUID.randomUUID().toString(), new URI("http://127.0.0.1/"), NodeVersion.UNKNOWN); nodeManager.addNode("fbraptor", node); RaptorSplitManager raptorSplitManagerWithBackup = new RaptorSplitManager(new RaptorConnectorId("fbraptor"), nodeManager, shardManager, storageManagerWithBackup); deleteShardNodes(); ConnectorPartitionResult result = raptorSplitManagerWithBackup.getPartitions(tableHandle, TupleDomain.<ColumnHandle>all()); ConnectorSplitSource partitionSplit = raptorSplitManagerWithBackup.getPartitionSplits(tableHandle, result.getPartitions()); List<ConnectorSplit> batch = getFutureValue(partitionSplit.getNextBatch(1), PrestoException.class); assertEquals(getOnlyElement(getOnlyElement(batch).getAddresses()), node.getHostAndPort()); } @Test(expectedExceptions = PrestoException.class, expectedExceptionsMessageRegExp = "No nodes available to run query") public void testNoNodes() throws InterruptedException, URISyntaxException { deleteShardNodes(); RaptorSplitManager raptorSplitManagerWithBackup = new RaptorSplitManager(new RaptorConnectorId("fbraptor"), new InMemoryNodeManager(), shardManager, storageManagerWithBackup); ConnectorPartitionResult result = raptorSplitManagerWithBackup.getPartitions(tableHandle, TupleDomain.<ColumnHandle>all()); ConnectorSplitSource splitSource = raptorSplitManagerWithBackup.getPartitionSplits(tableHandle, result.getPartitions()); getFutureValue(splitSource.getNextBatch(1000), PrestoException.class); } private void deleteShardNodes() { dummyHandle.execute("DELETE FROM shard_nodes"); dummyHandle.execute(format("UPDATE %s SET node_ids = ''", shardIndexTable(tableId))); } }
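// A minimal helper sketching the drain pattern the tests above repeat inline: poll the
// ConnectorSplitSource until isFinished(), collecting each batch. It assumes only types the
// test already uses (ConnectorSplit, ConnectorSplitSource, MoreFutures.getFutureValue); the
// batch size of 1000 mirrors testSanity and is otherwise arbitrary.
import com.facebook.presto.spi.ConnectorSplit;
import com.facebook.presto.spi.ConnectorSplitSource;
import java.util.ArrayList;
import java.util.List;
import static io.airlift.concurrent.MoreFutures.getFutureValue;

final class SplitSourceDrainer {
    private SplitSourceDrainer() {}

    // Returns every split the source produces, in arrival order.
    static List<ConnectorSplit> drain(ConnectorSplitSource source) {
        List<ConnectorSplit> splits = new ArrayList<>();
        while (!source.isFinished()) {
            splits.addAll(getFutureValue(source.getNextBatch(1000)));
        }
        return splits;
    }
}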
/* Copyright 2015 Samsung Electronics Co., LTD * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.gearvrf.physics; import org.gearvrf.GVRComponent; import org.gearvrf.GVRContext; import org.gearvrf.GVRRenderData; import org.gearvrf.GVRSceneObject; /** * Represents a rigid body that can be static or dynamic. You can set a mass and apply * physics forces to it. * <p> * By default it is a static body with infinite mass (value 0) and does not move under simulation. * A dynamic body with a mass defined is fully simulated. * <p> * Every {@linkplain org.gearvrf.GVRSceneObject scene object} can represent a rigid body once * it has a {@link GVRRigidBody} component attached to it. * * You must set up the owner's {@link org.gearvrf.GVRTransform} values, such as the initial position, * and the mass value of the rigid body before attaching it to its owner. */ public class GVRRigidBody extends GVRPhysicsWorldObject { public static final int DYNAMIC = 0; public static final int STATIC = 1; public static final int KINEMATIC = 2; static { System.loadLibrary("gvrf-physics"); } private final int mCollisionGroup; private final GVRPhysicsContext mPhysicsContext; /** * Constructs a new instance to simulate a rigid body in {@link GVRWorld}. * * @param gvrContext The context of the app. */ public GVRRigidBody(GVRContext gvrContext) { this(gvrContext, 0.0f); } /** * Constructs a new instance to simulate a rigid body in {@link GVRWorld}. * * @param gvrContext The context of the app. * @param mass The mass of this rigid body. */ public GVRRigidBody(GVRContext gvrContext, float mass) { this(gvrContext, mass, -1); } /** * Constructs a new instance to simulate a rigid body in {@link GVRWorld}. * * @param gvrContext The context of the app. * @param mass The mass of this rigid body. * @param collisionGroup The id of the collision group this rigid body belongs to * in the {@link GVRCollisionMatrix}. The rigid body collides with * everything if {@code collisionGroup} is out of the range 0...15. */ public GVRRigidBody(GVRContext gvrContext, float mass, int collisionGroup) { super(gvrContext, Native3DRigidBody.ctor()); Native3DRigidBody.setMass(getNative(), mass); mCollisionGroup = collisionGroup; mPhysicsContext = GVRPhysicsContext.getInstance(); } static public long getComponentType() { return Native3DRigidBody.getComponentType(); } /** * Returns the {@linkplain GVRWorld physics world} of this {@linkplain GVRRigidBody rigid body}. * * @return The physics world of this {@link GVRRigidBody} */ public GVRWorld getWorld() { return getWorld(getOwnerObject()); } /** * Returns the {@linkplain GVRWorld physics world} of the {@linkplain org.gearvrf.GVRScene scene}. * * @param owner Owner of the {@link GVRRigidBody} * @return The {@link GVRWorld} of the scene. */ private static GVRWorld getWorld(GVRSceneObject owner) { return getWorldFromAscendant(owner); } /** * Looks for a {@link GVRWorld} component among the ancestors of the given scene object. * * @param worldOwner Scene object from which to start searching for a physics world.
* @return Physics world from the scene. */ private static GVRWorld getWorldFromAscendant(GVRSceneObject worldOwner) { GVRComponent world = null; while (worldOwner != null && world == null) { world = worldOwner.getComponent(GVRWorld.getComponentType()); worldOwner = worldOwner.getParent(); } return (GVRWorld) world; } /** * Establishes how this rigid body will behave in the simulation. * * @param type type of simulation desired for the rigid body: * <table> * <tr><td>DYNAMIC</td><td>Collides with other objects, moved by simulation</td></tr> * <tr><td>STATIC</td><td>Collides with other objects, does not move</td></tr> * <tr><td>KINEMATIC</td><td>Collides with other objects, moved by application</td></tr> * </table> */ public void setSimulationType(int type) { Native3DRigidBody.setSimulationType(getNative(), type); } /** * Queries how this rigid body will behave in the simulation. * * @return type of simulation desired for the rigid body * <table> * <tr><td>DYNAMIC</td><td>Collides with other objects, moved by simulation</td></tr> * <tr><td>STATIC</td><td>Collides with other objects, does not move</td></tr> * <tr><td>KINEMATIC</td><td>Collides with other objects, moved by application</td></tr> * </table> */ public int getSimulationType() { return Native3DRigidBody.getSimulationType(getNative()); } /** * Returns the mass of the body. * * @return The mass of the body. */ public float getMass() { return Native3DRigidBody.getMass(getNative()); } /** * Sets the mass of the body. * * @param mass The mass of the body. */ public void setMass(float mass) { Native3DRigidBody.setMass(getNative(), mass); } /** * Apply a central force vector [X, Y, Z] to this {@linkplain GVRRigidBody rigid body} * * @param x factor on the 'X' axis. * @param y factor on the 'Y' axis. * @param z factor on the 'Z' axis. */ public void applyCentralForce(final float x, final float y, final float z) { mPhysicsContext.runOnPhysicsThread(new Runnable() { @Override public void run() { Native3DRigidBody.applyCentralForce(getNative(), x, y, z); } }); } /** * Apply a torque vector [X, Y, Z] to this {@linkplain GVRRigidBody rigid body} * * @param x factor on the 'X' axis. * @param y factor on the 'Y' axis. * @param z factor on the 'Z' axis. */ public void applyTorque(final float x, final float y, final float z) { mPhysicsContext.runOnPhysicsThread(new Runnable() { @Override public void run() { Native3DRigidBody.applyTorque(getNative(), x, y, z); } }); } /** * Sets the gravity acceleration vector [X, Y, Z] on this {@linkplain GVRRigidBody rigid body} * * @param x factor on the 'X' axis. * @param y factor on the 'Y' axis. * @param z factor on the 'Z' axis. */ public void setGravity(float x, float y, float z) { Native3DRigidBody.setGravity(getNative(), x, y, z); } /** * Sets linear and angular damping on this {@linkplain GVRRigidBody rigid body} * * @param linear factor on how much the rigid body resists translation. * @param angular factor on how much the rigid body resists rotation. */ public void setDamping(float linear, float angular) { Native3DRigidBody.setDamping(getNative(), linear, angular); } /** * Sets a linear velocity [X, Y, Z] on this {@linkplain GVRRigidBody rigid body} * * @param x factor on the 'X' axis. * @param y factor on the 'Y' axis. * @param z factor on the 'Z' axis. */ public void setLinearVelocity(float x, float y, float z) { Native3DRigidBody.setLinearVelocity(getNative(), x, y, z); } /** * Sets an angular velocity [X, Y, Z] on this {@linkplain GVRRigidBody rigid body} * * @param x factor on the 'X' axis.
* @param y factor on the 'Y' axis. * @param z factor on the 'Z' axis. */ public void setAngularVelocity(float x, float y, float z) { Native3DRigidBody.setAngularVelocity(getNative(), x, y, z); } /** * Sets an angular factor [X, Y, Z] that influences torque on this {@linkplain GVRRigidBody rigid body} * * @param x factor on the 'X' axis. * @param y factor on the 'Y' axis. * @param z factor on the 'Z' axis. */ public void setAngularFactor(float x, float y, float z) { Native3DRigidBody.setAngularFactor(getNative(), x, y, z); } /** * Sets a linear factor [X, Y, Z] that influences forces acting on this {@linkplain GVRRigidBody rigid body} * * @param x factor on the 'X' axis. * @param y factor on the 'Y' axis. * @param z factor on the 'Z' axis. */ public void setLinearFactor(float x, float y, float z) { Native3DRigidBody.setLinearFactor(getNative(), x, y, z); } /** * Sets the sleeping thresholds that, when reached, lead to the deactivation of this {@linkplain GVRRigidBody rigid body} * * @param linear threshold for the linear velocity * @param angular threshold for the angular velocity */ public void setSleepingThresholds(float linear, float angular) { Native3DRigidBody.setSleepingThresholds(getNative(), linear, angular); } /** * Sets a {@linkplain GVRRigidBody rigid body} to be ignored (true) or not (false) during collision checks * * @param collisionObject the rigid body to ignore (or not) during collision checks * @param ignore boolean to indicate if the specified object will be ignored or not */ public void setIgnoreCollisionCheck(GVRRigidBody collisionObject, boolean ignore) { Native3DRigidBody.setIgnoreCollisionCheck(getNative(), collisionObject.getNative(), ignore); } /** * Returns the gravity acceleration float array [x,y,z] on this {@linkplain GVRRigidBody rigid body}. * * @return The gravity acceleration vector as a float array */ public float[] getGravity() { return Native3DRigidBody.getGravity(getNative()); } /** * Returns the linear velocity float array [x,y,z] on this {@linkplain GVRRigidBody rigid body}. * * @return The linear velocity vector as a float array */ public float[] getLinearVelocity() { return Native3DRigidBody.getLinearVelocity(getNative()); } /** * Returns the angular velocity float array [x,y,z] on this {@linkplain GVRRigidBody rigid body}. * * @return The angular velocity vector as a float array */ public float[] getAngularVelocity() { return Native3DRigidBody.getAngularVelocity(getNative()); } /** * Returns the angular factor float array [x,y,z] on this {@linkplain GVRRigidBody rigid body}. * * @return The angular factor vector as a float array */ public float[] getAngularFactor() { return Native3DRigidBody.getAngularFactor(getNative()); } /** * Returns the linear factor float array [x,y,z] on this {@linkplain GVRRigidBody rigid body}. * * @return The linear factor vector as a float array */ public float[] getLinearFactor() { return Native3DRigidBody.getLinearFactor(getNative()); } /** * Returns the damping factors [angular,linear] on this {@linkplain GVRRigidBody rigid body}. * * @return The damping factors as a float array */ public float[] getDamping() { return Native3DRigidBody.getDamping(getNative()); } /** * Returns the friction factor on this {@linkplain GVRRigidBody rigid body}.
* * @return The friction factor scalar as a float */ public float getFriction() { return Native3DRigidBody.getFriction(getNative()); } /** * Sets the friction factor of this {@linkplain GVRRigidBody rigid body} * * @param n the friction factor */ public void setFriction(float n) { Native3DRigidBody.setFriction(getNative(), n); } /** * Returns the restitution factor on this {@linkplain GVRRigidBody rigid body}. * * @return The restitution factor scalar as a float */ public float getRestitution() { return Native3DRigidBody.getRestitution(getNative()); } /** * Sets the restitution factor of this {@linkplain GVRRigidBody rigid body} * * @param n the restitution factor */ public void setRestitution(float n) { Native3DRigidBody.setRestitution(getNative(), n); } /** * Returns the continuous collision detection motion threshold factor on this {@linkplain GVRRigidBody rigid body}. * * @return The continuous collision detection motion threshold factor scalar as a float */ public float getCcdMotionThreshold() { return Native3DRigidBody.getCcdMotionThreshold(getNative()); } /** * Sets the continuous collision detection motion threshold factor of this {@linkplain GVRRigidBody rigid body} * * @param n the continuous collision detection motion threshold factor */ public void setCcdMotionThreshold(float n) { Native3DRigidBody.setCcdMotionThreshold(getNative(), n); } /** * Returns the contact processing threshold factor for this {@linkplain GVRRigidBody rigid body}. * * @return The contact processing threshold factor scalar as a float */ public float getContactProcessingThreshold() { return Native3DRigidBody.getContactProcessingThreshold(getNative()); } /** * Sets the contact processing threshold factor of this {@linkplain GVRRigidBody rigid body} * * @param n the contact processing threshold factor */ public void setContactProcessingThreshold(float n) { Native3DRigidBody.setContactProcessingThreshold(getNative(), n); } /** * Returns the collision group of this {@linkplain GVRRigidBody rigid body}.
* * @return The collision group id as an int */ public int getCollisionGroup() { return mCollisionGroup; } @Override public void onAttach(GVRSceneObject newOwner) { if (newOwner.getCollider() == null) { throw new UnsupportedOperationException("You must have a collider attached to the scene object before attaching the rigid body"); } final GVRRenderData renderData = newOwner.getRenderData(); if (renderData != null && renderData.getMesh() == null) { throw new UnsupportedOperationException("You must have a mesh attached to the scene object before attaching the rigid body"); } super.onAttach(newOwner); } @Override protected void addToWorld(GVRWorld world) { if (world != null) { world.addBody(this); } } @Override protected void removeFromWorld(GVRWorld world) { if (world != null) { world.removeBody(this); } } } class Native3DRigidBody { static native long ctor(); static native long getComponentType(); static native float getMass(long jrigid_body); static native void setMass(long jrigid_body, float jmass); static native void applyCentralForce(long jrigid_body, float x, float y, float z); static native void applyTorque(long jrigid_body, float x, float y, float z); static native void setGravity(long jrigid_body, float x, float y, float z); static native void setDamping(long jrigid_body, float linear, float angular); static native void setLinearVelocity(long jrigid_body, float x, float y, float z); static native void setAngularVelocity(long jrigid_body, float x, float y, float z); static native void setAngularFactor(long jrigid_body, float x, float y, float z); static native void setLinearFactor(long jrigid_body, float x, float y, float z); static native void setFriction(long jrigid_body, float n); static native void setRestitution(long jrigid_body, float n); static native void setSleepingThresholds(long jrigid_body, float linear, float angular); static native void setCcdMotionThreshold(long jrigid_body, float n); static native void setContactProcessingThreshold(long jrigid_body, float n); static native void setIgnoreCollisionCheck(long jrigid_body, long jcollision_object, boolean ignore); static native float[] getGravity(long jrigid_body); static native float[] getLinearVelocity(long jrigid_body); static native float[] getAngularVelocity(long jrigid_body); static native float[] getAngularFactor(long jrigid_body); static native float[] getLinearFactor(long jrigid_body); static native float[] getDamping(long jrigid_body); static native float getFriction(long jrigid_body); static native float getRestitution(long jrigid_body); static native float getCcdMotionThreshold(long jrigid_body); static native float getContactProcessingThreshold(long jrigid_body); static native int getSimulationType(long jrigid_body); static native void setSimulationType(long jrigid_body, int jtype); }
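// A usage sketch for the API above, under the contract onAttach() enforces: the owner must
// already have a collider, and its transform should be set before the body is attached.
// GVRBoxCollider and GVRSceneObject.attachComponent() are assumed here from the wider
// GearVRf framework; they are not defined in this file.
import org.gearvrf.GVRBoxCollider;
import org.gearvrf.GVRContext;
import org.gearvrf.GVRSceneObject;
import org.gearvrf.physics.GVRRigidBody;

final class RigidBodySketch {
    static GVRRigidBody addDynamicBody(GVRContext gvrContext, GVRSceneObject owner) {
        // Position the owner first; the simulation starts from the attach-time transform.
        owner.getTransform().setPosition(0.0f, 5.0f, -10.0f);
        // onAttach() throws if no collider is present, so attach one before the body.
        owner.attachComponent(new GVRBoxCollider(gvrContext));
        GVRRigidBody body = new GVRRigidBody(gvrContext, 1.0f); // non-zero mass, so not static
        body.setSimulationType(GVRRigidBody.DYNAMIC);
        owner.attachComponent(body);
        // Dispatched to the physics thread internally (see applyCentralForce above).
        body.applyCentralForce(0.0f, 10.0f, 0.0f);
        return body;
    }
}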
/* * Copyright (C) 2006 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package android.widget; import android.app.AlertDialog; import android.content.Context; import android.content.DialogInterface; import android.content.Intent; import android.content.res.Resources; import android.media.AudioManager; import android.media.MediaPlayer; import android.media.Metadata; import android.media.MediaPlayer.OnCompletionListener; import android.media.MediaPlayer.OnErrorListener; import android.media.MediaPlayer.OnInfoListener; import android.net.Uri; import android.util.AttributeSet; import android.util.Log; import android.view.KeyEvent; import android.view.MotionEvent; import android.view.SurfaceHolder; import android.view.SurfaceView; import android.view.View; import android.view.accessibility.AccessibilityEvent; import android.view.accessibility.AccessibilityNodeInfo; import android.widget.MediaController.MediaPlayerControl; import java.io.IOException; import java.util.Map; /** * Displays a video file. The VideoView class * can load video from various sources (such as resources or content * providers), takes care of computing its measurement from the video so that * it can be used in any layout manager, and provides various display options * such as scaling and tinting. */ public class VideoView extends SurfaceView implements MediaPlayerControl { private String TAG = "VideoView"; // settable by the client private Uri mUri; private Map<String, String> mHeaders; private int mDuration; // all possible internal states private static final int STATE_ERROR = -1; private static final int STATE_IDLE = 0; private static final int STATE_PREPARING = 1; private static final int STATE_PREPARED = 2; private static final int STATE_PLAYING = 3; private static final int STATE_PAUSED = 4; private static final int STATE_PLAYBACK_COMPLETED = 5; // mCurrentState is a VideoView object's current state. // mTargetState is the state that a method caller intends to reach. // For instance, regardless of the VideoView object's current state, // calling pause() intends to bring the object to a target state // of STATE_PAUSED.
private int mCurrentState = STATE_IDLE; private int mTargetState = STATE_IDLE; // All the stuff we need for playing and showing a video private SurfaceHolder mSurfaceHolder = null; private MediaPlayer mMediaPlayer = null; private int mVideoWidth; private int mVideoHeight; private int mSurfaceWidth; private int mSurfaceHeight; private MediaController mMediaController; private OnCompletionListener mOnCompletionListener; private MediaPlayer.OnPreparedListener mOnPreparedListener; private int mCurrentBufferPercentage; private OnErrorListener mOnErrorListener; private OnInfoListener mOnInfoListener; private int mSeekWhenPrepared; // recording the seek position while preparing private boolean mCanPause; private boolean mCanSeekBack; private boolean mCanSeekForward; public VideoView(Context context) { super(context); initVideoView(); } public VideoView(Context context, AttributeSet attrs) { // Delegates to the three-argument constructor, which already calls initVideoView(); // calling it again here would register the SurfaceHolder callback twice. this(context, attrs, 0); } public VideoView(Context context, AttributeSet attrs, int defStyle) { super(context, attrs, defStyle); initVideoView(); } @Override protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { //Log.i("@@@@", "onMeasure"); int width = getDefaultSize(mVideoWidth, widthMeasureSpec); int height = getDefaultSize(mVideoHeight, heightMeasureSpec); if (mVideoWidth > 0 && mVideoHeight > 0) { if ( mVideoWidth * height > width * mVideoHeight ) { //Log.i("@@@", "image too tall, correcting"); height = width * mVideoHeight / mVideoWidth; } else if ( mVideoWidth * height < width * mVideoHeight ) { //Log.i("@@@", "image too wide, correcting"); width = height * mVideoWidth / mVideoHeight; } else { //Log.i("@@@", "aspect ratio is correct: " + //width+"/"+height+"="+ //mVideoWidth+"/"+mVideoHeight); } } //Log.i("@@@@@@@@@@", "setting size: " + width + 'x' + height); setMeasuredDimension(width, height); } @Override public void onInitializeAccessibilityEvent(AccessibilityEvent event) { super.onInitializeAccessibilityEvent(event); event.setClassName(VideoView.class.getName()); } @Override public void onInitializeAccessibilityNodeInfo(AccessibilityNodeInfo info) { super.onInitializeAccessibilityNodeInfo(info); info.setClassName(VideoView.class.getName()); } public int resolveAdjustedSize(int desiredSize, int measureSpec) { int result = desiredSize; int specMode = MeasureSpec.getMode(measureSpec); int specSize = MeasureSpec.getSize(measureSpec); switch (specMode) { case MeasureSpec.UNSPECIFIED: /* Parent says we can be as big as we want. Just don't be larger * than max size imposed on ourselves. */ result = desiredSize; break; case MeasureSpec.AT_MOST: /* Parent says we can be as big as we want, up to specSize. * Don't be larger than specSize, and don't be larger than * the max size imposed on ourselves. */ result = Math.min(desiredSize, specSize); break; case MeasureSpec.EXACTLY: // No choice. Do what we are told.
result = specSize; break; } return result; } private void initVideoView() { mVideoWidth = 0; mVideoHeight = 0; getHolder().addCallback(mSHCallback); getHolder().setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS); setFocusable(true); setFocusableInTouchMode(true); requestFocus(); mCurrentState = STATE_IDLE; mTargetState = STATE_IDLE; } public void setVideoPath(String path) { setVideoURI(Uri.parse(path)); } public void setVideoURI(Uri uri) { setVideoURI(uri, null); } /** * @hide */ public void setVideoURI(Uri uri, Map<String, String> headers) { mUri = uri; mHeaders = headers; mSeekWhenPrepared = 0; openVideo(); requestLayout(); invalidate(); } public void stopPlayback() { if (mMediaPlayer != null) { mMediaPlayer.stop(); mMediaPlayer.release(); mMediaPlayer = null; mCurrentState = STATE_IDLE; mTargetState = STATE_IDLE; } } private void openVideo() { if (mUri == null || mSurfaceHolder == null) { // not ready for playback just yet, will try again later return; } // Tell the music playback service to pause // TODO: these constants need to be published somewhere in the framework. Intent i = new Intent("com.android.music.musicservicecommand"); i.putExtra("command", "pause"); mContext.sendBroadcast(i); // we shouldn't clear the target state, because somebody might have // called start() previously release(false); try { mMediaPlayer = new MediaPlayer(); mMediaPlayer.setOnPreparedListener(mPreparedListener); mMediaPlayer.setOnVideoSizeChangedListener(mSizeChangedListener); mDuration = -1; mMediaPlayer.setOnCompletionListener(mCompletionListener); mMediaPlayer.setOnErrorListener(mErrorListener); mMediaPlayer.setOnInfoListener(mOnInfoListener); mMediaPlayer.setOnBufferingUpdateListener(mBufferingUpdateListener); mCurrentBufferPercentage = 0; mMediaPlayer.setDataSource(mContext, mUri, mHeaders); mMediaPlayer.setDisplay(mSurfaceHolder); mMediaPlayer.setAudioStreamType(AudioManager.STREAM_MUSIC); mMediaPlayer.setScreenOnWhilePlaying(true); mMediaPlayer.prepareAsync(); // we don't set the target state here either, but preserve the // target state that was there before. mCurrentState = STATE_PREPARING; attachMediaController(); } catch (IOException ex) { Log.w(TAG, "Unable to open content: " + mUri, ex); mCurrentState = STATE_ERROR; mTargetState = STATE_ERROR; mErrorListener.onError(mMediaPlayer, MediaPlayer.MEDIA_ERROR_UNKNOWN, 0); return; } catch (IllegalArgumentException ex) { Log.w(TAG, "Unable to open content: " + mUri, ex); mCurrentState = STATE_ERROR; mTargetState = STATE_ERROR; mErrorListener.onError(mMediaPlayer, MediaPlayer.MEDIA_ERROR_UNKNOWN, 0); return; } } public void setMediaController(MediaController controller) { if (mMediaController != null) { mMediaController.hide(); } mMediaController = controller; attachMediaController(); } private void attachMediaController() { if (mMediaPlayer != null && mMediaController != null) { mMediaController.setMediaPlayer(this); View anchorView = this.getParent() instanceof View ? 
(View)this.getParent() : this; mMediaController.setAnchorView(anchorView); mMediaController.setEnabled(isInPlaybackState()); } } MediaPlayer.OnVideoSizeChangedListener mSizeChangedListener = new MediaPlayer.OnVideoSizeChangedListener() { public void onVideoSizeChanged(MediaPlayer mp, int width, int height) { mVideoWidth = mp.getVideoWidth(); mVideoHeight = mp.getVideoHeight(); if (mVideoWidth != 0 && mVideoHeight != 0) { getHolder().setFixedSize(mVideoWidth, mVideoHeight); requestLayout(); } } }; MediaPlayer.OnPreparedListener mPreparedListener = new MediaPlayer.OnPreparedListener() { public void onPrepared(MediaPlayer mp) { mCurrentState = STATE_PREPARED; // Get the capabilities of the player for this stream Metadata data = mp.getMetadata(MediaPlayer.METADATA_ALL, MediaPlayer.BYPASS_METADATA_FILTER); if (data != null) { mCanPause = !data.has(Metadata.PAUSE_AVAILABLE) || data.getBoolean(Metadata.PAUSE_AVAILABLE); mCanSeekBack = !data.has(Metadata.SEEK_BACKWARD_AVAILABLE) || data.getBoolean(Metadata.SEEK_BACKWARD_AVAILABLE); mCanSeekForward = !data.has(Metadata.SEEK_FORWARD_AVAILABLE) || data.getBoolean(Metadata.SEEK_FORWARD_AVAILABLE); } else { mCanPause = mCanSeekBack = mCanSeekForward = true; } if (mOnPreparedListener != null) { mOnPreparedListener.onPrepared(mMediaPlayer); } if (mMediaController != null) { mMediaController.setEnabled(true); } mVideoWidth = mp.getVideoWidth(); mVideoHeight = mp.getVideoHeight(); int seekToPosition = mSeekWhenPrepared; // mSeekWhenPrepared may be changed after seekTo() call if (seekToPosition != 0) { seekTo(seekToPosition); } if (mVideoWidth != 0 && mVideoHeight != 0) { //Log.i("@@@@", "video size: " + mVideoWidth +"/"+ mVideoHeight); getHolder().setFixedSize(mVideoWidth, mVideoHeight); if (mSurfaceWidth == mVideoWidth && mSurfaceHeight == mVideoHeight) { // We didn't actually change the size (it was already at the size // we need), so we won't get a "surface changed" callback, so // start the video here instead of in the callback. if (mTargetState == STATE_PLAYING) { start(); if (mMediaController != null) { mMediaController.show(); } } else if (!isPlaying() && (seekToPosition != 0 || getCurrentPosition() > 0)) { if (mMediaController != null) { // Show the media controls when we're paused into a video and make 'em stick. mMediaController.show(0); } } } } else { // We don't know the video size yet, but should start anyway. // The video size might be reported to us later. if (mTargetState == STATE_PLAYING) { start(); } } } }; private MediaPlayer.OnCompletionListener mCompletionListener = new MediaPlayer.OnCompletionListener() { public void onCompletion(MediaPlayer mp) { mCurrentState = STATE_PLAYBACK_COMPLETED; mTargetState = STATE_PLAYBACK_COMPLETED; if (mMediaController != null) { mMediaController.hide(); } if (mOnCompletionListener != null) { mOnCompletionListener.onCompletion(mMediaPlayer); } } }; private MediaPlayer.OnErrorListener mErrorListener = new MediaPlayer.OnErrorListener() { public boolean onError(MediaPlayer mp, int framework_err, int impl_err) { Log.d(TAG, "Error: " + framework_err + "," + impl_err); mCurrentState = STATE_ERROR; mTargetState = STATE_ERROR; if (mMediaController != null) { mMediaController.hide(); } /* If an error handler has been supplied, use it and finish. */ if (mOnErrorListener != null) { if (mOnErrorListener.onError(mMediaPlayer, framework_err, impl_err)) { return true; } } /* Otherwise, pop up an error dialog so the user knows that * something bad has happened. 
Only try and pop up the dialog * if we're attached to a window. When we're going away and no * longer have a window, don't bother showing the user an error. */ if (getWindowToken() != null) { Resources r = mContext.getResources(); int messageId; if (framework_err == MediaPlayer.MEDIA_ERROR_NOT_VALID_FOR_PROGRESSIVE_PLAYBACK) { messageId = com.android.internal.R.string.VideoView_error_text_invalid_progressive_playback; } else { messageId = com.android.internal.R.string.VideoView_error_text_unknown; } new AlertDialog.Builder(mContext) .setMessage(messageId) .setPositiveButton(com.android.internal.R.string.VideoView_error_button, new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int whichButton) { /* If we get here, there is no onError listener, so * at least inform them that the video is over. */ if (mOnCompletionListener != null) { mOnCompletionListener.onCompletion(mMediaPlayer); } } }) .setCancelable(false) .show(); } return true; } }; private MediaPlayer.OnBufferingUpdateListener mBufferingUpdateListener = new MediaPlayer.OnBufferingUpdateListener() { public void onBufferingUpdate(MediaPlayer mp, int percent) { mCurrentBufferPercentage = percent; } }; /** * Register a callback to be invoked when the media file * is loaded and ready to go. * * @param l The callback that will be run */ public void setOnPreparedListener(MediaPlayer.OnPreparedListener l) { mOnPreparedListener = l; } /** * Register a callback to be invoked when the end of a media file * has been reached during playback. * * @param l The callback that will be run */ public void setOnCompletionListener(OnCompletionListener l) { mOnCompletionListener = l; } /** * Register a callback to be invoked when an error occurs * during playback or setup. If no listener is specified, * or if the listener returned false, VideoView will inform * the user of any errors. * * @param l The callback that will be run */ public void setOnErrorListener(OnErrorListener l) { mOnErrorListener = l; } /** * Register a callback to be invoked when an informational event * occurs during playback or setup. 
* * @param l The callback that will be run */ public void setOnInfoListener(OnInfoListener l) { mOnInfoListener = l; } SurfaceHolder.Callback mSHCallback = new SurfaceHolder.Callback() { public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) { mSurfaceWidth = w; mSurfaceHeight = h; boolean isValidState = (mTargetState == STATE_PLAYING); boolean hasValidSize = (mVideoWidth == w && mVideoHeight == h); if (mMediaPlayer != null && isValidState && hasValidSize) { if (mSeekWhenPrepared != 0) { seekTo(mSeekWhenPrepared); } start(); } } public void surfaceCreated(SurfaceHolder holder) { mSurfaceHolder = holder; openVideo(); } public void surfaceDestroyed(SurfaceHolder holder) { // after we return from this we can't use the surface any more mSurfaceHolder = null; if (mMediaController != null) mMediaController.hide(); release(true); } }; /* * release the media player in any state */ private void release(boolean cleartargetstate) { if (mMediaPlayer != null) { mMediaPlayer.reset(); mMediaPlayer.release(); mMediaPlayer = null; mCurrentState = STATE_IDLE; if (cleartargetstate) { mTargetState = STATE_IDLE; } } } @Override public boolean onTouchEvent(MotionEvent ev) { if (isInPlaybackState() && mMediaController != null) { toggleMediaControlsVisiblity(); } return false; } @Override public boolean onTrackballEvent(MotionEvent ev) { if (isInPlaybackState() && mMediaController != null) { toggleMediaControlsVisiblity(); } return false; } @Override public boolean onKeyDown(int keyCode, KeyEvent event) { boolean isKeyCodeSupported = keyCode != KeyEvent.KEYCODE_BACK && keyCode != KeyEvent.KEYCODE_VOLUME_UP && keyCode != KeyEvent.KEYCODE_VOLUME_DOWN && keyCode != KeyEvent.KEYCODE_VOLUME_MUTE && keyCode != KeyEvent.KEYCODE_MENU && keyCode != KeyEvent.KEYCODE_CALL && keyCode != KeyEvent.KEYCODE_ENDCALL; if (isInPlaybackState() && isKeyCodeSupported && mMediaController != null) { if (keyCode == KeyEvent.KEYCODE_HEADSETHOOK || keyCode == KeyEvent.KEYCODE_MEDIA_PLAY_PAUSE) { if (mMediaPlayer.isPlaying()) { pause(); mMediaController.show(); } else { start(); mMediaController.hide(); } return true; } else if (keyCode == KeyEvent.KEYCODE_MEDIA_PLAY) { if (!mMediaPlayer.isPlaying()) { start(); mMediaController.hide(); } return true; } else if (keyCode == KeyEvent.KEYCODE_MEDIA_STOP || keyCode == KeyEvent.KEYCODE_MEDIA_PAUSE) { if (mMediaPlayer.isPlaying()) { pause(); mMediaController.show(); } return true; } else { toggleMediaControlsVisiblity(); } } return super.onKeyDown(keyCode, event); } private void toggleMediaControlsVisiblity() { if (mMediaController.isShowing()) { mMediaController.hide(); } else { mMediaController.show(); } } public void start() { if (isInPlaybackState()) { mMediaPlayer.start(); mCurrentState = STATE_PLAYING; } mTargetState = STATE_PLAYING; } public void pause() { if (isInPlaybackState()) { if (mMediaPlayer.isPlaying()) { mMediaPlayer.pause(); mCurrentState = STATE_PAUSED; } } mTargetState = STATE_PAUSED; } public void suspend() { release(false); } public void resume() { openVideo(); } // cache duration as mDuration for faster access public int getDuration() { if (isInPlaybackState()) { if (mDuration > 0) { return mDuration; } mDuration = mMediaPlayer.getDuration(); return mDuration; } mDuration = -1; return mDuration; } public int getCurrentPosition() { if (isInPlaybackState()) { return mMediaPlayer.getCurrentPosition(); } return 0; } public void seekTo(int msec) { if (isInPlaybackState()) { mMediaPlayer.seekTo(msec); mSeekWhenPrepared = 0; } else { mSeekWhenPrepared = msec; } 
} public boolean isPlaying() { return isInPlaybackState() && mMediaPlayer.isPlaying(); } public int getBufferPercentage() { if (mMediaPlayer != null) { return mCurrentBufferPercentage; } return 0; } private boolean isInPlaybackState() { return (mMediaPlayer != null && mCurrentState != STATE_ERROR && mCurrentState != STATE_IDLE && mCurrentState != STATE_PREPARING); } public boolean canPause() { return mCanPause; } public boolean canSeekBackward() { return mCanSeekBack; } public boolean canSeekForward() { return mCanSeekForward; } }
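// A minimal Activity sketch for the VideoView above: hand it a URI, attach a MediaController,
// and call start() once prepared. The URI is a placeholder. Note that calling start() before
// the surface exists merely records STATE_PLAYING as the target state; openVideo() and
// onPrepared() begin playback once the view is ready.
import android.app.Activity;
import android.media.MediaPlayer;
import android.net.Uri;
import android.os.Bundle;
import android.widget.MediaController;
import android.widget.VideoView;

public class VideoPlayerActivity extends Activity {
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        final VideoView videoView = new VideoView(this);
        setContentView(videoView);
        // The controller anchors itself to the view's parent (see attachMediaController()).
        videoView.setMediaController(new MediaController(this));
        videoView.setVideoURI(Uri.parse("file:///sdcard/sample.mp4")); // placeholder URI
        videoView.setOnPreparedListener(new MediaPlayer.OnPreparedListener() {
            @Override
            public void onPrepared(MediaPlayer mp) {
                videoView.start();
            }
        });
    }
}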
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.oozie; import java.io.IOException; import java.io.Writer; import java.util.ArrayList; import java.util.Date; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.StringTokenizer; import org.apache.hadoop.conf.Configuration; import org.apache.oozie.client.CoordinatorAction; import org.apache.oozie.client.CoordinatorJob; import org.apache.oozie.client.OozieClient; import org.apache.oozie.client.WorkflowJob; import org.apache.oozie.client.rest.RestConstants; import org.apache.oozie.command.CommandException; import org.apache.oozie.command.coord.CoordActionInfoXCommand; import org.apache.oozie.util.CoordActionsInDateRange; import org.apache.oozie.command.coord.CoordChangeXCommand; import org.apache.oozie.command.coord.CoordJobXCommand; import org.apache.oozie.command.coord.CoordJobsXCommand; import org.apache.oozie.command.coord.CoordKillXCommand; import org.apache.oozie.command.coord.CoordRerunXCommand; import org.apache.oozie.command.coord.CoordResumeXCommand; import org.apache.oozie.command.coord.CoordSubmitXCommand; import org.apache.oozie.command.coord.CoordSuspendXCommand; import org.apache.oozie.service.DagXLogInfoService; import org.apache.oozie.service.Services; import org.apache.oozie.service.XLogService; import org.apache.oozie.util.ParamChecker; import org.apache.oozie.util.XLog; import org.apache.oozie.util.XLogStreamer; import com.google.common.annotations.VisibleForTesting; public class CoordinatorEngine extends BaseEngine { private static XLog LOG = XLog.getLog(CoordinatorEngine.class); /** * Create a system Coordinator engine, with no user and no group. */ public CoordinatorEngine() { if (!Services.get().getConf().getBoolean(USE_XCOMMAND, true)) { LOG.debug("Oozie CoordinatorEngine is not using XCommands."); } else { LOG.debug("Oozie CoordinatorEngine is using XCommands."); } } /** * Create a Coordinator engine to perform operations on behalf of a user. * * @param user user name.
*/ public CoordinatorEngine(String user) { this(); this.user = ParamChecker.notEmpty(user, "user"); } /* * (non-Javadoc) * * @see org.apache.oozie.BaseEngine#getDefinition(java.lang.String) */ @Override public String getDefinition(String jobId) throws BaseEngineException { CoordinatorJobBean job = getCoordJobWithNoActionInfo(jobId); return job.getOrigJobXml(); } /** * @param jobId * @return CoordinatorJobBean * @throws BaseEngineException */ private CoordinatorJobBean getCoordJobWithNoActionInfo(String jobId) throws BaseEngineException { try { return new CoordJobXCommand(jobId).call(); } catch (CommandException ex) { throw new BaseEngineException(ex); } } /** * @param actionId * @return CoordinatorActionBean * @throws BaseEngineException */ public CoordinatorActionBean getCoordAction(String actionId) throws BaseEngineException { try { return new CoordActionInfoXCommand(actionId).call(); } catch (CommandException ex) { throw new BaseEngineException(ex); } } /* * (non-Javadoc) * * @see org.apache.oozie.BaseEngine#getCoordJob(java.lang.String) */ @Override public CoordinatorJobBean getCoordJob(String jobId) throws BaseEngineException { try { return new CoordJobXCommand(jobId).call(); } catch (CommandException ex) { throw new BaseEngineException(ex); } } /* * (non-Javadoc) * * @see org.apache.oozie.BaseEngine#getCoordJob(java.lang.String, java.lang.String, int, int) */ @Override public CoordinatorJobBean getCoordJob(String jobId, String filter, int start, int length, boolean desc) throws BaseEngineException { List<String> filterList = parseStatusFilter(filter); try { return new CoordJobXCommand(jobId, filterList, start, length, desc) .call(); } catch (CommandException ex) { throw new BaseEngineException(ex); } } /* * (non-Javadoc) * * @see org.apache.oozie.BaseEngine#getJobIdForExternalId(java.lang.String) */ @Override public String getJobIdForExternalId(String externalId) throws CoordinatorEngineException { return null; } /* * (non-Javadoc) * * @see org.apache.oozie.BaseEngine#kill(java.lang.String) */ @Override public void kill(String jobId) throws CoordinatorEngineException { try { new CoordKillXCommand(jobId).call(); LOG.info("User " + user + " killed the Coordinator job " + jobId); } catch (CommandException e) { throw new CoordinatorEngineException(e); } } /* (non-Javadoc) * @see org.apache.oozie.BaseEngine#change(java.lang.String, java.lang.String) */ @Override public void change(String jobId, String changeValue) throws CoordinatorEngineException { try { new CoordChangeXCommand(jobId, changeValue).call(); LOG.info("User " + user + " changed the Coordinator job " + jobId + " to " + changeValue); } catch (CommandException e) { throw new CoordinatorEngineException(e); } } @Override @Deprecated public void reRun(String jobId, Configuration conf) throws BaseEngineException { throw new BaseEngineException(new XException(ErrorCode.E0301, "invalid use of rerun")); } /** * Rerun coordinator actions for given rerunType * * @param jobId * @param rerunType * @param scope * @param refresh * @param noCleanup * @throws BaseEngineException */ public CoordinatorActionInfo reRun(String jobId, String rerunType, String scope, boolean refresh, boolean noCleanup) throws BaseEngineException { try { return new CoordRerunXCommand(jobId, rerunType, scope, refresh, noCleanup).call(); } catch (CommandException ex) { throw new BaseEngineException(ex); } } /* * (non-Javadoc) * * @see org.apache.oozie.BaseEngine#resume(java.lang.String) */ @Override public void resume(String jobId) throws CoordinatorEngineException { 
try { new CoordResumeXCommand(jobId).call(); } catch (CommandException e) { throw new CoordinatorEngineException(e); } } @Override @Deprecated public void start(String jobId) throws BaseEngineException { throw new BaseEngineException(new XException(ErrorCode.E0301, "invalid use of start")); } /* * (non-Javadoc) * * @see org.apache.oozie.BaseEngine#streamLog(java.lang.String, * java.io.Writer) */ @Override public void streamLog(String jobId, Writer writer) throws IOException, BaseEngineException { XLogStreamer.Filter filter = new XLogStreamer.Filter(); filter.setParameter(DagXLogInfoService.JOB, jobId); CoordinatorJobBean job = getCoordJobWithNoActionInfo(jobId); Services.get().get(XLogService.class).streamLog(filter, job.getCreatedTime(), new Date(), writer); } /** * Add list of actions to the filter based on conditions * * @param jobId Job Id * @param logRetrievalScope Value for the retrieval type * @param logRetrievalType Based on which filter criteria the log is retrieved * @param writer writer to stream the log to * @throws IOException * @throws BaseEngineException * @throws CommandException */ public void streamLog(String jobId, String logRetrievalScope, String logRetrievalType, Writer writer) throws IOException, BaseEngineException, CommandException { XLogStreamer.Filter filter = new XLogStreamer.Filter(); filter.setParameter(DagXLogInfoService.JOB, jobId); if (logRetrievalScope != null && logRetrievalType != null) { // if coordinator action logs are to be retrieved based on action id range if (logRetrievalType.equals(RestConstants.JOB_LOG_ACTION)) { // Use a set implementation that maintains order of elements to achieve reproducibility: Set<String> actionSet = new LinkedHashSet<String>(); String[] list = logRetrievalScope.split(","); for (String s : list) { s = s.trim(); if (s.contains("-")) { String[] range = s.split("-"); if (range.length != 2) { throw new CommandException(ErrorCode.E0302, "format is wrong for action's range '" + s + "'"); } int start; int end; try { start = Integer.parseInt(range[0].trim()); } catch (NumberFormatException ne) { throw new CommandException(ErrorCode.E0302, "could not parse " + range[0].trim() + " into an integer", ne); } try { end = Integer.parseInt(range[1].trim()); } catch (NumberFormatException ne) { throw new CommandException(ErrorCode.E0302, "could not parse " + range[1].trim() + " into an integer", ne); } if (start > end) { throw new CommandException(ErrorCode.E0302, "format is wrong for action's range '" + s + "'"); } for (int i = start; i <= end; i++) { actionSet.add(jobId + "@" + i); } } else { try { Integer.parseInt(s); } catch (NumberFormatException ne) { throw new CommandException(ErrorCode.E0302, "format is wrong for action id '" + s + "'.
Integer only."); } actionSet.add(jobId + "@" + s); } } Iterator<String> actionsIterator = actionSet.iterator(); StringBuilder orSeparatedActions = new StringBuilder(""); boolean orRequired = false; while (actionsIterator.hasNext()) { if (orRequired) { orSeparatedActions.append("|"); } orSeparatedActions.append(actionsIterator.next().toString()); orRequired = true; } if (actionSet.size() > 1 && orRequired) { orSeparatedActions.insert(0, "("); orSeparatedActions.append(")"); } filter.setParameter(DagXLogInfoService.ACTION, orSeparatedActions.toString()); } // if coordinator action logs are to be retrieved based on date range // this block gets the corresponding list of coordinator actions to be used by the log filter if (logRetrievalType.equalsIgnoreCase(RestConstants.JOB_LOG_DATE)) { List<String> coordActionIdList = null; try { coordActionIdList = CoordActionsInDateRange.getCoordActionIdsFromDates(jobId, logRetrievalScope); } catch (XException xe) { throw new CommandException(ErrorCode.E0302, "Error in date range for coordinator actions", xe); } StringBuilder orSeparatedActions = new StringBuilder(""); boolean orRequired = false; for (String coordActionId : coordActionIdList) { if (orRequired) { orSeparatedActions.append("|"); } orSeparatedActions.append(coordActionId); orRequired = true; } if (coordActionIdList.size() > 1 && orRequired) { orSeparatedActions.insert(0, "("); orSeparatedActions.append(")"); } filter.setParameter(DagXLogInfoService.ACTION, orSeparatedActions.toString()); } } CoordinatorJobBean job = getCoordJobWithNoActionInfo(jobId); Services.get().get(XLogService.class).streamLog(filter, job.getCreatedTime(), new Date(), writer); } /* * (non-Javadoc) * * @see * org.apache.oozie.BaseEngine#submitJob(org.apache.hadoop.conf.Configuration * , boolean) */ @Override public String submitJob(Configuration conf, boolean startJob) throws CoordinatorEngineException { try { CoordSubmitXCommand submit = new CoordSubmitXCommand(conf); return submit.call(); } catch (CommandException ex) { throw new CoordinatorEngineException(ex); } } /* * (non-Javadoc) * * @see * org.apache.oozie.BaseEngine#dryRunSubmit(org.apache.hadoop.conf.Configuration) */ @Override public String dryRunSubmit(Configuration conf) throws CoordinatorEngineException { try { CoordSubmitXCommand submit = new CoordSubmitXCommand(true, conf); return submit.call(); } catch (CommandException ex) { throw new CoordinatorEngineException(ex); } } /* * (non-Javadoc) * * @see org.apache.oozie.BaseEngine#suspend(java.lang.String) */ @Override public void suspend(String jobId) throws CoordinatorEngineException { try { new CoordSuspendXCommand(jobId).call(); } catch (CommandException e) { throw new CoordinatorEngineException(e); } } /* * (non-Javadoc) * * @see org.apache.oozie.BaseEngine#getJob(java.lang.String) */ @Override public WorkflowJob getJob(String jobId) throws BaseEngineException { throw new BaseEngineException(new XException(ErrorCode.E0301, "cannot get a workflow job from CoordinatorEngine")); } /* * (non-Javadoc) * * @see org.apache.oozie.BaseEngine#getJob(java.lang.String, int, int) */ @Override public WorkflowJob getJob(String jobId, int start, int length) throws BaseEngineException { throw new BaseEngineException(new XException(ErrorCode.E0301, "cannot get a workflow job from CoordinatorEngine")); } private static final Set<String> FILTER_NAMES = new HashSet<String>(); static { FILTER_NAMES.add(OozieClient.FILTER_USER); FILTER_NAMES.add(OozieClient.FILTER_NAME); FILTER_NAMES.add(OozieClient.FILTER_GROUP); 
FILTER_NAMES.add(OozieClient.FILTER_STATUS); FILTER_NAMES.add(OozieClient.FILTER_ID); FILTER_NAMES.add(OozieClient.FILTER_FREQUENCY); FILTER_NAMES.add(OozieClient.FILTER_UNIT); } /** * @param filter * @param start * @param len * @return CoordinatorJobInfo * @throws CoordinatorEngineException */ public CoordinatorJobInfo getCoordJobs(String filter, int start, int len) throws CoordinatorEngineException { Map<String, List<String>> filterList = parseFilter(filter); try { return new CoordJobsXCommand(filterList, start, len).call(); } catch (CommandException ex) { throw new CoordinatorEngineException(ex); } } // Parses the filter string (e.g status=RUNNING;status=WAITING) and returns a list of status values private List<String> parseStatusFilter(String filter) throws CoordinatorEngineException { List<String> filterList = new ArrayList<String>(); if (filter != null) { //split name;value pairs StringTokenizer st = new StringTokenizer(filter, ";"); while (st.hasMoreTokens()) { String token = st.nextToken(); if (token.contains("=")) { String[] pair = token.split("="); if (pair.length != 2) { throw new CoordinatorEngineException(ErrorCode.E0421, token, "elements must be name=value pairs"); } if (pair[0].equalsIgnoreCase("status")) { String statusValue = pair[1]; try { CoordinatorAction.Status.valueOf(statusValue); } catch (IllegalArgumentException ex) { StringBuilder validStatusList = new StringBuilder(); for (CoordinatorAction.Status status: CoordinatorAction.Status.values()){ validStatusList.append(status.toString()+" "); } // Check for incorrect status value throw new CoordinatorEngineException(ErrorCode.E0421, filter, XLog.format( "invalid status value [{0}]." + " Valid status values are: [{1}]", statusValue, validStatusList)); } filterList.add(statusValue); } else { // Check for incorrect filter option throw new CoordinatorEngineException(ErrorCode.E0421, filter, XLog.format( "invalid filter [{0}]." + " The only valid filter is \"status\"", pair[0])); } } else { throw new CoordinatorEngineException(ErrorCode.E0421, token, "elements must be name=value pairs"); } } } return filterList; } /** * @param filter * @return Map<String, List<String>> * @throws CoordinatorEngineException */ @VisibleForTesting Map<String, List<String>> parseFilter(String filter) throws CoordinatorEngineException { Map<String, List<String>> map = new HashMap<String, List<String>>(); boolean isTimeUnitSpecified = false; String timeUnit = "MINUTE"; boolean isFrequencySpecified = false; String frequency = ""; if (filter != null) { StringTokenizer st = new StringTokenizer(filter, ";"); while (st.hasMoreTokens()) { String token = st.nextToken(); if (token.contains("=")) { String[] pair = token.split("="); if (pair.length != 2) { throw new CoordinatorEngineException(ErrorCode.E0420, filter, "elements must be name=value pairs"); } if (!FILTER_NAMES.contains(pair[0].toLowerCase())) { throw new CoordinatorEngineException(ErrorCode.E0420, filter, XLog.format("invalid name [{0}]", pair[0])); } if (pair[0].equalsIgnoreCase("frequency")) { isFrequencySpecified = true; try { frequency = (int) Float.parseFloat(pair[1]) + ""; continue; } catch (NumberFormatException NANException) { throw new CoordinatorEngineException(ErrorCode.E0420, filter, XLog.format( "invalid value [{0}] for frequency. 
A numerical value is expected", pair[1])); } } if (pair[0].equalsIgnoreCase("unit")) { isTimeUnitSpecified = true; timeUnit = pair[1]; if (!timeUnit.equalsIgnoreCase("months") && !timeUnit.equalsIgnoreCase("days") && !timeUnit.equalsIgnoreCase("hours") && !timeUnit.equalsIgnoreCase("minutes")) { throw new CoordinatorEngineException(ErrorCode.E0420, filter, XLog.format( "invalid value [{0}] for time unit. " + "Valid value is one of months, days, hours or minutes", pair[1])); } continue; } if (pair[0].equals("status")) { try { CoordinatorJob.Status.valueOf(pair[1]); } catch (IllegalArgumentException ex) { throw new CoordinatorEngineException(ErrorCode.E0420, filter, XLog.format( "invalid status [{0}]", pair[1])); } } List<String> list = map.get(pair[0]); if (list == null) { list = new ArrayList<String>(); map.put(pair[0], list); } list.add(pair[1]); } else { throw new CoordinatorEngineException(ErrorCode.E0420, filter, "elements must be name=value pairs"); } } // Unit is specified and frequency is not specified if (!isFrequencySpecified && isTimeUnitSpecified) { throw new CoordinatorEngineException(ErrorCode.E0420, filter, "time unit should be added only when " + "frequency is specified. Either specify frequency also or else remove the time unit"); } else if (isFrequencySpecified) { // Frequency value is specified if (isTimeUnitSpecified) { if (timeUnit.equalsIgnoreCase("months")) { timeUnit = "MONTH"; } else if (timeUnit.equalsIgnoreCase("days")) { timeUnit = "DAY"; } else if (timeUnit.equalsIgnoreCase("hours")) { // When job details are persisted to database, frequency in hours are converted to minutes. // This conversion is to conform with that. frequency = Integer.parseInt(frequency) * 60 + ""; timeUnit = "MINUTE"; } else if (timeUnit.equalsIgnoreCase("minutes")) { timeUnit = "MINUTE"; } } // Adding the frequency and time unit filters to the filter map List<String> list = new ArrayList<String>(); list.add(timeUnit); map.put("unit", list); list = new ArrayList<String>(); list.add(frequency); map.put("frequency", list); } } return map; } }
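// A short sketch of the filter grammar parseFilter() above accepts: semicolon-separated
// name=value pairs, where "unit" is only legal together with "frequency" and hour
// frequencies are normalized to minutes before querying. This assumes a running Services
// environment (the engine requires one) and a hypothetical user name.
final class CoordFilterSketch {
    static CoordinatorJobInfo runningJobsEveryTwoHours(String user) throws CoordinatorEngineException {
        CoordinatorEngine engine = new CoordinatorEngine(user);
        // parseFilter() turns this into {status=[RUNNING], frequency=[120], unit=[MINUTE]}:
        // 2 hours is stored as 120 minutes, matching how jobs are persisted.
        String filter = "status=RUNNING;frequency=2;unit=hours";
        return engine.getCoordJobs(filter, 1, 50); // first 50 matches
    }
}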
/* * Copyright (C) 2012 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package android.media; import android.content.ContentResolver; import android.content.Context; import android.content.res.AssetFileDescriptor; import android.media.MediaCodec; import android.media.MediaFormat; import android.net.Uri; import java.io.FileDescriptor; import java.io.IOException; import java.nio.ByteBuffer; import java.nio.ByteOrder; import java.util.HashMap; import java.util.Map; import java.util.UUID; /** * MediaExtractor facilitates extraction of demuxed, typically encoded, media data * from a data source. * <p>It is generally used like this: * <pre> * MediaExtractor extractor = new MediaExtractor(); * extractor.setDataSource(...); * int numTracks = extractor.getTrackCount(); * for (int i = 0; i &lt; numTracks; ++i) { * MediaFormat format = extractor.getTrackFormat(i); * String mime = format.getString(MediaFormat.KEY_MIME); * if (weAreInterestedInThisTrack) { * extractor.selectTrack(i); * } * } * ByteBuffer inputBuffer = ByteBuffer.allocate(...) * while (extractor.readSampleData(inputBuffer, ...) &gt;= 0) { * int trackIndex = extractor.getSampleTrackIndex(); * long presentationTimeUs = extractor.getSampleTime(); * ... * extractor.advance(); * } * * extractor.release(); * extractor = null; * </pre> */ final public class MediaExtractor { public MediaExtractor() { native_setup(); } /** * Sets the DataSource object to be used as the data source for this extractor * {@hide} */ public native final void setDataSource(DataSource source) throws IOException; /** * Sets the data source as a content Uri. * * @param context the Context to use when resolving the Uri * @param uri the Content URI of the data you want to extract from. * @param headers the headers to be sent together with the request for the data */ public final void setDataSource( Context context, Uri uri, Map<String, String> headers) throws IOException { String scheme = uri.getScheme(); if(scheme == null || scheme.equals("file")) { setDataSource(uri.getPath()); return; } AssetFileDescriptor fd = null; try { ContentResolver resolver = context.getContentResolver(); fd = resolver.openAssetFileDescriptor(uri, "r"); if (fd == null) { return; } // Note: using getDeclaredLength so that our behavior is the same // as previous versions when the content provider is returning // a full file. if (fd.getDeclaredLength() < 0) { setDataSource(fd.getFileDescriptor()); } else { setDataSource( fd.getFileDescriptor(), fd.getStartOffset(), fd.getDeclaredLength()); } return; } catch (SecurityException ex) { } catch (IOException ex) { } finally { if (fd != null) { fd.close(); } } setDataSource(uri.toString(), headers); } /** * Sets the data source (file-path or http URL) to use. 
* * @param path the path of the file, or the http URL * @param headers the headers associated with the http request for the stream you want to play */ public final void setDataSource(String path, Map<String, String> headers) throws IOException { String[] keys = null; String[] values = null; if (headers != null) { keys = new String[headers.size()]; values = new String[headers.size()]; int i = 0; for (Map.Entry<String, String> entry: headers.entrySet()) { keys[i] = entry.getKey(); values[i] = entry.getValue(); ++i; } } setDataSource(path, keys, values); } private native final void setDataSource( String path, String[] keys, String[] values) throws IOException; /** * Sets the data source (file-path or http URL) to use. * * @param path the path of the file, or the http URL of the stream * * <p>When <code>path</code> refers to a local file, the file may actually be opened by a * process other than the calling application. This implies that the pathname * should be an absolute path (as any other process runs with unspecified current working * directory), and that the pathname should reference a world-readable file. * As an alternative, the application could first open the file for reading, * and then use the file descriptor form {@link #setDataSource(FileDescriptor)}. */ public final void setDataSource(String path) throws IOException { setDataSource(path, null, null); } /** * Sets the data source (FileDescriptor) to use. It is the caller's responsibility * to close the file descriptor. It is safe to do so as soon as this call returns. * * @param fd the FileDescriptor for the file you want to extract from. */ public final void setDataSource(FileDescriptor fd) throws IOException { setDataSource(fd, 0, 0x7ffffffffffffffL); } /** * Sets the data source (FileDescriptor) to use. The FileDescriptor must be * seekable (N.B. a LocalSocket is not seekable). It is the caller's responsibility * to close the file descriptor. It is safe to do so as soon as this call returns. * * @param fd the FileDescriptor for the file you want to extract from. * @param offset the offset into the file where the data to be extracted starts, in bytes * @param length the length in bytes of the data to be extracted */ public native final void setDataSource( FileDescriptor fd, long offset, long length) throws IOException; @Override protected void finalize() { native_finalize(); } /** * Make sure you call this when you're done to free up any resources * instead of relying on the garbage collector to do this for you at * some point in the future. */ public native final void release(); /** * Count the number of tracks found in the data source. */ public native final int getTrackCount(); /** * Get the PSSH info if present. * @return a map of uuid-to-bytes, with the uuid specifying * the crypto scheme, and the bytes being the data specific to that scheme. 
*/ public Map<UUID, byte[]> getPsshInfo() { Map<UUID, byte[]> psshMap = null; Map<String, Object> formatMap = getFileFormatNative(); if (formatMap != null && formatMap.containsKey("pssh")) { ByteBuffer rawpssh = (ByteBuffer) formatMap.get("pssh"); rawpssh.order(ByteOrder.nativeOrder()); rawpssh.rewind(); formatMap.remove("pssh"); // parse the flat pssh bytebuffer into something more manageable psshMap = new HashMap<UUID, byte[]>(); while (rawpssh.remaining() > 0) { rawpssh.order(ByteOrder.BIG_ENDIAN); long msb = rawpssh.getLong(); long lsb = rawpssh.getLong(); UUID uuid = new UUID(msb, lsb); rawpssh.order(ByteOrder.nativeOrder()); int datalen = rawpssh.getInt(); byte [] psshdata = new byte[datalen]; rawpssh.get(psshdata); psshMap.put(uuid, psshdata); } } return psshMap; } private native Map<String, Object> getFileFormatNative(); /** * Get the track format at the specified index. * More detail on the representation can be found at {@link android.media.MediaCodec} */ public MediaFormat getTrackFormat(int index) { return new MediaFormat(getTrackFormatNative(index)); } private native Map<String, Object> getTrackFormatNative(int index); /** * Subsequent calls to {@link #readSampleData}, {@link #getSampleTrackIndex} and * {@link #getSampleTime} only retrieve information for the subset of tracks * selected. * Selecting the same track multiple times has no effect, the track is * only selected once. */ public native void selectTrack(int index); /** * Subsequent calls to {@link #readSampleData}, {@link #getSampleTrackIndex} and * {@link #getSampleTime} only retrieve information for the subset of tracks * selected. */ public native void unselectTrack(int index); /** * If possible, seek to a sync sample at or before the specified time */ public static final int SEEK_TO_PREVIOUS_SYNC = 0; /** * If possible, seek to a sync sample at or after the specified time */ public static final int SEEK_TO_NEXT_SYNC = 1; /** * If possible, seek to the sync sample closest to the specified time */ public static final int SEEK_TO_CLOSEST_SYNC = 2; /** * All selected tracks seek near the requested time according to the * specified mode. */ public native void seekTo(long timeUs, int mode); /** * Advance to the next sample. Returns false if no more sample data * is available (end of stream). */ public native boolean advance(); /** * Retrieve the current encoded sample and store it in the byte buffer * starting at the given offset. Returns the sample size (or -1 if * no more samples are available). */ public native int readSampleData(ByteBuffer byteBuf, int offset); /** * Returns the track index the current sample originates from (or -1 * if no more samples are available) */ public native int getSampleTrackIndex(); /** * Returns the current sample's presentation time in microseconds. * or -1 if no more samples are available. */ public native long getSampleTime(); // Keep these in sync with their equivalents in NuMediaExtractor.h /** * The sample is a sync sample */ public static final int SAMPLE_FLAG_SYNC = 1; /** * The sample is (at least partially) encrypted, see also the documentation * for {@link android.media.MediaCodec#queueSecureInputBuffer} */ public static final int SAMPLE_FLAG_ENCRYPTED = 2; /** * Returns the current sample's flags. */ public native int getSampleFlags(); /** * If the sample flags indicate that the current sample is at least * partially encrypted, this call returns relevant information about * the structure of the sample data required for decryption. 
* @param info The android.media.MediaCodec.CryptoInfo structure * to be filled in. * @return true iff the sample flags contain {@link #SAMPLE_FLAG_ENCRYPTED} */ public native boolean getSampleCryptoInfo(MediaCodec.CryptoInfo info); /** * Returns an estimate of how much data is presently cached in memory * expressed in microseconds. Returns -1 if that information is unavailable * or not applicable (no cache). */ public native long getCachedDuration(); /** * Returns true iff we are caching data and the cache has reached the * end of the data stream (for now, a future seek may of course restart * the fetching of data). * This API only returns a meaningful result if {@link #getCachedDuration} * indicates the presence of a cache, i.e. does NOT return -1. */ public native boolean hasCacheReachedEndOfStream(); private static native final void native_init(); private native final void native_setup(); private native final void native_finalize(); static { System.loadLibrary("media_jni"); native_init(); } private long mNativeContext; }
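The parsing loop in getPsshInfo() above implies a simple layout for the flat pssh buffer: a 16-byte UUID written big-endian, then a 32-bit length in native byte order, then that many bytes of scheme-specific data, repeated until the buffer is exhausted. Below is a round-trip sketch of that layout on the plain JVM (no Android classes required); the class name and the sample payload are made up for illustration, while the UUID is Widevine's well-known scheme ID:

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.Arrays;
import java.util.UUID;

public class PsshLayoutSketch {
    public static void main(String[] args) {
        UUID scheme = UUID.fromString("edef8ba9-79d6-4ace-a3c8-27dcd51d21ed");
        byte[] data = {1, 2, 3, 4};

        // Write one entry in the layout the parser expects:
        // 16-byte UUID big-endian, then a native-order length, then the data.
        ByteBuffer buf = ByteBuffer.allocate(16 + 4 + data.length);
        buf.order(ByteOrder.BIG_ENDIAN);
        buf.putLong(scheme.getMostSignificantBits());
        buf.putLong(scheme.getLeastSignificantBits());
        buf.order(ByteOrder.nativeOrder());
        buf.putInt(data.length);
        buf.put(data);
        buf.rewind();

        // Read it back exactly the way getPsshInfo() does.
        buf.order(ByteOrder.BIG_ENDIAN);
        UUID parsed = new UUID(buf.getLong(), buf.getLong());
        buf.order(ByteOrder.nativeOrder());
        byte[] parsedData = new byte[buf.getInt()];
        buf.get(parsedData);

        System.out.println(parsed.equals(scheme) + " " + Arrays.toString(parsedData));
    }
}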
package sql.fredy.metadata; /** Tables lists the tables found in the Database. It is very often used inside Admin. * Admin is a Tool around JDBC-enabled SQL-Databases to do basic jobs * for DB-Administrations, as create / delete / alter and query tables * it also creates indices and generates simple Java-Code to access DBMS-tables * and exports data into various formats * * Copyright (c) 2017 Fredy Fischer, sql@hulmen.ch Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. **/ import java.util.*; import java.sql.*; import sql.fredy.share.t_connect; public class TableColumns { // Fredy's make Version private static String fredysVersion = "Version 1.4 2. Jan.2002"; public String getVersion() {return fredysVersion; } Vector allCols; Vector allNames; Vector columnInfo; String Table; t_connect con=null; /** * Get the value of con. * @return value of con. */ public t_connect getCon() { if ( con == null) { con = new t_connect(getHost(), getUser(), getPassword(), getDatabase()); if ( ! con.acceptsConnection() ) con = null; } return con; } /** * Set the value of con. * @param v Value to assign to con. */ public void setCon(t_connect v) { this.con = v; setHost(con.getHost()); setUser(con.getUser()); setPassword(con.getPassword()); setDatabase(con.getDatabase()); } /** * Get the value of Table. * @return Value of Table. */ public String getTable() {return Table;} /** * Set the value of Table. * @param v Value to assign to Table. */ public void setTable(String v) {this.Table = v;} /** * Get the value of allCols. * @return Value of allCols. */ public Vector getAllCols() {return allCols;} /** * Get the value of columnInfo. * @return Value of columnInfo. */ public Vector getColumnInfo() {return columnInfo;} /** * Set the value of columnInfo. * @param v Value to assign to columnInfo. */ public void setColumnInfo(Vector v) {this.columnInfo = v;} /** * get the all Column Names. * @return Value of allNames. */ public Vector getAllNames() {return allNames;} /** * Get the value of host. * @return Value of host. */ private String host; public String getHost() {return host;} /** * Set the value of host. * @param v Value to assign to host. */ public void setHost(String v) {this.host = v;} /** to find out, when the user wants to close * this application, set a listener onto (JButton)AutoForm.cancel **/ private String user; /** * Get the value of user. * @return Value of user. */ public String getUser() {return user;} /** * Set the value of user. * @param v Value to assign to user. 
*/ public void setUser(String v) {this.user = v;} private String password; /** * Get the value of password. * @return Value of password. */ public String getPassword() {return password;} /** * Set the value of password. * @param v Value to assign to password. */ public void setPassword(String v) {this.password = v;} private String database; /** * Get the value of database. * @return Value of database. */ public String getDatabase() {return database;} /** * Set the value of database. * @param v Value to assign to database. */ public void setDatabase(String v) {this.database = v;} boolean standAlone=true; /** * Get the value of standAlone. * @return value of standAlone. */ public boolean isStandAlone() { return standAlone; } /** * Set the value of standAlone. * @param v Value to assign to standAlone. */ public void setStandAlone(boolean v) { this.standAlone = v; } public SingleColumnInfo getColumn(int columnNo) { try { return (SingleColumnInfo) columnInfo.elementAt(columnNo); } catch (ArrayIndexOutOfBoundsException aiob) { System.out.println("\nClass TableColumn, Method getColumn(" + columnNo + ") Error: "+ aiob); return null; } } public Object getColumnInfo(int columnNo, int columnDescription) { SingleColumnInfo sci = new SingleColumnInfo(); sci = getColumn(columnNo); Vector v = new Vector(); v = sci.getDataVector(); return v.elementAt(columnDescription); } public int getDecDigits(int columnNo) { SingleColumnInfo sci = new SingleColumnInfo(); sci = getColumn(columnNo); return sci.getDecimal_digits(); } int NumberOfColumns; /** * Get the value of NumberOfColumns. * @return Value of NumberOfColumns. */ public int getNumberOfColumns() {return NumberOfColumns;} /** * Set the value of NumberOfColumns. * @param v Value to assign to NumberOfColumns. */ public void setNumberOfColumns(int v) {this.NumberOfColumns = v;} public TableColumns(String host, String user, String password, String database, String table) { setStandAlone(true); setHost(host); setUser(user); setPassword(password); setDatabase(database); setTable(table); con = new t_connect(getHost(), getUser(), getPassword(),getDatabase()); inits(); } public TableColumns(t_connect con,String table) { this.setCon(con); setStandAlone(false); setTable(table); inits(); } private void inits() { allCols = new Vector(); allNames = new Vector(); columnInfo = new Vector(); if (con.getError() != null) { } else { try { DatabaseMetaData md = con.con.getMetaData(); ResultSet cols = md.getColumns(null,null,getTable(),"%"); int i = 0; while (cols.next()) { Columns col = new Columns(i,cols.getString(4),cols.getString(5),cols.getInt(7),cols.getInt(9),getTable()); allCols.addElement(col); allNames.addElement(cols.getString(4)); try { // doing complete ColumnInfo Vector colTemp = new Vector(); colTemp.addElement((String)cols.getString(1)); colTemp.addElement((String)cols.getString(2)); colTemp.addElement((String)cols.getString(3)); colTemp.addElement((String)cols.getString(4)); colTemp.addElement((Short)new Short(cols.getShort(5))); colTemp.addElement((String)cols.getString(6)); colTemp.addElement((Integer)new Integer(cols.getInt(7))); colTemp.addElement((Integer)new Integer(0)); colTemp.addElement((Integer)new Integer(cols.getInt(9))); colTemp.addElement((Integer)new Integer(cols.getInt(10))); colTemp.addElement((Integer)new Integer(cols.getInt(11))); colTemp.addElement((String)cols.getString(12)); colTemp.addElement((String)cols.getString(13)); colTemp.addElement((Integer)new Integer(cols.getInt(14))); colTemp.addElement((Integer)new Integer(cols.getInt(15))); 
colTemp.addElement((Integer)new Integer(cols.getInt(16)));
                    colTemp.addElement((Integer)new Integer(cols.getInt(17)));
                    colTemp.addElement((String)cols.getString(18));
                    SingleColumnInfo sctt = new SingleColumnInfo(colTemp);
                    sctt.setPrimaryKey(md);
                    columnInfo.addElement(sctt);
                } catch (Exception excp1) {
                    System.out.println("Exception in TableColumns, Table: " + cols.getString(4));
                    excp1.printStackTrace();
                }
                i++;
            }
            // i was incremented once per column, so this stores the index of the
            // last column rather than the column count
            setNumberOfColumns(i-1);
        } catch (Exception exception) {
            System.out.println("TableColumns: " + exception.getMessage());
        }
    }
    if (isStandAlone()) {
        try {
            con.close();
        } catch (Exception ec1) {
            ec1.printStackTrace();
        }
    }
}
}
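A hypothetical usage sketch for the class above; the host, credentials, database, and table name are placeholders, and it assumes a database reachable through t_connect:

import sql.fredy.metadata.TableColumns;

public class TableColumnsDemo {
    public static void main(String[] args) {
        // Hypothetical connection values; the five-argument constructor opens its
        // own t_connect and closes it again because standAlone is true.
        TableColumns tc = new TableColumns("localhost", "fredy", "secret", "testdb", "CUSTOMERS");
        for (Object name : tc.getAllNames()) { // raw Vector of column names
            System.out.println(name);
        }
    }
}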
package com.company.GUI; import com.company.Gameplay.Army; import com.company.Enumerations.Commodity; import com.company.Enumerations.Equipment; import com.company.Enumerations.WealthLevel; import com.company.Interfaces.IProducible; import com.company.Interfaces.Initialisable; import com.company.Gameplay.Settlement; import com.company.Gameplay.WorkingGroup; import com.company.hexgame; import javafx.beans.property.SimpleIntegerProperty; import javafx.fxml.FXML; import javafx.fxml.FXMLLoader; import javafx.scene.Parent; import javafx.scene.Scene; import javafx.scene.control.*; import javafx.scene.control.Button; import javafx.scene.control.Label; import javafx.stage.Stage; import java.io.IOException; import java.util.*; /** * Panel that shows the status of the selected settlement */ public class SettlementScreen implements Initialisable { private Settlement settlement; @FXML private Label foodReservesLabel; @FXML private Label foodProducedLabel; @FXML private Label wealthLevels; @FXML private Label peopleLabel; @FXML private Label starvationLabel; @FXML private Button createArmyButton; @FXML private ListView<String> assetsListView; @FXML private ListView<Army> armyListView; @FXML private TextField nameTextField; @FXML private ListView<WorkingGroup> workingGroupListView; @FXML private ListView<String> equipmentListView; @FXML private Button orderEquipmentButton; @FXML private ComboBox equipmentToOrderCombobox; @FXML private Slider amountToOrderSlider; @FXML private Label amountToOrderLabel; @FXML private Label idlePeopleLabel; @FXML private void createArmy() { Army armySelected; if (armyListView.getSelectionModel().getSelectedItem()!=null) armySelected = armyListView.getSelectionModel().getSelectedItem(); else armySelected = new Army(settlement,0, new HashMap<>()); try { FXMLLoader fxmlLoader = new FXMLLoader(getClass().getResource("ArmyScreen.fxml")); Parent root = fxmlLoader.load(); ((Initialisable)(fxmlLoader.getController())).initData(armySelected, this); // hexgame.getMainTimer().getScreensToRefresh().add(fxmlLoader.getController()); Stage stage = new Stage(); stage.setScene(new Scene(root)); stage.setOnHidden(e->((ArmyScreen)fxmlLoader.getController()).shutdown()); stage.show(); } catch (IOException e) { e.getLocalizedMessage(); }; //ArmyScreen armyScreen = new ArmyScreen(armySelected); // settlement.createArmy(); if (!settlement.getArmies().contains(armySelected)) settlement.getArmies().add(armySelected); hexgame.mainScreen.updateControls(); this.updateControls(); } @FXML private void orderEquipment() { if (equipmentToOrderCombobox.getValue() != null && (int) amountToOrderSlider.getValue() != 0) { settlement.orderEquipment((Equipment) equipmentToOrderCombobox.getValue(), (int) amountToOrderSlider.getValue()); this.updateControls(); } } @Override public Object getEntity() { return settlement; } @FXML private void renameVillage() { settlement.setName(nameTextField.getText()); } /** * This method is called to update the status of settlement after economical tick */ public void updateControls() { // for (Army army : settlement.getArmies()) // armyListView = new ListView<>(); armyListView.getItems().setAll(settlement.getArmies()); nameTextField.setText(settlement.getName()); //Show the list of the working groups workingGroupListView.getItems().clear(); HashSet<WorkingGroup> workingGroups = settlement.getWorkingGroups(); for (WorkingGroup workingGroup : workingGroups) { if (workingGroup.getPeople() > 0 && !workingGroupListView.getItems().contains(workingGroup)) 
workingGroupListView.getItems().add(workingGroup);
        }
        //Show buildings currently in the village
        assetsListView.getItems().clear();
        HashMap<Commodity, Integer> assets = settlement.getAssets();
        String assetString = "";
        for (Map.Entry<Commodity, Integer> asset : assets.entrySet()) {
            assetString = "";
            if (asset.getValue() > 0)
                assetString = asset.getKey().getName() + ": " + asset.getValue();
            if (settlement.commodityPerTurn(asset.getKey(), false) > 0) {
                if (assetString.isEmpty())
                    assetString = asset.getKey().getName() + ", under construction: " + settlement.commodityPerTurn(asset.getKey(), false);
                else
                    assetString += ", under construction: " + settlement.commodityPerTurn(asset.getKey(), false);
            }
            if (!assetString.isEmpty())
                assetsListView.getItems().add(assetString);
        }
        //show the settlement's armoury content:
        equipmentListView.getItems().clear();
        HashSet<Equipment> equipmentInPresence = new HashSet<>();
        for (Map.Entry<Equipment, Integer> equipment : settlement.getEquipment().entrySet()) {
            String equipmentString = equipment.getKey().getName();
            equipmentInPresence.add(equipment.getKey());
            if (equipment.getValue() > 0)
                equipmentString += " (" + equipment.getValue() + ")";
            if (settlement.getEquipmentOrders().getOrDefault(equipment.getKey(), 0) > 0)
                equipmentString += ", ordered: " + settlement.getEquipmentOrders().getOrDefault(equipment.getKey(), 0);
            equipmentListView.getItems().add(equipmentString);
        }
        for (Map.Entry<Equipment, Integer> equipment : settlement.getEquipmentOrders().entrySet()) {
            if (equipment.getValue() > 0 && !equipmentInPresence.contains(equipment.getKey()))
                equipmentListView.getItems().add(equipment.getKey().getName() + ", ordered: " + equipment.getValue());
        }
        //workingGroupListView.getItems().addAll(settlement.getWorkingGroups().stream().filter(a->a.getPeople()>0).collect(Collectors.toCollection(HashSet<>)));
        //Prepare list of equipment available to order
        Equipment selectedEquipment = (Equipment) equipmentToOrderCombobox.getValue();
        equipmentToOrderCombobox.getItems().clear();
        equipmentToOrderCombobox.getItems().addAll(settlement.getOwner().getEquipmentAvailableToOrder());
        equipmentToOrderCombobox.getSelectionModel().select(selectedEquipment);
        wealthLevels.setText(populateWealthLabel());
        //populate idle people label
        idlePeopleLabel.setText("Total people: " + settlement.getTotalPeople() + ", unemployed: " + settlement.getIdlePeople());
    }

    @FXML
    private String populateWealthLabel() {
        String result = "";
        HashMap<WealthLevel, Integer> settlementWealths = settlement.getWealthLevels();
        for (Map.Entry<WealthLevel, Integer> wl : settlementWealths.entrySet())
            result += wl.getKey().getName() + ": " + wl.getValue().toString() + "\n";
        return result;
    }

    @Override
    public void initData(Object object, Object caller) {
        // armyListView = new ListView<>();
        this.settlement = (Settlement) object;
        //assetsTableView.getColumns().get(0).
foodReservesLabel.textProperty().bind(new SimpleIntegerProperty(settlement.getFoodReserves()).asString()); foodProducedLabel.textProperty().bind(new SimpleIntegerProperty(settlement.getFoodProduced()).asString()); peopleLabel.textProperty().bind(new SimpleIntegerProperty(settlement.getTotalPeople()).asString()); starvationLabel.textProperty().bind(new SimpleIntegerProperty(settlement.getStarvationLastTurn()).asString()); updateControls(); } @FXML private void initialize() { amountToOrderSlider.valueProperty().addListener((observable, oldValue, newValue) -> { // settlement.orderEquipment((Equipment) equipmentToOrderCombobox.getValue(), (int) amountToOrderSlider.getValue()); amountToOrderLabel.setText(Long.toString(Math.round(amountToOrderSlider.getValue()))); }); equipmentToOrderCombobox.valueProperty().addListener((observable, oldValue, newValue) -> { if (newValue != null) // set range for order amount on equipment selected amountToOrderSlider.setMax((double) (settlement.getSettlementTreasury()/((IProducible)newValue).getLaboriousness())); else amountToOrderSlider.setMax(0); }); nameTextField.textProperty().addListener((observable, oldValue, newValue) -> { settlement.setName(newValue); }); } }
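One detail of initData() above is worth spelling out: binding a label to new SimpleIntegerProperty(value).asString() captures a snapshot, since the freshly created property is never written again, so the labels stay current only because updateControls() repopulates the controls after each tick. If Settlement exposed live JavaFX properties, the bindings could track the model on their own; the foodReservesProperty() accessor in this sketch is hypothetical and not part of the codebase:

// Hypothetical: assumes Settlement exposes a live IntegerProperty, e.g.
//   private final IntegerProperty foodReserves = new SimpleIntegerProperty();
//   public IntegerProperty foodReservesProperty() { return foodReserves; }
// The label then updates automatically whenever the model changes:
foodReservesLabel.textProperty().bind(settlement.foodReservesProperty().asString());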
/** * Copyright (c) 2012-2014 Netflix, Inc. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.netflix.msl.util; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNotSame; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertSame; import static org.junit.Assert.assertTrue; import java.util.Collection; import java.util.HashSet; import java.util.Random; import java.util.Set; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; import org.junit.After; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Ignore; import org.junit.Rule; import org.junit.Test; import com.netflix.msl.MslConstants; import com.netflix.msl.MslCryptoException; import com.netflix.msl.MslEncodingException; import com.netflix.msl.MslError; import com.netflix.msl.MslException; import com.netflix.msl.MslInternalException; import com.netflix.msl.MslMasterTokenException; import com.netflix.msl.crypto.ICryptoContext; import com.netflix.msl.crypto.NullCryptoContext; import com.netflix.msl.crypto.SessionCryptoContext; import com.netflix.msl.crypto.SymmetricCryptoContext; import com.netflix.msl.entityauth.EntityAuthenticationScheme; import com.netflix.msl.test.ExpectedMslException; import com.netflix.msl.tokens.MasterToken; import com.netflix.msl.tokens.ServiceToken; import com.netflix.msl.tokens.UserIdToken; import com.netflix.msl.userauth.MockEmailPasswordAuthenticationFactory; /** * Simple MSL store unit tests. * * @author Wesley Miaw <wmiaw@netflix.com> */ public class SimpleMslStoreTest { private static final String KEYSET_ID = "keyset"; private static final String USER_ID = "userid"; /** Maximum number of randomly generated tokens. */ private static final int MAX_TOKENS = 3; /** Stress test pool shutdown timeout in milliseconds. */ private static final int STRESS_TIMEOUT_MILLIS = 3000; /** * @param c1 first collection. * @param c2 second collection. * @return true if each collection contain all elements found in the other. */ private static boolean equal(final Collection<? extends Object> c1, final Collection<? 
extends Object> c2) { return c1.containsAll(c2) && c2.containsAll(c1); } @Rule public ExpectedMslException thrown = ExpectedMslException.none(); @BeforeClass public static void setup() throws MslEncodingException, MslCryptoException { ctx = new MockMslContext(EntityAuthenticationScheme.NONE, false); } @AfterClass public static void teardown() { ctx = null; } @Before public void createStore() { store = new SimpleMslStore(); } @After public void destroyStore() { store = null; } @Test public void storeCryptoContext() throws MslEncodingException, MslCryptoException { final MasterToken masterToken = MslTestUtils.getMasterToken(ctx, 1, 1); assertNull(store.getCryptoContext(masterToken)); final ICryptoContext cc1 = new SymmetricCryptoContext(ctx, KEYSET_ID, masterToken.getEncryptionKey(), masterToken.getSignatureKey(), null); store.setCryptoContext(masterToken, cc1); final ICryptoContext cc2 = store.getCryptoContext(masterToken); assertNotNull(cc2); assertSame(cc1, cc2); assertEquals(masterToken, store.getMasterToken()); } @Test public void replaceCryptoContext() throws MslEncodingException, MslCryptoException { final MasterToken masterToken = MslTestUtils.getMasterToken(ctx, 1, 1); final ICryptoContext cc1 = new SymmetricCryptoContext(ctx, KEYSET_ID, masterToken.getEncryptionKey(), masterToken.getSignatureKey(), null); final ICryptoContext cc2 = new NullCryptoContext(); store.setCryptoContext(masterToken, cc1); final ICryptoContext cc3 = store.getCryptoContext(masterToken); assertSame(cc1, cc3); assertNotSame(cc2, cc3); store.setCryptoContext(masterToken, cc2); final ICryptoContext cc4 = store.getCryptoContext(masterToken); assertNotSame(cc1, cc4); assertSame(cc2, cc4); assertEquals(masterToken, store.getMasterToken()); } @Test public void removeCryptoContext() throws MslEncodingException, MslCryptoException { final MasterToken masterToken = MslTestUtils.getMasterToken(ctx, 1, 1); final ICryptoContext cryptoContext = new NullCryptoContext(); store.setCryptoContext(masterToken, cryptoContext); store.removeCryptoContext(masterToken); assertNull(store.getMasterToken()); assertNull(store.getCryptoContext(masterToken)); } @Test public void clearCryptoContext() throws MslEncodingException, MslCryptoException { final MasterToken masterToken = MslTestUtils.getMasterToken(ctx, 1, 1); final ICryptoContext cc1 = new SymmetricCryptoContext(ctx, KEYSET_ID, masterToken.getEncryptionKey(), masterToken.getSignatureKey(), null); store.setCryptoContext(masterToken, cc1); store.clearCryptoContexts(); assertNull(store.getCryptoContext(masterToken)); assertNull(store.getMasterToken()); } @Test public void twoCryptoContexts() throws MslEncodingException, MslCryptoException, MslMasterTokenException { final MasterToken mtA = MslTestUtils.getMasterToken(ctx, 1, 1); final MasterToken mtB = MslTestUtils.getMasterToken(ctx, 2, 1); final ICryptoContext ccMtA1 = new SessionCryptoContext(ctx, mtA); final ICryptoContext ccMtB1 = new SessionCryptoContext(ctx, mtB); store.setCryptoContext(mtA, ccMtA1); store.setCryptoContext(mtB, ccMtB1); final ICryptoContext ccMtA2 = store.getCryptoContext(mtA); assertNotNull(ccMtA2); assertSame(ccMtA1, ccMtA2); final ICryptoContext ccMtB2 = store.getCryptoContext(mtB); assertNotNull(ccMtB2); assertSame(ccMtB1, ccMtB2); assertEquals(mtB, store.getMasterToken()); } @Test public void replaceTwoCryptoContexts() throws MslEncodingException, MslCryptoException, MslMasterTokenException { final MasterToken mtA = MslTestUtils.getMasterToken(ctx, 1, 1); final MasterToken mtB = 
MslTestUtils.getMasterToken(ctx, 2, 1);
        final ICryptoContext ccMtA1 = new SessionCryptoContext(ctx, mtA);
        final ICryptoContext ccMtB1 = new SessionCryptoContext(ctx, mtB);
        store.setCryptoContext(mtA, ccMtA1);
        store.setCryptoContext(mtB, ccMtB1);
        assertEquals(mtB, store.getMasterToken());
        final ICryptoContext ccNull = new NullCryptoContext();
        store.setCryptoContext(mtA, ccNull);
        final ICryptoContext ccMtA2 = store.getCryptoContext(mtA);
        assertNotNull(ccMtA2);
        assertNotSame(ccMtA1, ccMtA2);
        assertSame(ccNull, ccMtA2);
        final ICryptoContext ccMtB2 = store.getCryptoContext(mtB);
        assertNotNull(ccMtB2);
        assertSame(ccMtB1, ccMtB2);
        assertEquals(mtB, store.getMasterToken());
    }

    @Test
    public void clearTwoCryptoContexts() throws MslEncodingException, MslCryptoException, MslMasterTokenException {
        final MasterToken mtA = MslTestUtils.getMasterToken(ctx, 1, 1);
        final MasterToken mtB = MslTestUtils.getMasterToken(ctx, 2, 1);
        final ICryptoContext ccMtA1 = new SessionCryptoContext(ctx, mtA);
        final ICryptoContext ccMtB1 = new SessionCryptoContext(ctx, mtB);
        store.setCryptoContext(mtA, ccMtA1);
        store.setCryptoContext(mtB, ccMtB1);
        store.clearCryptoContexts();
        assertNull(store.getCryptoContext(mtA));
        assertNull(store.getCryptoContext(mtB));
        assertNull(store.getMasterToken());
    }

    @Test
    public void removeTwoCryptoContexts() throws MslEncodingException, MslCryptoException, MslMasterTokenException {
        final MasterToken mtA = MslTestUtils.getMasterToken(ctx, 1, 1);
        final MasterToken mtB = MslTestUtils.getMasterToken(ctx, 2, 1);
        final ICryptoContext ccMtA1 = new SessionCryptoContext(ctx, mtA);
        final ICryptoContext ccMtB1 = new SessionCryptoContext(ctx, mtB);
        store.setCryptoContext(mtA, ccMtA1);
        store.setCryptoContext(mtB, ccMtB1);
        store.removeCryptoContext(mtA);
        assertNull(store.getCryptoContext(mtA));
        assertEquals(ccMtB1, store.getCryptoContext(mtB));
    }

    /**
     * Crypto context add/remove stress test runner.
     *
     * Randomly adds or removes a crypto context for one of many master tokens
     * (by master token entity identity). Also iterates through the set crypto
     * contexts.
     */
    private static class CryptoContextStressor implements Runnable {
        /**
         * Create a new crypto context stressor.
         *
         * @param ctx MSL context.
         * @param store MSL store.
         * @param count the number of master token identities to stress.
         */
        public CryptoContextStressor(final MslContext ctx, final MslStore store, final int count) {
            this.ctx = ctx;
            this.store = store;
            this.count = count;
        }

        /* (non-Javadoc)
         * @see java.lang.Runnable#run()
         */
        @Override
        public void run() {
            final Random r = new Random();
            try {
                for (int i = 0; i < 10 * count; ++i) {
                    final int tokenIndex = r.nextInt(count);
                    final MasterToken masterToken = MslTestUtils.getMasterToken(ctx, tokenIndex, 1);
                    final int option = r.nextInt(4);
                    switch (option) {
                        case 0:
                            store.setCryptoContext(masterToken, null);
                            break;
                        case 1:
                            final ICryptoContext cryptoContext = new SessionCryptoContext(ctx, masterToken);
                            store.setCryptoContext(masterToken, cryptoContext);
                            break;
                        case 2:
                            store.getCryptoContext(masterToken);
                            break;
                        case 3:
                            store.removeCryptoContext(masterToken);
                            break;
                    }
                }
            } catch (final MslMasterTokenException e) {
                throw new MslInternalException("Unexpected master token exception.", e);
            } catch (final MslEncodingException e) {
                throw new MslInternalException("Unexpected master token encoding exception.", e);
            } catch (final MslCryptoException e) {
                throw new MslInternalException("Unexpected master token creation exception.", e);
            }
        }

        /** MSL context. */
        private final MslContext ctx;
        /** MSL store.
*/ private final MslStore store; /** Number of crypto context identities. */ private final int count; } @Test public void stressCryptoContexts() throws InterruptedException, MslEncodingException, MslCryptoException { final ExecutorService service = Executors.newCachedThreadPool(); for (int i = 0; i < 10 * MAX_TOKENS; ++i) { service.execute(new CryptoContextStressor(ctx, store, MAX_TOKENS)); } service.shutdown(); assertTrue(service.awaitTermination(STRESS_TIMEOUT_MILLIS, TimeUnit.MILLISECONDS)); } @Test public void nonReplayableId() throws MslEncodingException, MslCryptoException { final MasterToken masterToken = MslTestUtils.getMasterToken(ctx, 1, 1); for (int i = 1; i < 10; ++i) assertEquals(i, store.getNonReplayableId(masterToken)); } @Ignore @Test public void wrappedNonReplayableId() throws MslEncodingException, MslCryptoException { final MasterToken masterToken = MslTestUtils.getMasterToken(ctx, 1, 1); for (long i = 1; i < MslConstants.MAX_LONG_VALUE; ++i) store.getNonReplayableId(masterToken); assertEquals(MslConstants.MAX_LONG_VALUE, store.getNonReplayableId(masterToken)); assertEquals(0, store.getNonReplayableId(masterToken)); assertEquals(1, store.getNonReplayableId(masterToken)); } @Test public void twoNonReplayableIds() throws MslEncodingException, MslCryptoException { final MasterToken masterTokenA = MslTestUtils.getMasterToken(ctx, 1, 1); final MasterToken masterTokenB = MslTestUtils.getMasterToken(ctx, 1, 2); for (int i = 1; i < 10; ++i) { assertEquals(i, store.getNonReplayableId(masterTokenA)); assertEquals(i, store.getNonReplayableId(masterTokenB)); } } @Test public void addUserIdToken() throws MslException { final MasterToken masterToken = MslTestUtils.getMasterToken(ctx, 1, 1); final UserIdToken userIdToken = MslTestUtils.getUserIdToken(ctx, masterToken, 1, MockEmailPasswordAuthenticationFactory.USER); final ICryptoContext cryptoContext = new NullCryptoContext(); store.setCryptoContext(masterToken, cryptoContext); store.addUserIdToken(USER_ID, userIdToken); assertEquals(userIdToken, store.getUserIdToken(USER_ID)); assertNull(store.getUserIdToken(USER_ID + "x")); } @Test public void removeUserIdToken() throws MslEncodingException, MslCryptoException, MslException { final MasterToken masterToken = MslTestUtils.getMasterToken(ctx, 1, 1); final UserIdToken userIdToken = MslTestUtils.getUserIdToken(ctx, masterToken, 1, MockEmailPasswordAuthenticationFactory.USER); final ICryptoContext cryptoContext = new NullCryptoContext(); store.setCryptoContext(masterToken, cryptoContext); store.addUserIdToken(USER_ID, userIdToken); store.removeUserIdToken(userIdToken); assertNull(store.getUserIdToken(USER_ID)); } @Test public void replaceUserIdToken() throws MslEncodingException, MslCryptoException, MslException { final MasterToken masterToken = MslTestUtils.getMasterToken(ctx, 1, 1); final ICryptoContext cryptoContext = new NullCryptoContext(); final UserIdToken userIdTokenA = MslTestUtils.getUserIdToken(ctx, masterToken, 1, MockEmailPasswordAuthenticationFactory.USER); final UserIdToken userIdTokenB = MslTestUtils.getUserIdToken(ctx, masterToken, 2, MockEmailPasswordAuthenticationFactory.USER); store.setCryptoContext(masterToken, cryptoContext); store.addUserIdToken(USER_ID, userIdTokenA); store.addUserIdToken(USER_ID, userIdTokenB); assertEquals(userIdTokenB, store.getUserIdToken(USER_ID)); } @Test public void twoUserIdTokens() throws MslEncodingException, MslCryptoException, MslException { final MasterToken masterToken = MslTestUtils.getMasterToken(ctx, 1, 1); final ICryptoContext 
cryptoContext = new NullCryptoContext(); final String userIdA = USER_ID + "A"; final String userIdB = USER_ID + "B"; final UserIdToken userIdTokenA = MslTestUtils.getUserIdToken(ctx, masterToken, 1, MockEmailPasswordAuthenticationFactory.USER); final UserIdToken userIdTokenB = MslTestUtils.getUserIdToken(ctx, masterToken, 2, MockEmailPasswordAuthenticationFactory.USER); store.setCryptoContext(masterToken, cryptoContext); store.addUserIdToken(userIdA, userIdTokenA); store.addUserIdToken(userIdB, userIdTokenB); assertEquals(userIdTokenA, store.getUserIdToken(userIdA)); assertEquals(userIdTokenB, store.getUserIdToken(userIdB)); } @Test public void replaceTwoUserIdTokens() throws MslEncodingException, MslCryptoException, MslException { final MasterToken masterToken = MslTestUtils.getMasterToken(ctx, 1, 1); final ICryptoContext cryptoContext = new NullCryptoContext(); final String userIdA = USER_ID + "A"; final String userIdB = USER_ID + "B"; final UserIdToken userIdTokenA = MslTestUtils.getUserIdToken(ctx, masterToken, 1, MockEmailPasswordAuthenticationFactory.USER); final UserIdToken userIdTokenB = MslTestUtils.getUserIdToken(ctx, masterToken, 2, MockEmailPasswordAuthenticationFactory.USER); store.setCryptoContext(masterToken, cryptoContext); store.addUserIdToken(userIdA, userIdTokenA); store.addUserIdToken(userIdB, userIdTokenB); final UserIdToken userIdTokenC = MslTestUtils.getUserIdToken(ctx, masterToken, 3, MockEmailPasswordAuthenticationFactory.USER); store.addUserIdToken(userIdA, userIdTokenC); assertEquals(userIdTokenC, store.getUserIdToken(userIdA)); assertEquals(userIdTokenB, store.getUserIdToken(userIdB)); } @Test public void removeTwoUserIdTokens() throws MslEncodingException, MslCryptoException, MslException { final MasterToken masterToken = MslTestUtils.getMasterToken(ctx, 1, 1); final ICryptoContext cryptoContext = new NullCryptoContext(); final String userIdA = USER_ID + "A"; final String userIdB = USER_ID + "B"; final UserIdToken userIdTokenA = MslTestUtils.getUserIdToken(ctx, masterToken, 1, MockEmailPasswordAuthenticationFactory.USER); final UserIdToken userIdTokenB = MslTestUtils.getUserIdToken(ctx, masterToken, 2, MockEmailPasswordAuthenticationFactory.USER); store.setCryptoContext(masterToken, cryptoContext); store.addUserIdToken(userIdA, userIdTokenA); store.addUserIdToken(userIdB, userIdTokenB); store.removeUserIdToken(userIdTokenA); assertNull(store.getUserIdToken(userIdA)); assertEquals(userIdTokenB, store.getUserIdToken(userIdB)); } @Test public void clearUserIdTokens() throws MslEncodingException, MslCryptoException, MslException { final MasterToken masterToken = MslTestUtils.getMasterToken(ctx, 1, 1); final ICryptoContext cryptoContext = new NullCryptoContext(); final String userIdA = USER_ID + "A"; final String userIdB = USER_ID + "B"; final UserIdToken userIdTokenA = MslTestUtils.getUserIdToken(ctx, masterToken, 1, MockEmailPasswordAuthenticationFactory.USER); final UserIdToken userIdTokenB = MslTestUtils.getUserIdToken(ctx, masterToken, 2, MockEmailPasswordAuthenticationFactory.USER); store.setCryptoContext(masterToken, cryptoContext); store.addUserIdToken(userIdA, userIdTokenA); store.addUserIdToken(userIdB, userIdTokenB); store.clearUserIdTokens(); assertNull(store.getUserIdToken(userIdA)); assertNull(store.getUserIdToken(userIdB)); } @Test public void unknownMasterTokenUserIdToken() throws MslEncodingException, MslCryptoException, MslException { thrown.expect(MslException.class); thrown.expectMslError(MslError.USERIDTOKEN_MASTERTOKEN_NOT_FOUND); final 
MasterToken masterToken = MslTestUtils.getMasterToken(ctx, 1, 1); final UserIdToken userIdToken = MslTestUtils.getUserIdToken(ctx, masterToken, 1, MockEmailPasswordAuthenticationFactory.USER); store.addUserIdToken(USER_ID, userIdToken); } @Test public void removeMasterTokenSameSerialNumberUserIdTokens() throws MslEncodingException, MslCryptoException, MslException { final MasterToken masterTokenA = MslTestUtils.getMasterToken(ctx, 1, 1); final MasterToken masterTokenB = MslTestUtils.getMasterToken(ctx, 2, 1); final ICryptoContext cryptoContext = new NullCryptoContext(); final String userIdA = USER_ID + "A"; final String userIdB = USER_ID + "B"; final String userIdC = USER_ID + "C"; final UserIdToken userIdTokenA = MslTestUtils.getUserIdToken(ctx, masterTokenA, 1, MockEmailPasswordAuthenticationFactory.USER); final UserIdToken userIdTokenB = MslTestUtils.getUserIdToken(ctx, masterTokenA, 2, MockEmailPasswordAuthenticationFactory.USER); final UserIdToken userIdTokenC = MslTestUtils.getUserIdToken(ctx, masterTokenB, 1, MockEmailPasswordAuthenticationFactory.USER); store.setCryptoContext(masterTokenA, cryptoContext); store.setCryptoContext(masterTokenB, cryptoContext); store.addUserIdToken(userIdA, userIdTokenA); store.addUserIdToken(userIdB, userIdTokenB); store.addUserIdToken(userIdC, userIdTokenC); // We still have a master token with serial number 1 so no user ID // tokens should be deleted. store.removeCryptoContext(masterTokenA); assertEquals(userIdTokenA, store.getUserIdToken(userIdA)); assertEquals(userIdTokenB, store.getUserIdToken(userIdB)); assertEquals(userIdTokenC, store.getUserIdToken(userIdC)); } @Test public void removeMasterTokenReissuedUserIdTokens() throws MslEncodingException, MslCryptoException, MslException { // Master token B has a new serial number, to invalidate the old master // token and its user ID tokens. final MasterToken masterTokenA = MslTestUtils.getMasterToken(ctx, 1, 1); final MasterToken masterTokenB = MslTestUtils.getMasterToken(ctx, 1, 2); final ICryptoContext cryptoContext = new NullCryptoContext(); final String userIdA = USER_ID + "A"; final String userIdB = USER_ID + "B"; final String userIdC = USER_ID + "C"; final UserIdToken userIdTokenA = MslTestUtils.getUserIdToken(ctx, masterTokenA, 1, MockEmailPasswordAuthenticationFactory.USER); final UserIdToken userIdTokenB = MslTestUtils.getUserIdToken(ctx, masterTokenA, 2, MockEmailPasswordAuthenticationFactory.USER); final UserIdToken userIdTokenC = MslTestUtils.getUserIdToken(ctx, masterTokenB, 1, MockEmailPasswordAuthenticationFactory.USER); store.setCryptoContext(masterTokenA, cryptoContext); store.addUserIdToken(userIdA, userIdTokenA); store.addUserIdToken(userIdB, userIdTokenB); store.setCryptoContext(masterTokenB, cryptoContext); store.addUserIdToken(userIdC, userIdTokenC); // All of master token A's user ID tokens should be deleted. store.removeCryptoContext(masterTokenA); assertNull(store.getUserIdToken(userIdA)); assertNull(store.getUserIdToken(userIdB)); assertEquals(userIdTokenC, store.getUserIdToken(userIdC)); } @Test public void clearCryptoContextsUserIdTokens() throws MslEncodingException, MslCryptoException, MslException { // Master token B has a new serial number, to invalidate the old master // token and its user ID tokens. 
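        // (MslTestUtils.getMasterToken's third argument is the serial number.
        // User ID tokens are bound to their master token's serial number, so
        // reissuing a master token under a new serial number orphans them.)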
final MasterToken masterTokenA = MslTestUtils.getMasterToken(ctx, 1, 1); final MasterToken masterTokenB = MslTestUtils.getMasterToken(ctx, 1, 2); final ICryptoContext cryptoContext = new NullCryptoContext(); final String userIdA = USER_ID + "A"; final String userIdB = USER_ID + "B"; final UserIdToken userIdTokenA = MslTestUtils.getUserIdToken(ctx, masterTokenA, 1, MockEmailPasswordAuthenticationFactory.USER); final UserIdToken userIdTokenB = MslTestUtils.getUserIdToken(ctx, masterTokenB, 2, MockEmailPasswordAuthenticationFactory.USER); store.setCryptoContext(masterTokenA, cryptoContext); store.setCryptoContext(masterTokenB, cryptoContext); store.addUserIdToken(userIdA, userIdTokenA); store.addUserIdToken(userIdB, userIdTokenB); // All user ID tokens should be deleted. store.clearCryptoContexts(); assertNull(store.getUserIdToken(userIdA)); assertNull(store.getUserIdToken(userIdB)); } /** * User ID token add/remove stress test runner. * * Randomly adds or removes user ID tokens. Also iterates through the user * ID tokens. */ private static class UserIdTokenStressor implements Runnable { /** * Create a new service token stressor. * * @param ctx MSL context. * @param store MSL store. * @param count the number of master token and user ID tokens to create * combinations of. */ public UserIdTokenStressor(final MslContext ctx, final MslStore store, final int count) { this.ctx = ctx; this.store = store; this.count = count; } /* (non-Javadoc) * @see java.lang.Runnable#run() */ @Override public void run() { final Random r = new Random(); try { for (int i = 0; i < 10 * count; ++i) { final int tokenIndex = r.nextInt(count); final MasterToken masterToken = MslTestUtils.getMasterToken(ctx, tokenIndex, 1); final long userId = r.nextInt(count); final UserIdToken userIdToken = MslTestUtils.getUserIdToken(ctx, masterToken, userId, MockEmailPasswordAuthenticationFactory.USER); final int option = r.nextInt(3); switch (option) { case 0: { store.setCryptoContext(masterToken, new NullCryptoContext()); store.addUserIdToken(USER_ID + userId, userIdToken); break; } case 1: { store.getUserIdToken(USER_ID + userId); break; } case 2: { store.removeUserIdToken(userIdToken); break; } } } } catch (final MslMasterTokenException e) { throw new MslInternalException("Unexpected master token exception.", e); } catch (final MslEncodingException e) { throw new MslInternalException("Unexpected master token encoding exception.", e); } catch (final MslCryptoException e) { throw new MslInternalException("Unexpected master token creation exception.", e); } catch (final MslException e) { throw new MslInternalException("Master token / user ID token service token query mismatch.", e); } } /** MSL context. */ private final MslContext ctx; /** MSL store. */ private final MslStore store; /** Number of master token and user ID token identities. 
*/ private final int count; } @Test public void stressUserIdTokens() throws InterruptedException { final ExecutorService service = Executors.newCachedThreadPool(); for (int i = 0; i < 10 * MAX_TOKENS; ++i) { service.execute(new UserIdTokenStressor(ctx, store, MAX_TOKENS)); } service.shutdown(); assertTrue(service.awaitTermination(STRESS_TIMEOUT_MILLIS, TimeUnit.MILLISECONDS)); } @Test public void masterBoundServiceTokens() throws MslException { final MasterToken masterToken = MslTestUtils.getMasterToken(ctx, 1, 1); final ICryptoContext cryptoContext = new NullCryptoContext(); final Set<ServiceToken> tokens = MslTestUtils.getServiceTokens(ctx, masterToken, null); store.setCryptoContext(masterToken, cryptoContext); final Set<ServiceToken> emptyTokens = store.getServiceTokens(masterToken, null); assertNotNull(emptyTokens); assertEquals(0, emptyTokens.size()); store.addServiceTokens(tokens); final Set<ServiceToken> storedTokens = store.getServiceTokens(masterToken, null); assertNotNull(storedTokens); assertTrue(equal(tokens, storedTokens)); } @Test public void missingMasterTokenAddServiceTokens() throws MslException { final MasterToken masterToken = MslTestUtils.getMasterToken(ctx, 1, 1); final Set<ServiceToken> tokens = MslTestUtils.getServiceTokens(ctx, masterToken, null); MslException exception = null; try { store.addServiceTokens(tokens); } catch (final MslException e) { exception = e; } assertNotNull(exception); final Set<ServiceToken> emptyTokens = store.getServiceTokens(masterToken, null); assertNotNull(emptyTokens); assertEquals(0, emptyTokens.size()); } @Test public void userBoundServiceTokens() throws MslEncodingException, MslCryptoException, MslException { final MasterToken masterToken = MslTestUtils.getMasterToken(ctx, 1, 1); final UserIdToken userIdToken = MslTestUtils.getUserIdToken(ctx, masterToken, 1, MockEmailPasswordAuthenticationFactory.USER); final ICryptoContext cryptoContext = new NullCryptoContext(); final Set<ServiceToken> tokens = MslTestUtils.getServiceTokens(ctx, masterToken, userIdToken); store.setCryptoContext(masterToken, cryptoContext); store.addUserIdToken(USER_ID, userIdToken); final Set<ServiceToken> emptyTokens = store.getServiceTokens(masterToken, userIdToken); assertNotNull(emptyTokens); assertEquals(0, emptyTokens.size()); store.addServiceTokens(tokens); final Set<ServiceToken> storedTokens = store.getServiceTokens(masterToken, userIdToken); assertNotNull(storedTokens); assertTrue(equal(tokens, storedTokens)); } @Test public void missingUserIdTokenAddServiceTokens() throws MslException { final MasterToken masterToken = MslTestUtils.getMasterToken(ctx, 1, 1); final UserIdToken userIdToken = MslTestUtils.getUserIdToken(ctx, masterToken, 1, MockEmailPasswordAuthenticationFactory.USER); final ICryptoContext cryptoContext = new NullCryptoContext(); final Set<ServiceToken> tokens = MslTestUtils.getServiceTokens(ctx, masterToken, userIdToken); store.setCryptoContext(masterToken, cryptoContext); MslException exception = null; try { store.addServiceTokens(tokens); } catch (final MslException e) { exception = e; } assertNotNull(exception); final Set<ServiceToken> emptyTokens = store.getServiceTokens(masterToken, null); assertNotNull(emptyTokens); assertEquals(0, emptyTokens.size()); } @Test public void unboundServiceTokens() throws MslException { final Set<ServiceToken> tokens = MslTestUtils.getServiceTokens(ctx, null, null); final Set<ServiceToken> emptyTokens = store.getServiceTokens(null, null); assertNotNull(emptyTokens); assertEquals(0, emptyTokens.size()); 
store.addServiceTokens(tokens); final Set<ServiceToken> storedTokens = store.getServiceTokens(null, null); assertNotNull(storedTokens); assertTrue(equal(tokens, storedTokens)); } @Test public void removeMasterBoundServiceTokens() throws MslException { final MasterToken masterToken = MslTestUtils.getMasterToken(ctx, 1, 1); final UserIdToken userIdToken = MslTestUtils.getUserIdToken(ctx, masterToken, 1, MockEmailPasswordAuthenticationFactory.USER); final ICryptoContext cryptoContext = new NullCryptoContext(); final Set<ServiceToken> masterBoundTokens = MslTestUtils.getMasterBoundServiceTokens(ctx, masterToken); final Set<ServiceToken> userBoundTokens = MslTestUtils.getUserBoundServiceTokens(ctx, masterToken, userIdToken); final Set<ServiceToken> unboundTokens = MslTestUtils.getServiceTokens(ctx, null, null); store.setCryptoContext(masterToken, cryptoContext); store.addUserIdToken(USER_ID, userIdToken); store.addServiceTokens(masterBoundTokens); store.addServiceTokens(userBoundTokens); store.addServiceTokens(unboundTokens); store.removeServiceTokens(null, masterToken, null); // This should only return the unbound tokens. final Set<ServiceToken> storedMasterBoundTokens = store.getServiceTokens(masterToken, null); assertNotNull(storedMasterBoundTokens); assertTrue(equal(unboundTokens, storedMasterBoundTokens)); // This should only return the unbound tokens. final Set<ServiceToken> storedUserBoundTokens = store.getServiceTokens(masterToken, userIdToken); assertTrue(equal(unboundTokens, storedUserBoundTokens)); // This should only return the unbound tokens. final Set<ServiceToken> storedUnboundTokens = store.getServiceTokens(null, null); assertNotNull(storedUnboundTokens); assertTrue(equal(unboundTokens, storedUnboundTokens)); } @Test public void removeUserBoundServiceTokens() throws MslException { final MasterToken masterToken = MslTestUtils.getMasterToken(ctx, 1, 1); final UserIdToken userIdToken = MslTestUtils.getUserIdToken(ctx, masterToken, 1, MockEmailPasswordAuthenticationFactory.USER); final ICryptoContext cryptoContext = new NullCryptoContext(); final Set<ServiceToken> masterBoundTokens = MslTestUtils.getMasterBoundServiceTokens(ctx, masterToken); final Set<ServiceToken> userBoundTokens = MslTestUtils.getUserBoundServiceTokens(ctx, masterToken, userIdToken); final Set<ServiceToken> unboundTokens = MslTestUtils.getServiceTokens(ctx, null, null); store.setCryptoContext(masterToken, cryptoContext); store.addUserIdToken(USER_ID, userIdToken); store.addServiceTokens(masterBoundTokens); store.addServiceTokens(userBoundTokens); store.addServiceTokens(unboundTokens); store.removeServiceTokens(null, masterToken, userIdToken); // This should only return the unbound and master bound-only tokens. final Set<ServiceToken> storedMasterBoundTokens = store.getServiceTokens(masterToken, null); assertNotNull(storedMasterBoundTokens); final Set<ServiceToken> unboundAndMasterBoundTokens = new HashSet<ServiceToken>(); unboundAndMasterBoundTokens.addAll(unboundTokens); unboundAndMasterBoundTokens.addAll(masterBoundTokens); assertTrue(equal(unboundAndMasterBoundTokens, storedMasterBoundTokens)); // This should only return the unbound and master bound-only tokens. final Set<ServiceToken> storedUserBoundTokens = store.getServiceTokens(masterToken, userIdToken); assertNotNull(storedUserBoundTokens); assertTrue(equal(unboundAndMasterBoundTokens, storedUserBoundTokens)); // This should only return the unbound tokens. 
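        // removeServiceTokens(name, masterToken, userIdToken) narrows by whichever
        // arguments are non-null; passing only the master token (as above) removes
        // every token bound to it, leaving just the unbound tokens behind.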
final Set<ServiceToken> storedUnboundTokens = store.getServiceTokens(null, null); assertNotNull(storedUnboundTokens); assertTrue(equal(unboundTokens, storedUnboundTokens)); } @Test public void removeNoServiceTokens() throws MslException { final MasterToken masterToken = MslTestUtils.getMasterToken(ctx, 1, 1); final UserIdToken userIdToken = MslTestUtils.getUserIdToken(ctx, masterToken, 1, MockEmailPasswordAuthenticationFactory.USER); final ICryptoContext cryptoContext = new NullCryptoContext(); final Set<ServiceToken> masterBoundTokens = MslTestUtils.getMasterBoundServiceTokens(ctx, masterToken); final Set<ServiceToken> userBoundTokens = MslTestUtils.getUserBoundServiceTokens(ctx, masterToken, userIdToken); final Set<ServiceToken> unboundTokens = MslTestUtils.getServiceTokens(ctx, null, null); store.setCryptoContext(masterToken, cryptoContext); store.addUserIdToken(USER_ID, userIdToken); store.addServiceTokens(masterBoundTokens); store.addServiceTokens(userBoundTokens); store.addServiceTokens(unboundTokens); store.removeServiceTokens(null, null, null); // This should only return the unbound and master bound tokens. final Set<ServiceToken> storedMasterBoundTokens = store.getServiceTokens(masterToken, null); assertNotNull(storedMasterBoundTokens); final Set<ServiceToken> unboundAndMasterBoundTokens = new HashSet<ServiceToken>(); unboundAndMasterBoundTokens.addAll(unboundTokens); unboundAndMasterBoundTokens.addAll(masterBoundTokens); assertTrue(equal(unboundAndMasterBoundTokens, storedMasterBoundTokens)); // This should return all of the tokens. final Set<ServiceToken> storedUserBoundTokens = store.getServiceTokens(masterToken, userIdToken); assertNotNull(storedUserBoundTokens); final Set<ServiceToken> allTokens = new HashSet<ServiceToken>(); allTokens.addAll(unboundTokens); allTokens.addAll(userBoundTokens); allTokens.addAll(masterBoundTokens); assertTrue(equal(allTokens, storedUserBoundTokens)); // This should only return the unbound tokens. final Set<ServiceToken> storedUnboundTokens = store.getServiceTokens(null, null); assertNotNull(storedUnboundTokens); assertTrue(equal(unboundTokens, storedUnboundTokens)); } @Test public void removeNamedServiceTokens() throws MslException { final MasterToken masterToken = MslTestUtils.getMasterToken(ctx, 1, 1); final UserIdToken userIdToken = MslTestUtils.getUserIdToken(ctx, masterToken, 1, MockEmailPasswordAuthenticationFactory.USER); final ICryptoContext cryptoContext = new NullCryptoContext(); final Set<ServiceToken> masterBoundTokens = MslTestUtils.getMasterBoundServiceTokens(ctx, masterToken); final Set<ServiceToken> userBoundTokens = MslTestUtils.getUserBoundServiceTokens(ctx, masterToken, userIdToken); final Set<ServiceToken> unboundTokens = MslTestUtils.getServiceTokens(ctx, null, null); store.setCryptoContext(masterToken, cryptoContext); store.addUserIdToken(USER_ID, userIdToken); store.addServiceTokens(masterBoundTokens); store.addServiceTokens(userBoundTokens); store.addServiceTokens(unboundTokens); final Set<ServiceToken> allTokens = new HashSet<ServiceToken>(); allTokens.addAll(masterBoundTokens); allTokens.addAll(userBoundTokens); allTokens.addAll(unboundTokens); final Random random = new Random(); final Set<ServiceToken> removedTokens = new HashSet<ServiceToken>(); for (final ServiceToken token : allTokens) { if (random.nextBoolean()) continue; store.removeServiceTokens(token.getName(), null, null); removedTokens.add(token); } // This should only return tokens that haven't been removed. 
final Set<ServiceToken> storedMasterBoundTokens = store.getServiceTokens(masterToken, null);
        assertNotNull(storedMasterBoundTokens);
        assertFalse(storedMasterBoundTokens.removeAll(removedTokens));
        // This should only return tokens that haven't been removed.
        final Set<ServiceToken> storedUserBoundTokens = store.getServiceTokens(masterToken, userIdToken);
        assertNotNull(storedUserBoundTokens);
        assertFalse(storedUserBoundTokens.removeAll(removedTokens));
        // This should only return tokens that haven't been removed.
        final Set<ServiceToken> storedUnboundTokens = store.getServiceTokens(null, null);
        assertNotNull(storedUnboundTokens);
        assertFalse(storedUnboundTokens.removeAll(removedTokens));
    }

    @Test
    public void clearServiceTokens() throws MslException {
        final MasterToken masterToken = MslTestUtils.getMasterToken(ctx, 1, 1);
        final UserIdToken userIdToken = MslTestUtils.getUserIdToken(ctx, masterToken, 1, MockEmailPasswordAuthenticationFactory.USER);
        final ICryptoContext cryptoContext = new NullCryptoContext();
        final Set<ServiceToken> masterBoundTokens = MslTestUtils.getMasterBoundServiceTokens(ctx, masterToken);
        final Set<ServiceToken> userBoundTokens = MslTestUtils.getUserBoundServiceTokens(ctx, masterToken, userIdToken);
        final Set<ServiceToken> unboundTokens = MslTestUtils.getServiceTokens(ctx, null, null);
        store.setCryptoContext(masterToken, cryptoContext);
        store.addUserIdToken(USER_ID, userIdToken);
        store.addServiceTokens(masterBoundTokens);
        store.addServiceTokens(userBoundTokens);
        store.addServiceTokens(unboundTokens);
        store.clearServiceTokens();
        final Set<ServiceToken> storedMasterBoundTokens = store.getServiceTokens(masterToken, null);
        assertNotNull(storedMasterBoundTokens);
        assertEquals(0, storedMasterBoundTokens.size());
        final Set<ServiceToken> storedUserBoundTokens = store.getServiceTokens(masterToken, userIdToken);
        assertNotNull(storedUserBoundTokens);
        assertEquals(0, storedUserBoundTokens.size());
        final Set<ServiceToken> storedUnboundTokens = store.getServiceTokens(null, null);
        assertNotNull(storedUnboundTokens);
        assertEquals(0, storedUnboundTokens.size());
    }

    @Test
    public void mismatchedGetServiceTokens() throws MslException {
        thrown.expect(MslException.class);
        thrown.expectMslError(MslError.USERIDTOKEN_MASTERTOKEN_MISMATCH);
        final MasterToken masterToken = MslTestUtils.getMasterToken(ctx, 1, 1);
        final UserIdToken userIdToken = MslTestUtils.getUserIdToken(ctx, masterToken, 1, MockEmailPasswordAuthenticationFactory.USER);
        final MasterToken mismatchedMasterToken = MslTestUtils.getMasterToken(ctx, 2, 2);
        store.getServiceTokens(mismatchedMasterToken, userIdToken);
    }

    @Test
    public void missingMasterTokenGetServiceTokens() throws MslException {
        thrown.expect(MslException.class);
        thrown.expectMslError(MslError.USERIDTOKEN_MASTERTOKEN_NULL);
        final MasterToken masterToken = MslTestUtils.getMasterToken(ctx, 1, 1);
        final UserIdToken userIdToken = MslTestUtils.getUserIdToken(ctx, masterToken, 1, MockEmailPasswordAuthenticationFactory.USER);
        store.getServiceTokens(null, userIdToken);
    }

    @Test
    public void mismatchedRemoveServiceTokens() throws MslException {
        thrown.expect(MslException.class);
        thrown.expectMslError(MslError.USERIDTOKEN_MASTERTOKEN_MISMATCH);
        final MasterToken masterToken = MslTestUtils.getMasterToken(ctx, 1, 1);
        final UserIdToken userIdToken = MslTestUtils.getUserIdToken(ctx, masterToken, 1, MockEmailPasswordAuthenticationFactory.USER);
        final MasterToken mismatchedMasterToken = MslTestUtils.getMasterToken(ctx, 2, 2);
        store.removeServiceTokens(null, mismatchedMasterToken, userIdToken);
} @Test public void removeMasterTokenSameSerialNumberServiceTokens() throws MslEncodingException, MslCryptoException, MslException { final MasterToken masterTokenA = MslTestUtils.getMasterToken(ctx, 1, 1); final MasterToken masterTokenB = MslTestUtils.getMasterToken(ctx, 2, 1); final ICryptoContext cryptoContext = new NullCryptoContext(); final String userIdA = USER_ID + "A"; final String userIdB = USER_ID + "B"; final UserIdToken userIdTokenA = MslTestUtils.getUserIdToken(ctx, masterTokenA, 1, MockEmailPasswordAuthenticationFactory.USER); final UserIdToken userIdTokenB = MslTestUtils.getUserIdToken(ctx, masterTokenB, 2, MockEmailPasswordAuthenticationFactory.USER); final Set<ServiceToken> masterBoundServiceTokens = MslTestUtils.getMasterBoundServiceTokens(ctx, masterTokenA); final Set<ServiceToken> serviceTokensA = MslTestUtils.getUserBoundServiceTokens(ctx, masterTokenA, userIdTokenA); final Set<ServiceToken> serviceTokensB = MslTestUtils.getUserBoundServiceTokens(ctx, masterTokenB, userIdTokenB); store.setCryptoContext(masterTokenA, cryptoContext); store.setCryptoContext(masterTokenB, cryptoContext); store.addUserIdToken(userIdA, userIdTokenA); store.addUserIdToken(userIdB, userIdTokenB); store.addServiceTokens(masterBoundServiceTokens); store.addServiceTokens(serviceTokensA); store.addServiceTokens(serviceTokensB); // We still have a master token with serial number 1 so no service // tokens should have been deleted. store.removeCryptoContext(masterTokenA); final Set<ServiceToken> storedServiceTokensA = store.getServiceTokens(masterTokenB, userIdTokenA); final Set<ServiceToken> storedServiceTokensB = store.getServiceTokens(masterTokenB, userIdTokenB); final Set<ServiceToken> expectedServiceTokensA = new HashSet<ServiceToken>(masterBoundServiceTokens); expectedServiceTokensA.addAll(serviceTokensA); assertEquals(expectedServiceTokensA, storedServiceTokensA); final Set<ServiceToken> expectedServiceTokensB = new HashSet<ServiceToken>(masterBoundServiceTokens); expectedServiceTokensB.addAll(serviceTokensB); assertEquals(expectedServiceTokensB, storedServiceTokensB); } @Test public void removeMasterTokenReissuedServiceTokens() throws MslEncodingException, MslCryptoException, MslException { // Master token B has a new serial number, to invalidate the old master // token and its user ID tokens. 
final MasterToken masterTokenA = MslTestUtils.getMasterToken(ctx, 1, 1); final MasterToken masterTokenB = MslTestUtils.getMasterToken(ctx, 1, 2); final ICryptoContext cryptoContext = new NullCryptoContext(); final String userIdA = USER_ID + "A"; final String userIdB = USER_ID + "B"; final UserIdToken userIdTokenA = MslTestUtils.getUserIdToken(ctx, masterTokenA, 1, MockEmailPasswordAuthenticationFactory.USER); final UserIdToken userIdTokenB = MslTestUtils.getUserIdToken(ctx, masterTokenB, 2, MockEmailPasswordAuthenticationFactory.USER); final Set<ServiceToken> masterBoundServiceTokens = MslTestUtils.getMasterBoundServiceTokens(ctx, masterTokenA); final Set<ServiceToken> serviceTokensA = MslTestUtils.getUserBoundServiceTokens(ctx, masterTokenA, userIdTokenA); final Set<ServiceToken> serviceTokensB = MslTestUtils.getUserBoundServiceTokens(ctx, masterTokenB, userIdTokenB); store.setCryptoContext(masterTokenA, cryptoContext); store.setCryptoContext(masterTokenB, cryptoContext); store.addUserIdToken(userIdA, userIdTokenA); store.addUserIdToken(userIdB, userIdTokenB); store.addServiceTokens(masterBoundServiceTokens); store.addServiceTokens(serviceTokensA); store.addServiceTokens(serviceTokensB); // All of master token A's user ID tokens should be deleted. store.removeCryptoContext(masterTokenA); assertTrue(store.getServiceTokens(masterTokenA, userIdTokenA).isEmpty()); final Set<ServiceToken> storedServiceTokensB = store.getServiceTokens(masterTokenB, userIdTokenB); assertEquals(serviceTokensB, storedServiceTokensB); } @Test public void clearCryptoContextsServiceTokens() throws MslEncodingException, MslCryptoException, MslException { // Master token B has a new serial number, to invalidate the old master // token and its user ID tokens. final MasterToken masterTokenA = MslTestUtils.getMasterToken(ctx, 1, 1); final MasterToken masterTokenB = MslTestUtils.getMasterToken(ctx, 1, 2); final ICryptoContext cryptoContext = new NullCryptoContext(); final String userIdA = USER_ID + "A"; final String userIdB = USER_ID + "B"; final UserIdToken userIdTokenA = MslTestUtils.getUserIdToken(ctx, masterTokenA, 1, MockEmailPasswordAuthenticationFactory.USER); final UserIdToken userIdTokenB = MslTestUtils.getUserIdToken(ctx, masterTokenB, 2, MockEmailPasswordAuthenticationFactory.USER); final Set<ServiceToken> unboundServiceTokens = MslTestUtils.getServiceTokens(ctx, null, null); final Set<ServiceToken> serviceTokensA = MslTestUtils.getUserBoundServiceTokens(ctx, masterTokenA, userIdTokenA); final Set<ServiceToken> serviceTokensB = MslTestUtils.getUserBoundServiceTokens(ctx, masterTokenB, userIdTokenB); store.setCryptoContext(masterTokenA, cryptoContext); store.setCryptoContext(masterTokenB, cryptoContext); store.addUserIdToken(userIdA, userIdTokenA); store.addUserIdToken(userIdB, userIdTokenB); store.addServiceTokens(unboundServiceTokens); store.addServiceTokens(serviceTokensA); store.addServiceTokens(serviceTokensB); // All bound service tokens should be deleted. 
store.clearCryptoContexts(); assertEquals(unboundServiceTokens, store.getServiceTokens(masterTokenA, userIdTokenA)); assertEquals(unboundServiceTokens, store.getServiceTokens(masterTokenB, userIdTokenB)); final Set<ServiceToken> storedServiceTokens = store.getServiceTokens(null, null); assertEquals(unboundServiceTokens, storedServiceTokens); } @Test public void removeUserIdTokenServiceTokens() throws MslEncodingException, MslCryptoException, MslException { final MasterToken masterToken = MslTestUtils.getMasterToken(ctx, 1, 1); final ICryptoContext cryptoContext = new NullCryptoContext(); final String userIdA = USER_ID + "A"; final String userIdB = USER_ID + "B"; final UserIdToken userIdTokenA = MslTestUtils.getUserIdToken(ctx, masterToken, 1, MockEmailPasswordAuthenticationFactory.USER); final UserIdToken userIdTokenB = MslTestUtils.getUserIdToken(ctx, masterToken, 2, MockEmailPasswordAuthenticationFactory.USER); final Set<ServiceToken> masterBoundServiceTokens = MslTestUtils.getMasterBoundServiceTokens(ctx, masterToken); final Set<ServiceToken> serviceTokensA = MslTestUtils.getUserBoundServiceTokens(ctx, masterToken, userIdTokenA); final Set<ServiceToken> serviceTokensB = MslTestUtils.getUserBoundServiceTokens(ctx, masterToken, userIdTokenB); store.setCryptoContext(masterToken, cryptoContext); store.addUserIdToken(userIdA, userIdTokenA); store.addUserIdToken(userIdB, userIdTokenB); store.addServiceTokens(masterBoundServiceTokens); store.addServiceTokens(serviceTokensA); store.addServiceTokens(serviceTokensB); // We should still have all the master token bound and user ID token B // bound service tokens. store.removeUserIdToken(userIdTokenA); final Set<ServiceToken> storedServiceTokens = store.getServiceTokens(masterToken, userIdTokenB); final Set<ServiceToken> expectedServiceTokens = new HashSet<ServiceToken>(masterBoundServiceTokens); expectedServiceTokens.addAll(serviceTokensB); assertEquals(expectedServiceTokens, storedServiceTokens); } @Test public void clearUserIdTokensServiceTokens() throws MslEncodingException, MslCryptoException, MslException { final MasterToken masterToken = MslTestUtils.getMasterToken(ctx, 1, 1); final ICryptoContext cryptoContext = new NullCryptoContext(); final String userIdA = USER_ID + "A"; final String userIdB = USER_ID + "B"; final UserIdToken userIdTokenA = MslTestUtils.getUserIdToken(ctx, masterToken, 1, MockEmailPasswordAuthenticationFactory.USER); final UserIdToken userIdTokenB = MslTestUtils.getUserIdToken(ctx, masterToken, 2, MockEmailPasswordAuthenticationFactory.USER); final Set<ServiceToken> masterBoundServiceTokens = MslTestUtils.getMasterBoundServiceTokens(ctx, masterToken); final Set<ServiceToken> serviceTokensA = MslTestUtils.getUserBoundServiceTokens(ctx, masterToken, userIdTokenA); final Set<ServiceToken> serviceTokensB = MslTestUtils.getUserBoundServiceTokens(ctx, masterToken, userIdTokenB); store.setCryptoContext(masterToken, cryptoContext); store.addUserIdToken(userIdA, userIdTokenA); store.addUserIdToken(userIdB, userIdTokenB); store.addServiceTokens(masterBoundServiceTokens); store.addServiceTokens(serviceTokensA); store.addServiceTokens(serviceTokensB); // Only the master token bound service tokens should be left. store.clearUserIdTokens(); final Set<ServiceToken> storedServiceTokens = store.getServiceTokens(masterToken, userIdTokenB); assertEquals(masterBoundServiceTokens, storedServiceTokens); } /** * Service token add/remove stress test runner. 
 *
 * Randomly adds or removes service tokens in combinations of unbound,
 * master token bound, and user ID token bound. Also iterates through the
 * service tokens.
 */
private static class ServiceTokenStressor implements Runnable {
    /**
     * Create a new service token stressor.
     *
     * @param ctx MSL context.
     * @param store MSL store.
     * @param count the number of master token and user ID tokens to create
     *        combinations of.
     */
    public ServiceTokenStressor(final MslContext ctx, final MslStore store, final int count) {
        this.ctx = ctx;
        this.store = store;
        this.count = count;
    }

    /* (non-Javadoc)
     * @see java.lang.Runnable#run()
     */
    @Override
    public void run() {
        final Random r = new Random();
        try {
            for (int i = 0; i < 10 * count; ++i) {
                final int tokenIndex = r.nextInt(count);
                final MasterToken masterToken = MslTestUtils.getMasterToken(ctx, tokenIndex, 1);
                final long userId = r.nextInt(count);
                final UserIdToken userIdToken = MslTestUtils.getUserIdToken(ctx, masterToken, userId, MockEmailPasswordAuthenticationFactory.USER);

                final int option = r.nextInt(6);
                switch (option) {
                    case 0: {
                        final Set<ServiceToken> tokens = MslTestUtils.getServiceTokens(ctx, null, null);
                        store.addServiceTokens(tokens);
                        break;
                    }
                    case 1: {
                        store.setCryptoContext(masterToken, new NullCryptoContext());
                        final Set<ServiceToken> tokens = MslTestUtils.getServiceTokens(ctx, masterToken, null);
                        store.addServiceTokens(tokens);
                        break;
                    }
                    case 2: {
                        store.setCryptoContext(masterToken, new NullCryptoContext());
                        store.addUserIdToken(USER_ID + userId, userIdToken);
                        final Set<ServiceToken> tokens = MslTestUtils.getServiceTokens(ctx, masterToken, userIdToken);
                        store.addServiceTokens(tokens);
                        break;
                    }
                    case 3: {
                        store.getServiceTokens(null, null);
                        break;
                    }
                    case 4: {
                        store.getServiceTokens(masterToken, null);
                        break;
                    }
                    case 5: {
                        store.getServiceTokens(masterToken, userIdToken);
                        break;
                    }
                }
            }
        } catch (final MslMasterTokenException e) {
            throw new MslInternalException("Unexpected master token exception.", e);
        } catch (final MslEncodingException e) {
            throw new MslInternalException("Unexpected master token encoding exception.", e);
        } catch (final MslCryptoException e) {
            throw new MslInternalException("Unexpected master token creation exception.", e);
        } catch (final MslException e) {
            throw new MslInternalException("Master token / user ID token service token query mismatch.", e);
        }
    }

    /** MSL context. */
    private final MslContext ctx;
    /** MSL store. */
    private final MslStore store;
    /** Number of master token and user ID token identities. */
    private final int count;
}

@Test
public void stressServiceTokens() throws InterruptedException {
    final ExecutorService service = Executors.newCachedThreadPool();
    for (int i = 0; i < 10 * MAX_TOKENS; ++i) {
        service.execute(new ServiceTokenStressor(ctx, store, MAX_TOKENS));
    }
    service.shutdown();
    assertTrue(service.awaitTermination(STRESS_TIMEOUT_MILLIS, TimeUnit.MILLISECONDS));
}

/** MSL context. */
private static MslContext ctx;
/** MSL store. */
private MslStore store;
}
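// A note on the pattern above: stressServiceTokens() exercises thread safety by
// flooding an ExecutorService with tasks that concurrently mutate and query the
// store, then asserting that the pool drains within a timeout. The sketch below
// is a minimal distillation of that harness for any Runnable; ConcurrencyHarness
// and its parameters are hypothetical names for illustration, not part of the
// test class above.
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

final class ConcurrencyHarness {
    /**
     * Run {@code copies} instances of {@code task} concurrently.
     *
     * @return true if every task finished before the timeout elapsed.
     */
    static boolean run(final Runnable task, final int copies, final long timeoutMillis) throws InterruptedException {
        final ExecutorService service = Executors.newCachedThreadPool();
        for (int i = 0; i < copies; ++i) {
            service.execute(task);
        }
        // Stop accepting new work, then wait for the submitted tasks to drain.
        service.shutdown();
        return service.awaitTermination(timeoutMillis, TimeUnit.MILLISECONDS);
    }
}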
/* * Copyright 2015 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.drools.core.phreak; import org.drools.core.common.BetaConstraints; import org.drools.core.common.InternalWorkingMemory; import org.drools.core.common.TupleSets; import org.drools.core.reteoo.BetaMemory; import org.drools.core.reteoo.BetaNode; import org.drools.core.reteoo.ExistsNode; import org.drools.core.reteoo.LeftTuple; import org.drools.core.reteoo.LeftTupleSink; import org.drools.core.reteoo.RightTuple; import org.drools.core.reteoo.TupleMemory; import org.drools.core.rule.ContextEntry; import org.drools.core.spi.PropagationContext; import org.drools.core.util.FastIterator; import static org.drools.core.phreak.PhreakJoinNode.updateChildLeftTuple; public class PhreakExistsNode { public void doNode(ExistsNode existsNode, LeftTupleSink sink, BetaMemory bm, InternalWorkingMemory wm, TupleSets<LeftTuple> srcLeftTuples, TupleSets<LeftTuple> trgLeftTuples, TupleSets<LeftTuple> stagedLeftTuples) { if (!existsNode.isRightInputIsRiaNode()) { doNormalNode(existsNode, sink, bm, wm, srcLeftTuples, trgLeftTuples, stagedLeftTuples); } else { PhreakSubnetworkNotExistsNode.doSubNetworkNode(existsNode, sink, bm, srcLeftTuples, trgLeftTuples, stagedLeftTuples); } } public void doNormalNode(ExistsNode existsNode, LeftTupleSink sink, BetaMemory bm, InternalWorkingMemory wm, TupleSets<LeftTuple> srcLeftTuples, TupleSets<LeftTuple> trgLeftTuples, TupleSets<LeftTuple> stagedLeftTuples) { TupleSets<RightTuple> srcRightTuples = bm.getStagedRightTuples().takeAll(); if (srcLeftTuples.getDeleteFirst() != null) { doLeftDeletes(bm, srcLeftTuples, trgLeftTuples, stagedLeftTuples); } if (srcLeftTuples.getUpdateFirst() != null ) { RuleNetworkEvaluator.doUpdatesExistentialReorderLeftMemory(bm, srcLeftTuples); } if ( srcRightTuples.getUpdateFirst() != null ) { RuleNetworkEvaluator.doUpdatesExistentialReorderRightMemory(bm, existsNode, srcRightTuples); // this also preserves the next rightTuple } if (srcRightTuples.getInsertFirst() != null) { // left deletes must come before right deletes. 
// Otherwise right deletes could stage a deletion that is later deleted in the
// right delete, causing potential problems.
doRightInserts(existsNode, sink, bm, wm, srcRightTuples, trgLeftTuples);
}

if (srcRightTuples.getUpdateFirst() != null) {
    // must come after rightInserts and before rightDeletes, to avoid staging clash
    doRightUpdates(existsNode, sink, bm, wm, srcRightTuples, trgLeftTuples, stagedLeftTuples);
}

if (srcRightTuples.getDeleteFirst() != null) {
    // must come after rightUpdates, to avoid staging clash
    doRightDeletes(existsNode, bm, wm, srcRightTuples, trgLeftTuples, stagedLeftTuples);
}

if (srcLeftTuples.getUpdateFirst() != null) {
    doLeftUpdates(existsNode, sink, bm, wm, srcLeftTuples, trgLeftTuples, stagedLeftTuples);
}

if (srcLeftTuples.getInsertFirst() != null) {
    doLeftInserts(existsNode, sink, bm, wm, srcLeftTuples, trgLeftTuples);
}

srcRightTuples.resetAll();
srcLeftTuples.resetAll();
}

public void doLeftInserts(ExistsNode existsNode,
                          LeftTupleSink sink,
                          BetaMemory bm,
                          InternalWorkingMemory wm,
                          TupleSets<LeftTuple> srcLeftTuples,
                          TupleSets<LeftTuple> trgLeftTuples) {
    TupleMemory ltm = bm.getLeftTupleMemory();
    TupleMemory rtm = bm.getRightTupleMemory();
    ContextEntry[] contextEntry = bm.getContext();
    BetaConstraints constraints = existsNode.getRawConstraints();

    for (LeftTuple leftTuple = srcLeftTuples.getInsertFirst(); leftTuple != null; ) {
        LeftTuple next = leftTuple.getStagedNext();
        boolean useLeftMemory = RuleNetworkEvaluator.useLeftMemory(existsNode, leftTuple);
        constraints.updateFromTuple(contextEntry, wm, leftTuple);

        // This method will also remove rightTuples that are from a subnetwork where no left memory is used
        RuleNetworkEvaluator.findLeftTupleBlocker(existsNode, rtm, contextEntry, constraints, leftTuple, useLeftMemory);

        if (leftTuple.getBlocker() != null) {
            // the tuple has a blocker, so the exists condition is satisfied; propagate
            insertChildLeftTuple(sink, trgLeftTuples, leftTuple, leftTuple.getBlocker().getPropagationContext(), useLeftMemory);
        } else if (useLeftMemory) {
            // LeftTuple is not blocked, so add to memory so other RightTuples can match
            ltm.add(leftTuple);
        }
        leftTuple.clearStaged();
        leftTuple = next;
    }
    constraints.resetTuple(contextEntry);
}

public void doRightInserts(ExistsNode existsNode,
                           LeftTupleSink sink,
                           BetaMemory bm,
                           InternalWorkingMemory wm,
                           TupleSets<RightTuple> srcRightTuples,
                           TupleSets<LeftTuple> trgLeftTuples) {
    TupleMemory ltm = bm.getLeftTupleMemory();
    TupleMemory rtm = bm.getRightTupleMemory();
    ContextEntry[] contextEntry = bm.getContext();
    BetaConstraints constraints = existsNode.getRawConstraints();

    for (RightTuple rightTuple = srcRightTuples.getInsertFirst(); rightTuple != null; ) {
        RightTuple next = rightTuple.getStagedNext();
        rtm.add(rightTuple);

        if (ltm != null && ltm.size() > 0) {
            FastIterator it = existsNode.getLeftIterator(ltm);
            constraints.updateFromFactHandle(contextEntry, wm, rightTuple.getFactHandleForEvaluation());
            for (LeftTuple leftTuple = existsNode.getFirstLeftTuple(rightTuple, ltm, it); leftTuple != null; ) {
                // preserve next now, in case we remove this leftTuple
                LeftTuple temp = (LeftTuple) it.next(leftTuple);
                if (leftTuple.getStagedType() == LeftTuple.UPDATE) {
                    // ignore, as it will get processed via left iteration. Children cannot be processed twice
                    leftTuple = temp;
                    continue;
                }

                // we know that only unblocked LeftTuples are still in the memory
                if (constraints.isAllowedCachedRight(contextEntry, leftTuple)) {
                    leftTuple.setBlocker(rightTuple);
                    rightTuple.addBlocked(leftTuple);
                    ltm.remove(leftTuple);
                    insertChildLeftTuple(sink, trgLeftTuples, leftTuple, rightTuple.getPropagationContext(), true);
                }
                leftTuple = temp;
            }
        }
        rightTuple.clearStaged();
        rightTuple = next;
    }
    constraints.resetFactHandle(contextEntry);
}

public void doLeftUpdates(ExistsNode existsNode,
                          LeftTupleSink sink,
                          BetaMemory bm,
                          InternalWorkingMemory wm,
                          TupleSets<LeftTuple> srcLeftTuples,
                          TupleSets<LeftTuple> trgLeftTuples,
                          TupleSets<LeftTuple> stagedLeftTuples) {
    TupleMemory ltm = bm.getLeftTupleMemory();
    TupleMemory rtm = bm.getRightTupleMemory();
    ContextEntry[] contextEntry = bm.getContext();
    BetaConstraints constraints = existsNode.getRawConstraints();
    boolean leftUpdateOptimizationAllowed = existsNode.isLeftUpdateOptimizationAllowed();

    for (LeftTuple leftTuple = srcLeftTuples.getUpdateFirst(); leftTuple != null; ) {
        LeftTuple next = leftTuple.getStagedNext();

        FastIterator rightIt = existsNode.getRightIterator(rtm);
        RightTuple firstRightTuple = existsNode.getFirstRightTuple(leftTuple, rtm, null, rightIt);

        // If in memory, remove it, because we'll need to add it anyway if it's not blocked, to ensure iteration order
        RightTuple blocker = leftTuple.getBlocker();
        if (blocker == null) {
            if (leftTuple.getMemory() != null) { // memory can be null, if blocker was deleted in same do loop
                ltm.remove(leftTuple);
            }
        } else {
            // check if we changed bucket
            if (rtm.isIndexed() && !rightIt.isFullIterator()) {
                // if newRightTuple is null, we assume there was a bucket change and that bucket is empty
                if (firstRightTuple == null || firstRightTuple.getMemory() != blocker.getMemory()) {
                    // we changed bucket, so blocker no longer blocks
                    blocker.removeBlocked(leftTuple);
                    blocker = null;
                }
            }
        }

        constraints.updateFromTuple(contextEntry, wm, leftTuple);

        if (!leftUpdateOptimizationAllowed && blocker != null) {
            blocker.removeBlocked(leftTuple);
            blocker = null;
        }

        // if we were not blocked before (or changed buckets), or the previous blocker no longer blocks, then find the next blocker
        if (blocker == null || !constraints.isAllowedCachedLeft(contextEntry, blocker.getFactHandleForEvaluation())) {
            if (blocker != null) {
                // remove previous blocker if it exists, as we know it doesn't block any more
                blocker.removeBlocked(leftTuple);
            }

            // find first blocker; because it's a modify, we need to start from the beginning again
            for (RightTuple newBlocker = firstRightTuple; newBlocker != null; newBlocker = (RightTuple) rightIt.next(newBlocker)) {
                if (constraints.isAllowedCachedLeft(contextEntry, newBlocker.getFactHandleForEvaluation())) {
                    leftTuple.setBlocker(newBlocker);
                    newBlocker.addBlocked(leftTuple);
                    break;
                }
            }
        }

        if (leftTuple.getBlocker() == null) {
            // not blocked
            ltm.add(leftTuple); // add to memory so other fact handles can attempt to match

            if (leftTuple.getFirstChild() != null) {
                // no need to update pctx, as no right available, and pctx will exist on a parent LeftTuple anyway
                RuleNetworkEvaluator.unlinkAndDeleteChildLeftTuple(leftTuple.getFirstChild(), trgLeftTuples, stagedLeftTuples);
            }
            // with no previous children. do nothing.
} else if (leftTuple.getFirstChild() == null) { // blocked, with no previous children, insert insertChildLeftTuple( sink, trgLeftTuples, leftTuple, leftTuple.getBlocker().getPropagationContext(), true ); } else { // blocked, with previous children, modify LeftTuple childLeftTuple = leftTuple.getFirstChild(); while (childLeftTuple != null) { childLeftTuple.setPropagationContext(leftTuple.getBlocker().getPropagationContext()); updateChildLeftTuple(childLeftTuple, stagedLeftTuples, trgLeftTuples); childLeftTuple.reAddRight(); childLeftTuple = childLeftTuple.getHandleNext(); } } leftTuple.clearStaged(); leftTuple = next; } constraints.resetTuple( contextEntry ); } public void doRightUpdates(ExistsNode existsNode, LeftTupleSink sink, BetaMemory bm, InternalWorkingMemory wm, TupleSets<RightTuple> srcRightTuples, TupleSets<LeftTuple> trgLeftTuples, TupleSets<LeftTuple> stagedLeftTuples) { TupleMemory ltm = bm.getLeftTupleMemory(); TupleMemory rtm = bm.getRightTupleMemory(); ContextEntry[] contextEntry = bm.getContext(); BetaConstraints constraints = existsNode.getRawConstraints(); boolean iterateFromStart = existsNode.isIndexedUnificationJoin() || rtm.getIndexType().isComparison(); for (RightTuple rightTuple = srcRightTuples.getUpdateFirst(); rightTuple != null; ) { RightTuple next = rightTuple.getStagedNext(); if ( ltm != null && ltm.size() > 0 ) { FastIterator leftIt = existsNode.getLeftIterator( ltm ); LeftTuple firstLeftTuple = existsNode.getFirstLeftTuple( rightTuple, ltm, leftIt ); constraints.updateFromFactHandle( contextEntry, wm, rightTuple.getFactHandleForEvaluation() ); // first process non-blocked tuples, as we know only those ones are in the left memory. for ( LeftTuple leftTuple = firstLeftTuple; leftTuple != null; ) { // preserve next now, in case we remove this leftTuple LeftTuple temp = (LeftTuple) leftIt.next( leftTuple ); if ( leftTuple.getStagedType() == LeftTuple.UPDATE ) { // ignore, as it will get processed via left iteration. Children cannot be processed twice leftTuple = temp; continue; } // we know that only unblocked LeftTuples are still in the memory if ( constraints.isAllowedCachedRight( contextEntry, leftTuple ) ) { leftTuple.setBlocker( rightTuple ); rightTuple.addBlocked( leftTuple ); // this is now blocked so remove from memory ltm.remove( leftTuple ); // subclasses like ForallNotNode might override this propagation insertChildLeftTuple( sink, trgLeftTuples, leftTuple, rightTuple.getPropagationContext(), true ); } leftTuple = temp; } } LeftTuple firstBlocked = rightTuple.getTempBlocked(); if ( firstBlocked != null ) { RightTuple rootBlocker = rightTuple.getTempNextRightTuple(); if ( rootBlocker == null ) { iterateFromStart = true; } FastIterator rightIt = existsNode.getRightIterator( rtm ); // iterate all the existing previous blocked LeftTuples for ( LeftTuple leftTuple = firstBlocked; leftTuple != null; ) { LeftTuple temp = leftTuple.getBlockedNext(); leftTuple.clearBlocker(); // must null these as we are re-adding them to the list if ( leftTuple.getStagedType() == LeftTuple.UPDATE ) { // ignore, as it will get processed via left iteration. 
                    // Children cannot be processed twice,
                    // but need to add it back into the list first
                    leftTuple.setBlocker(rightTuple);
                    rightTuple.addBlocked(leftTuple);
                    leftTuple = temp;
                    continue;
                }

                constraints.updateFromTuple(contextEntry, wm, leftTuple);

                if (iterateFromStart) {
                    rootBlocker = existsNode.getFirstRightTuple(leftTuple, rtm, null, rightIt);
                }

                // we know that older tuples have been checked so continue next
                for (RightTuple newBlocker = rootBlocker; newBlocker != null; newBlocker = (RightTuple) rightIt.next(newBlocker)) {
                    // cannot select a RightTuple queued in the delete list
                    // There may be UPDATE RightTuples too, but that's ok. They've already been re-added to the correct bucket, safe to be reprocessed.
                    if (leftTuple.getStagedType() != LeftTuple.DELETE && newBlocker.getStagedType() != LeftTuple.DELETE
                            && constraints.isAllowedCachedLeft(contextEntry, newBlocker.getFactHandleForEvaluation())) {
                        leftTuple.setBlocker(newBlocker);
                        newBlocker.addBlocked(leftTuple);
                        break;
                    }
                }

                if (leftTuple.getBlocker() == null) {
                    // was previously blocked and not in memory, so add
                    if (ltm != null) {
                        ltm.add(leftTuple);
                    }

                    LeftTuple childLeftTuple = leftTuple.getFirstChild();
                    if (childLeftTuple != null) {
                        childLeftTuple.setPropagationContext(rightTuple.getPropagationContext());
                        RuleNetworkEvaluator.unlinkAndDeleteChildLeftTuple(childLeftTuple, trgLeftTuples, stagedLeftTuples);
                    }
                }
                leftTuple = temp;
            }
        }
        rightTuple.clearStaged();
        rightTuple = next;
    }
    constraints.resetFactHandle(contextEntry);
}

public void doLeftDeletes(BetaMemory bm,
                          TupleSets<LeftTuple> srcLeftTuples,
                          TupleSets<LeftTuple> trgLeftTuples,
                          TupleSets<LeftTuple> stagedLeftTuples) {
    TupleMemory ltm = bm.getLeftTupleMemory();

    for (LeftTuple leftTuple = srcLeftTuples.getDeleteFirst(); leftTuple != null; ) {
        LeftTuple next = leftTuple.getStagedNext();
        RightTuple blocker = leftTuple.getBlocker();
        if (blocker == null) {
            if (leftTuple.getMemory() != null) {
                // it may have been staged and never actually added
                ltm.remove(leftTuple);
            }
        } else {
            if (leftTuple.getFirstChild() != null) {
                // no need to update pctx, as no right available, and pctx will exist on a parent LeftTuple anyway
                RuleNetworkEvaluator.unlinkAndDeleteChildLeftTuple(leftTuple.getFirstChild(), trgLeftTuples, stagedLeftTuples);
            }
            blocker.removeBlocked(leftTuple);
        }
        leftTuple.clearStaged();
        leftTuple = next;
    }
}

public void doRightDeletes(ExistsNode existsNode,
                           BetaMemory bm,
                           InternalWorkingMemory wm,
                           TupleSets<RightTuple> srcRightTuples,
                           TupleSets<LeftTuple> trgLeftTuples,
                           TupleSets<LeftTuple> stagedLeftTuples) {
    TupleMemory rtm = bm.getRightTupleMemory();
    TupleMemory ltm = bm.getLeftTupleMemory();
    ContextEntry[] contextEntry = bm.getContext();
    BetaConstraints constraints = existsNode.getRawConstraints();

    for (RightTuple rightTuple = srcRightTuples.getDeleteFirst(); rightTuple != null; ) {
        RightTuple next = rightTuple.getStagedNext();

        FastIterator it = existsNode.getRightIterator(rtm);
        boolean useComparisonIndex = rtm.getIndexType().isComparison();
        RightTuple rootBlocker = useComparisonIndex ? null : (RightTuple) it.next(rightTuple);

        if (rightTuple.getMemory() != null) {
            // it may have been staged and never actually added
            rtm.remove(rightTuple);
        }

        if (rightTuple.getBlocked() != null) {
            for (LeftTuple leftTuple = rightTuple.getBlocked(); leftTuple != null; ) {
                LeftTuple temp = leftTuple.getBlockedNext();
                leftTuple.clearBlocker();
                if (leftTuple.getStagedType() == LeftTuple.UPDATE) {
                    // ignore, as it will get processed via left iteration. Children cannot be processed twice
                    leftTuple = temp;
                    continue;
                }

                constraints.updateFromTuple(contextEntry, wm, leftTuple);

                if (useComparisonIndex) {
                    rootBlocker = (RightTuple) rtm.getFirst(leftTuple);
                }

                // we know that older tuples have been checked so continue from the next one
                for (RightTuple newBlocker = rootBlocker; newBlocker != null; newBlocker = (RightTuple) it.next(newBlocker)) {
                    if (!newBlocker.isDeleted() && constraints.isAllowedCachedLeft(contextEntry, newBlocker.getFactHandleForEvaluation())) {
                        leftTuple.setBlocker(newBlocker);
                        newBlocker.addBlocked(leftTuple);
                        break;
                    }
                }

                if (leftTuple.getBlocker() == null) {
                    // was previously blocked and not in memory, so add
                    ltm.add(leftTuple);

                    LeftTuple childLeftTuple = leftTuple.getFirstChild();
                    if (childLeftTuple != null) {
                        childLeftTuple.setPropagationContext(rightTuple.getPropagationContext());
                        RuleNetworkEvaluator.unlinkAndDeleteChildLeftTuple(childLeftTuple, trgLeftTuples, stagedLeftTuples);
                    }
                }
                leftTuple = temp;
            }
        }
        rightTuple.setBlocked(null);
        rightTuple.clearStaged();
        rightTuple = next;
    }
}

private static void insertChildLeftTuple(LeftTupleSink sink,
                                         TupleSets<LeftTuple> trgLeftTuples,
                                         LeftTuple leftTuple,
                                         PropagationContext pctx,
                                         boolean useLeftMemory) {
    if (!leftTuple.isExpired()) {
        trgLeftTuples.addInsert(sink.createLeftTuple(leftTuple, sink, pctx, useLeftMemory));
    }
}
}
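// The blocker bookkeeping in PhreakExistsNode above reduces to one invariant:
// a left tuple propagates iff it currently holds some matching right tuple as
// its "blocker"; when that blocker disappears, a replacement is searched for
// before the propagation is retracted. The toy model below illustrates just
// that invariant with hypothetical generic types. It is not Drools API and
// deliberately ignores staging, indexing, and child-tuple management.
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.BiPredicate;

final class ExistsModel<L, R> {
    private final List<R> rightMemory = new ArrayList<>(); // all right tuples
    private final Map<L, R> blockers = new HashMap<>();    // left tuple -> cached matching right tuple
    private final BiPredicate<L, R> matches;

    ExistsModel(BiPredicate<L, R> matches) {
        this.matches = matches;
    }

    void insertRight(R right) {
        rightMemory.add(right);
    }

    void deleteRight(R right) {
        rightMemory.remove(right); // the next propagates() call re-resolves any stale blocker
    }

    /** @return true iff the exists() condition currently holds for the given left tuple. */
    boolean propagates(L left) {
        R blocker = blockers.get(left);
        if (blocker != null && rightMemory.contains(blocker)) {
            return true; // cached blocker still valid; nothing to recompute
        }
        for (R candidate : rightMemory) { // blocker gone: search for a replacement
            if (matches.test(left, candidate)) {
                blockers.put(left, candidate);
                return true;
            }
        }
        blockers.remove(left); // no match remains; the propagation would be retracted
        return false;
    }
}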
/******************************************************************************* * Copyright 2011 The Regents of the University of California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. ******************************************************************************/ package org.ohmage.reminders.types.time; import android.app.AlertDialog; import android.app.Dialog; import android.content.Context; import android.content.DialogInterface; import android.os.Bundle; import android.preference.CheckBoxPreference; import android.preference.Preference; import android.preference.Preference.OnPreferenceChangeListener; import android.preference.Preference.OnPreferenceClickListener; import android.preference.PreferenceActivity; import android.preference.PreferenceScreen; import android.view.View; import android.widget.Button; import android.widget.Toast; import org.ohmage.reminders.R; import org.ohmage.reminders.base.Actions; import org.ohmage.reminders.base.TriggerActionDesc; import org.ohmage.reminders.config.TrigUserConfig; import org.ohmage.reminders.ui.ActionSelectorView; import org.ohmage.reminders.ui.TriggerListActivity; import org.ohmage.reminders.utils.TimePickerPreference; import java.util.LinkedHashMap; public class TimeTrigEditActivity extends PreferenceActivity implements View.OnClickListener, OnPreferenceClickListener, OnPreferenceChangeListener, DialogInterface.OnMultiChoiceClickListener, DialogInterface.OnClickListener { private static final String TAG = "TimeTrigEditActivity"; public static final String KEY_TRIG_DESC = "trig_desc"; public static final String KEY_ACT_DESC = "act_desc"; public static final String KEY_TRIG_ID = "trig_id"; public static final String KEY_ADMIN_MODE = "admin_mode"; private static final String KEY_SAVE_DAYS = "days"; private static final String KEY_SAVE_REPEAT_STATUS = "repeat_status"; private static final String PREF_KEY_TRIGGER_TIME = "trigger_time"; // private static final String PREF_KEY_RANDOMIZE = // "randomize_trigger_time"; private static final String PREF_KEY_ENABLE_RANGE = "enable_time_range"; private static final String PREF_KEY_START_TIME = "interval_start_time"; private static final String PREF_KEY_END_TIME = "interval_end_time"; private static final String PREF_KEY_REPEAT_DAYS = "repeat_days"; private static final String PREF_KEY_ACTIONS = "actions"; private static final int DIALOG_ID_REPEAT_SEL = 0; private static final int DIALOG_ID_INVALID_TIME_ALERT = 1; private static final int DIALOG_ID_ACTION_SEL = 2; private static final int DIALOG_ID_NO_SURVEYS_SELECTED = 3; private TimeTrigDesc mTrigDesc; private TriggerActionDesc mActDesc; private String[] mDays; private boolean[] mRepeatStatus; private boolean mAdminMode = false; private AlertDialog mRepeatDialog = null; private boolean[] mActSelected = null; private Actions mActions; public interface ExitListener { public void onDone(Context context, int trigId, String trigDesc, String actDesc); } private static ExitListener mExitListener = null; private int mTrigId = 0; @Override public void onCreate(Bundle 
savedInstanceState) { super.onCreate(savedInstanceState); addPreferencesFromResource(R.xml.trig_time_edit_preferences); setContentView(R.layout.trigger_editor); mTrigDesc = new TimeTrigDesc(); mActDesc = new TriggerActionDesc(); String[] selectParams = getIntent().getStringArrayExtra(TriggerListActivity.EXTRA_ACTIONS); mActions = new Actions(this, selectParams); PreferenceScreen screen = getPreferenceScreen(); int prefCount = screen.getPreferenceCount(); for (int i = 0; i < prefCount; i++) { screen.getPreference(i).setOnPreferenceClickListener(this); screen.getPreference(i).setOnPreferenceChangeListener(this); } ((Button) findViewById(R.id.trig_edit_done)).setOnClickListener(this); ((Button) findViewById(R.id.trig_edit_cancel)).setOnClickListener(this); String config = null; String action = null; if (savedInstanceState != null) { config = savedInstanceState.getString(KEY_TRIG_DESC); action = savedInstanceState.getString(KEY_ACT_DESC); } else { config = getIntent().getStringExtra(KEY_TRIG_DESC); action = getIntent().getStringExtra(KEY_ACT_DESC); } mAdminMode = getIntent().getBooleanExtra(KEY_ADMIN_MODE, false); if (config != null) { mTrigId = getIntent().getIntExtra(KEY_TRIG_ID, 0); if (mTrigDesc.loadString(config) && mActDesc.loadString(action)) { initializeGUI(); } else { getPreferenceScreen().setEnabled(false); Toast.makeText(this, R.string.trigger_invalid_settings, Toast.LENGTH_SHORT).show(); } } if (savedInstanceState == null) { LinkedHashMap<String, Boolean> repeatList = mTrigDesc.getRepeat(); mDays = repeatList.keySet().toArray(new String[repeatList.size()]); mRepeatStatus = new boolean[mDays.length]; updateRepeatStatusArray(); // if there are any preselected actions specified when the activity // is first created // and there's currently nothing in the action description, load the // selected options // into the action description as if they were previously selected if (mActDesc.getCount() <= 0 && getIntent().hasExtra(TriggerListActivity.KEY_PRESELECTED_ACTIONS)) { String[] preselectedActions = getIntent().getStringArrayExtra( TriggerListActivity.KEY_PRESELECTED_ACTIONS); for (int i = 0; i < preselectedActions.length; ++i) { mActDesc.addSurvey(preselectedActions[i]); } updateActionsPrefStatus(); } } else { mDays = savedInstanceState.getStringArray(KEY_SAVE_DAYS); mRepeatStatus = savedInstanceState.getBooleanArray(KEY_SAVE_REPEAT_STATUS); } } @Override protected void onSaveInstanceState(Bundle outState) { super.onSaveInstanceState(outState); udateTriggerDesc(); outState.putString(KEY_TRIG_DESC, mTrigDesc.toString()); outState.putString(KEY_ACT_DESC, mActDesc.toString()); outState.putStringArray(KEY_SAVE_DAYS, mDays); outState.putBooleanArray(KEY_SAVE_REPEAT_STATUS, mRepeatStatus); } public static void setOnExitListener(ExitListener listener) { mExitListener = listener; } @SuppressWarnings("unused") private void initializeGUI() { TimePickerPreference trigTimePref = (TimePickerPreference) getPreferenceScreen() .findPreference(PREF_KEY_TRIGGER_TIME); CheckBoxPreference rangePref = (CheckBoxPreference) findPreference(PREF_KEY_ENABLE_RANGE); // CheckBoxPreference randPref = (CheckBoxPreference) // findPreference(PREF_KEY_RANDOMIZE); TimePickerPreference startPref = (TimePickerPreference) getPreferenceScreen() .findPreference(PREF_KEY_START_TIME); TimePickerPreference endPref = (TimePickerPreference) getPreferenceScreen().findPreference( PREF_KEY_END_TIME); if (mTrigDesc.isRangeEnabled()) { rangePref.setChecked(true); /* * if(mTrigDesc.isRandomized() || * 
SharedPreferencesHelper.TRIGGERS_TIMERANGE_ALWAYS_RANDOM) { * randPref.setChecked(true); } else { * trigTimePref.setTime(mTrigDesc.getTriggerTime()); } */ startPref.setTime(mTrigDesc.getRangeStart()); endPref.setTime(mTrigDesc.getRangeEnd()); } else { trigTimePref.setTime(mTrigDesc.getTriggerTime()); } updateTriggerTimePrefStatus(); updateRepeatPrefStatus(); updateActionsPrefStatus(); if (!mAdminMode && !TrigUserConfig.editTimeTriggerTime) { trigTimePref.setEnabled(false); } if (!mAdminMode && !TrigUserConfig.editTimeTriggerRange) { rangePref.setEnabled(false); } if (!mAdminMode && !TrigUserConfig.editTimeTriggerRange) { // randPref.setEnabled(false); } if (!mAdminMode && !TrigUserConfig.editTimeTriggerRange) { startPref.setEnabled(false); } if (!mAdminMode && !TrigUserConfig.editTimeTriggerRange) { endPref.setEnabled(false); } Preference repeatPref = getPreferenceScreen().findPreference(PREF_KEY_REPEAT_DAYS); if (!mAdminMode && !TrigUserConfig.editTimeTriggerRepeat) { repeatPref.setEnabled(false); } Preference actionsPref = getPreferenceScreen().findPreference(PREF_KEY_ACTIONS); if (!mAdminMode && !TrigUserConfig.editTriggerActions) { actionsPref.setEnabled(false); } ((Button) findViewById(R.id.trig_edit_done)).setEnabled(mAdminMode || TrigUserConfig.editTimeTrigger); } @SuppressWarnings("unused") private void udateTriggerDesc() { CheckBoxPreference rangePref = (CheckBoxPreference) findPreference(PREF_KEY_ENABLE_RANGE); // CheckBoxPreference randPref = (CheckBoxPreference) // findPreference(PREF_KEY_RANDOMIZE); TimePickerPreference timePref = (TimePickerPreference) getPreferenceScreen() .findPreference(PREF_KEY_TRIGGER_TIME); TimePickerPreference startPref = (TimePickerPreference) getPreferenceScreen() .findPreference(PREF_KEY_START_TIME); TimePickerPreference endPref = (TimePickerPreference) getPreferenceScreen().findPreference( PREF_KEY_END_TIME); if (rangePref.isChecked()) { mTrigDesc.setRangeEnabled(true); /* * if(randPref.isChecked() || * SharedPreferencesHelper.TRIGGERS_TIMERANGE_ALWAYS_RANDOM) { * mTrigDesc.setRandomized(true); } else { * mTrigDesc.setRandomized(false); * mTrigDesc.setTriggerTime(timePref.getTime()); } */ // FAISAL: now we're always random when rangePref is true mTrigDesc.setRandomized(true); mTrigDesc.setRangeStart(startPref.getTime()); mTrigDesc.setRangeEnd(endPref.getTime()); } else { mTrigDesc.setRangeEnabled(false); mTrigDesc.setRandomized(false); mTrigDesc.setTriggerTime(timePref.getTime()); } } @Override public void onClick(View v) { int id = v.getId(); if (id == R.id.trig_edit_done) { if (mExitListener != null) { udateTriggerDesc(); if (!mTrigDesc.validate()) { // if the time settings are invalid, tell the user and abort showDialog(DIALOG_ID_INVALID_TIME_ALERT); return; } else if (mActDesc.getSurveys().length <= 0) { // if no surveys were selected, tell the user and abort showDialog(DIALOG_ID_NO_SURVEYS_SELECTED); return; } // since we didn't hit any issues, we must be good; invoke the // exit handler which stores the trigger mExitListener.onDone(this, mTrigId, mTrigDesc.toString(), mActDesc.toString()); } } finish(); } private void updateTriggerTimePrefStatus() { TimePickerPreference trigTimePref = (TimePickerPreference) getPreferenceScreen() .findPreference(PREF_KEY_TRIGGER_TIME); CheckBoxPreference rangePref = (CheckBoxPreference) findPreference(PREF_KEY_ENABLE_RANGE); // CheckBoxPreference randPref = (CheckBoxPreference) // findPreference(PREF_KEY_RANDOMIZE); // if triggers_timerange_always_random is set, then make randPref = // rangePref // 
randPref.setChecked(SharedPreferencesHelper.TRIGGERS_TIMERANGE_ALWAYS_RANDOM // && rangePref.isChecked()); if (rangePref.isChecked()) { trigTimePref.setSummary(R.string.trigger_time_randomized); trigTimePref.setEnabled(false); } else { trigTimePref.setSummary(trigTimePref.getTime().toString()); trigTimePref.setEnabled(true); } } private void updateRepeatPrefStatus() { Preference repeatPref = getPreferenceScreen().findPreference(PREF_KEY_REPEAT_DAYS); repeatPref.setSummary(mTrigDesc.getRepeatDescription()); } private void updateRepeatStatusArray() { LinkedHashMap<String, Boolean> repeatList = mTrigDesc.getRepeat(); for (int i = 0; i < mDays.length; i++) { mRepeatStatus[i] = repeatList.get(mDays[i]); } } private void updateActionsPrefStatus() { Preference actionsPref = getPreferenceScreen().findPreference(PREF_KEY_ACTIONS); if (mActDesc.getSurveys().length > 0) { StringBuilder actions = new StringBuilder(); for (String id : mActDesc.getSurveys()) { if (actions.length() != 0) { actions.append(", "); } actions.append(mActions.getName(id)); } actionsPref.setSummary(actions.toString()); } else { actionsPref.setSummary(R.string.trigger_no_actions); } } private String stringArrayToString(String[] strings) { if (strings.length == 0) { return ""; } String string = ""; for (String s : strings) { string = string.concat(s).concat(", "); } return string.substring(0, string.length() - 2); } private Dialog createRepeatSelDialog() { updateRepeatStatusArray(); mRepeatDialog = new AlertDialog.Builder(this).setTitle(R.string.trigger_time_select_days) .setPositiveButton(android.R.string.ok, this).setNegativeButton(android.R.string.cancel, this) .setMultiChoiceItems(mDays, mRepeatStatus, this).create(); return mRepeatDialog; } private Dialog createInvalidTimeAlert() { return new AlertDialog.Builder(this).setTitle(R.string.trigger_time_invalid_settings) .setNegativeButton(android.R.string.cancel, null) .setMessage(R.string.trigger_time_invalid_text).create(); } private Dialog createNoSurveysSelectedAlert() { return new AlertDialog.Builder(this).setTitle(R.string.trigger_time_no_survey_selected) .setNegativeButton(android.R.string.cancel, null) .setMessage(R.string.trigger_time_no_survey_selected_text).create(); } @Override protected Dialog onCreateDialog(int id) { switch (id) { case DIALOG_ID_REPEAT_SEL: return createRepeatSelDialog(); case DIALOG_ID_INVALID_TIME_ALERT: return createInvalidTimeAlert(); case DIALOG_ID_NO_SURVEYS_SELECTED: return createNoSurveysSelectedAlert(); case DIALOG_ID_ACTION_SEL: return createEditActionDialog(); } return null; } @Override public boolean onPreferenceClick(Preference pref) { if (pref.getKey().equals(PREF_KEY_ENABLE_RANGE)) { updateTriggerTimePrefStatus(); } else if (pref.getKey().equals(PREF_KEY_REPEAT_DAYS)) { removeDialog(DIALOG_ID_REPEAT_SEL); showDialog(DIALOG_ID_REPEAT_SEL); } else if (pref.getKey().equals(PREF_KEY_ACTIONS)) { removeDialog(DIALOG_ID_ACTION_SEL); showDialog(DIALOG_ID_ACTION_SEL); } return false; } @Override public boolean onPreferenceChange(Preference preference, Object newValue) { return true; } @Override public void onClick(DialogInterface dialog, int which, boolean isChecked) { mRepeatStatus[which] = isChecked; int repeatCount = 0; for (int i = 0; i < mRepeatStatus.length; i++) { if (mRepeatStatus[i]) { repeatCount++; } } if (mRepeatDialog != null) { if (mRepeatDialog.isShowing()) { mRepeatDialog.getButton(AlertDialog.BUTTON_POSITIVE).setEnabled(repeatCount != 0); } } } @Override public void onClick(DialogInterface dialog, int which) { if (which == 
DialogInterface.BUTTON_POSITIVE) { for (int i = 0; i < mDays.length; i++) { mTrigDesc.setRepeatStatus(mDays[i], mRepeatStatus[i]); } } dialog.dismiss(); updateRepeatPrefStatus(); } private Dialog createEditActionDialog() { if(mActions.size() == 0) { Toast.makeText(this, R.string.no_surveys, Toast.LENGTH_SHORT).show(); return null; } if (mActSelected == null) { mActSelected = new boolean[mActions.size()]; for (int i = 0; i < mActSelected.length; i++) { mActSelected[i] = mActDesc.hasSurvey(mActions.getId(i)); } } AlertDialog.Builder builder = new AlertDialog.Builder(this) .setTitle(R.string.trigger_select_actions).setNegativeButton(android.R.string.cancel, null) .setView(new ActionSelectorView(getBaseContext(), mActions.getNames(), mActSelected)); /* * AlertDialog.Builder builder = new AlertDialog.Builder(this) * .setTitle(R.string.trigger_select_actions) * .setNegativeButton(R.string.cancel, null) * .setMultiChoiceItems(mActions, mActSelected, new * DialogInterface.OnMultiChoiceClickListener() { * @Override public void onClick(DialogInterface dialog, int which, * boolean isChecked) { mActSelected[which] = isChecked; } }); */ if (mAdminMode || TrigUserConfig.editTriggerActions) { builder.setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { mActDesc.clearAllSurveys(); for (int i = 0; i < mActSelected.length; i++) { if (mActSelected[i]) { mActDesc.addSurvey(mActions.getId(i)); } } dialog.dismiss(); updateActionsPrefStatus(); // handleActionSelection(mDialogTrigId, desc); } }); } return builder.create(); } /* * public void handleActionSelection(int trigId, TriggerActionDesc desc) { * String prevActDesc = mDb.getActionDescription(trigId); TriggerActionDesc * prevDesc = new TriggerActionDesc(); prevDesc.loadString(prevActDesc); * mDb.updateActionDescription(trigId, desc.toString()); mCursor.requery(); * Notifier.refreshNotification(this, mCampaignUrn, true); * if(desc.getCount() == 0 && prevDesc.getCount() !=0) { * toggleTrigger(trigId, false); } if(desc.getCount() != 0 && * prevDesc.getCount() == 0) { toggleTrigger(trigId, true); } } */ }
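// TimeTrigEditActivity above relies on the pre-Fragment managed-dialog pattern:
// onPreferenceClick() calls removeDialog() before showDialog() so that
// onCreateDialog() rebuilds the dialog with fresh state rather than Android
// reusing a cached instance. The sketch below is a minimal standalone
// illustration of the multi-choice dialog built in createRepeatSelDialog();
// the class name, the literal title, and the days/checked arrays are
// placeholders, not values from the activity, and the lambda assumes a
// Java 8+ (or desugared) Android build.
import android.app.AlertDialog;
import android.app.Dialog;
import android.content.Context;

final class DayPickerDialog {
    /** Build a multi-choice day picker; 'checked' is updated in place as the user toggles items. */
    static Dialog create(final Context context, final String[] days, final boolean[] checked) {
        return new AlertDialog.Builder(context)
                .setTitle("Select days") // the activity uses R.string.trigger_time_select_days instead
                .setMultiChoiceItems(days, checked,
                        (dialog, which, isChecked) -> checked[which] = isChecked)
                .setPositiveButton(android.R.string.ok, null)
                .setNegativeButton(android.R.string.cancel, null)
                .create();
    }
}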
package sagex.phoenix.remote; import java.io.File; import java.lang.reflect.InvocationTargetException; import java.net.URLDecoder; import java.util.ArrayList; import java.util.Arrays; import java.util.Date; import java.util.Iterator; import java.util.List; import java.util.Vector; import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.regex.PatternSyntaxException; import javax.servlet.http.HttpServletRequest; import sagex.SageAPI; import sagex.api.FavoriteAPI; import sagex.api.Utility; /** * Direct copy of Nielm's Search but modified to use Sagex apis */ public class Search { protected static String charset = "UTF-8"; public static final Long BEGINNING_OF_TIME = new Long(0); public static final Long END_OF_TIME = new Long(Long.MAX_VALUE); public static final Long NOW = null; public static final String SEARCH_TYPE_AIRINGS = "Airings"; public static final String SEARCH_TYPE_MEDIA_FILES = "MediaFiles"; public static final String SEARCH_TYPE_TV_FILES = "TVFiles"; private String searchString = ""; private boolean exactTitle = false; private String favoriteId = null; private String searchType = SEARCH_TYPE_AIRINGS; private boolean video = true; private boolean dvd = true; private boolean music = false; private boolean picture = false; private String[] categories = null; private String[] channels = null; private String[] fields = null; private String filename = null; private boolean regex = false; private boolean casesensitive = false; private String watched = null; private String dontlike = null; private String favorite = null; private String firstRuns = null; private String hdtv = null; private String archived = null; private String manrec = null; private String autodelete = null; private String partials = null; private String timeRange = null; private String sort1 = null; private String sort2 = null; private Long starttime = NOW; private Long endtime = END_OF_TIME; public Search() { } /** * @return the archived */ public String getArchived() { if (archived == null) { return "any"; } else { return archived; } } public String getManRec() { if (manrec == null) { return "any"; } else { return manrec; } } public void setManRec(String manrec) { this.manrec = manrec; } public void setArchived(String archived) { this.archived = archived; } /** * @return the autodelete */ public String getAutodelete() { if (autodelete == null) { return "any"; } else { return autodelete; } } /** * @param autodelete the autodelete to set */ public void setAutodelete(String autodelete) { this.autodelete = autodelete; } /** * @return if filename search is case sensitive */ public boolean isCaseSensitive() { return casesensitive; } /** * @param casesensitive filename search is case sensitive */ public void setCaseSensitive(boolean casesensitive) { this.casesensitive = casesensitive; } /** * @return the categories */ public String[] getCategories() { return categories; } /** * @param categories the categories to set */ public void setCategories(String[] categories) { this.categories = categories; } public String getSearchName() { StringBuffer searchName = new StringBuffer(); if (searchString != null) { // search string specified if (searchString.length() == 0) { // 0-length search string -- search for all searchName.append("All "); } // search string given: if (isSearchTypeAirings() || isSearchTypeMediaFiles() || isSearchTypeTVFiles()) { if (isSearchTypeAirings()) { searchName.append("Airings "); } else if (isSearchTypeMediaFiles()) { searchName.append("Imported "); boolean addcomma = false; if 
(isVideo()) { searchName.append("Videos "); addcomma = true; } if (isDVD()) { if (addcomma) searchName.append(", "); searchName.append("DVDs"); addcomma = true; } if (isMusic()) { if (addcomma) searchName.append(", "); searchName.append("Music"); addcomma = true; } if (isPicture()) { if (addcomma) searchName.append(", "); searchName.append("Pictures "); addcomma = true; } } else if (isSearchTypeTVFiles()) { searchName.append("Recordings "); } searchName.append("matching \""); searchName.append(searchString); searchName.append('"'); } else { searchName.append("Unknown search type"); } } else if (favoriteId != null) { Object favorite = null; try { favorite = FavoriteAPI.GetFavoriteForID(Integer.parseInt(getFavoriteId())); if (favorite != null) { if (isSearchTypeTVFiles()) { searchName.append("Recordings of Favorite: \""); } else { searchName.append("Airings of Favorite: \""); } searchName.append(FavoriteAPI.GetFavoriteDescription(favorite)); searchName.append('"'); } } catch (Exception e) { searchName.append("Favorite "); } } else { searchName.append("Unknown search type"); } // TODO handle filtering return searchName.toString(); } /** * @return the channels */ public String[] getChannels() { return channels; } /** * @param channels the channels to set */ public void setChannels(String[] channels) { this.channels = channels; } /** * @return the dontlike */ public String getDontlike() { if (dontlike == null) { return "any"; } else { return dontlike; } } /** * @param dontlike the dontlike to set */ public void setDontlike(String dontlike) { this.dontlike = dontlike; } /** * @return if dvd is the imported media file type */ public boolean isDVD() { return dvd; } /** * @param dvd search for imported dvds */ public void setDVD(boolean dvd) { this.dvd = dvd; } /** * @return the endtime */ public Long getEndtime() { if (endtime == NOW) { return new Long(new Date().getTime()); } return endtime; } /** * @param endtime the endtime to set */ public void setEndtime(Long endtime) { this.endtime = endtime; } /** * @return if search string is matched exactly */ public boolean isExactTitle() { return exactTitle; } /** * @param exactTitle match search string exactly */ public void setExactTitle(boolean exactTitle) { this.exactTitle = exactTitle; } /** * @return the favorite */ public String getFavorite() { if (favorite == null) { return "any"; } else { return favorite; } } /** * @param favorite the favorite to set */ public void setFavorite(String favorite) { this.favorite = favorite; } /** * @return the favoriteId */ public String getFavoriteId() { return favoriteId; } /** * @param favoriteId the favoriteId to set */ public void setFavoriteId(String favoriteId) { this.favoriteId = favoriteId; } /** * @return the fields */ public String[] getFields() { return fields; } /** * @param fields the fields to set */ public void setFields(String[] fields) { this.fields = fields; } /** * @return the filename */ public String getFilename() { return filename; } /** * @param filename the filename to set */ public void setFilename(String filename) { this.filename = filename; } /** * @return the firstRuns */ public String getFirstRuns() { if (firstRuns == null) { return "any"; } else { return firstRuns; } } /** * @param hdtv the hdtv to set */ public void setHDTV(String hdtv) { this.hdtv = hdtv; } /** * @return the hdtv */ public String getHDTV() { if (hdtv == null) { return "any"; } else { return hdtv; } } /** * @param firstRuns the firstRuns to set */ public void setFirstRuns(String firstRuns) { this.firstRuns = 
firstRuns; } /** * @return if music is the imported media file type */ public boolean isMusic() { return music; } /** * @param music search for imported music */ public void setMusic(boolean music) { this.music = music; } /** * @return the partials */ public String getPartials() { if (partials == null) { return "none"; } else { return partials; } } /** * @param partials the partials to set */ public void setPartials(String partials) { this.partials = partials; } /** * @return if picture is the imported media file type */ public boolean isPicture() { return picture; } /** * @param picture search for imported pictures */ public void setPicture(boolean picture) { this.picture = picture; } /** * @param req the http request with properties. Convenience method for * calling code. */ public void setProperties(HttpServletRequest req) { setArchived(req.getParameter("archived")); setAutodelete(req.getParameter("autodelete")); setCaseSensitive("on".equals(req.getParameter("casesensitive"))); setCategories(req.getParameterValues("Categories")); setChannels(req.getParameterValues("Channels")); setDontlike(req.getParameter("dontlike")); setDVD("on".equals(req.getParameter("DVD"))); // setEndtime(null); setExactTitle("on".equals(req.getParameter("ExactTitle"))); setFavorite(req.getParameter("favorite")); setFavoriteId(req.getParameter("FavoriteId")); setFields(req.getParameterValues("search_fields")); setFilename(req.getParameter("filename")); setFirstRuns(req.getParameter("firstruns")); setHDTV(req.getParameter("hdtv")); // setFiltertime(false); setManRec(req.getParameter("manrec")); setMusic("on".equals(req.getParameter("Music"))); setPartials(req.getParameter("partials")); setPicture("on".equals(req.getParameter("Picture"))); setRegex("on".equals(req.getParameter("regex"))); setSearchString(req.getParameter("SearchString")); setSearchType(req.getParameter("searchType")); setSort1(req.getParameter("sort1")); setSort2(req.getParameter("sort2")); // setStarttime(null); setTimeRange(req.getParameter("TimeRange")); setVideo("on".equals(req.getParameter("Video"))); setWatched(req.getParameter("watched")); } /** * @return if regex is used for filename */ public boolean isRegex() { return regex; } /** * @param regex use regex for filename */ public void setRegex(boolean regex) { this.regex = regex; } /** * @return the searchString */ public String getSearchString() { return searchString; } /** * @param searchString the searchString to set */ public void setSearchString(String searchString) { this.searchString = searchString; } /** * @return the searchType */ public String getSearchType() { if (searchType == null) { return SEARCH_TYPE_AIRINGS; } else { return searchType; } } /** * @param searchType the searchType to set */ public void setSearchType(String searchType) { this.searchType = searchType; } /** * @returns if the search type is Airings */ public boolean isSearchTypeAirings() { return getSearchType().equalsIgnoreCase(SEARCH_TYPE_AIRINGS); } /** * @returns if the search type is Media Files */ public boolean isSearchTypeMediaFiles() { return getSearchType().equalsIgnoreCase(SEARCH_TYPE_MEDIA_FILES); } /** * @returns if the search type is TV Files */ public boolean isSearchTypeTVFiles() { return getSearchType().equalsIgnoreCase(SEARCH_TYPE_TV_FILES); } /** * @return the sort1 */ public String getSort1() { if (sort1 == null) { return "airdate_asc"; } else { return sort1; } } /** * @param sort1 the sort1 to set */ public void setSort1(String sort1) { this.sort1 = sort1; } /** * @return the sort2 */ public 
String getSort2() { if (sort2 == null) { return "none"; } else { return sort2; } } /** * @param sort2 the sort2 to set */ public void setSort2(String sort2) { this.sort2 = sort2; } /** * @return the starttime */ public Long getStarttime() { if (starttime == NOW) { return new Long(new Date().getTime()); } return starttime; } /** * @param starttime the starttime to set */ public void setStarttime(Long starttime) { this.starttime = starttime; } /** * @return if time is filtered */ public boolean isTimeFiltered() { // no time filter on media files return (!isSearchTypeMediaFiles() && !isSearchTypeTVFiles() && ((starttime != BEGINNING_OF_TIME) || (endtime != END_OF_TIME))); } /** * @param filtertime * the filtertime to set */ /* * public void setTimeFiltered(boolean filtertime) { this.filtertime = * filtertime; } */ /** * @return the timeRange */ public String getTimeRange() { if (timeRange == null) { return "0"; } else { return timeRange; } } /** * @param timeRange the timeRange to set */ public void setTimeRange(String timeRange) { if (timeRange != null) { if (timeRange.equals("0")) { // future airings setStarttime(NOW); setEndtime(END_OF_TIME); } else if (timeRange.equals("-1")) { // all airings setStarttime(BEGINNING_OF_TIME); setEndtime(END_OF_TIME); } else if (timeRange.equals("-999")) { // past airings // the past setEndtime(NOW); setStarttime(BEGINNING_OF_TIME); } else { // next n hours try { long timeRangeLong = Long.parseLong(timeRange); if (timeRangeLong > 0) { setStarttime(NOW); setEndtime(new Long(getStarttime().longValue() + timeRangeLong * 60 * 60 * 1000)); } } catch (Exception e) { } } } else { // future airings setStarttime(NOW); setEndtime(END_OF_TIME); } this.timeRange = timeRange; } /** * @return if video is the imported media file type */ public boolean isVideo() { return video; } /** * @param video search for imported videos */ public void setVideo(boolean video) { this.video = video; } /** * @return the watched */ public String getWatched() { if (watched == null) { return "any"; } else { return watched; } } /** * @param watched the watched to set */ public void setWatched(String watched) { this.watched = watched; } public Object doSearch() throws Exception { Object searchResults = null; if (searchString != null) { if (searchString != null && searchString.length() > 0) { // search string given: if (isSearchTypeAirings() || isSearchTypeMediaFiles() || isSearchTypeTVFiles()) { if (isExactTitle()) { searchResults = SageAPI.call("SearchByTitle", new Object[]{searchString}); } else { String[] fields = getFields(); List<String> fields_l = null; if (fields != null) fields_l = Arrays.asList(fields); else fields_l = new Vector<String>(); searchResults = SageAPI.call("SearchSelectedFields", new Object[]{ searchString, Boolean.FALSE, // case sensitive new Boolean(fields == null || fields_l.contains("title") || fields_l.contains("**ALL**")), new Boolean(fields_l.contains("episode") || fields_l.contains("**ALL**")), new Boolean(fields_l.contains("desc") || fields_l.contains("**ALL**")), new Boolean(fields_l.contains("people") || fields_l.contains("**ALL**")), new Boolean(fields_l.contains("category") || fields_l.contains("**ALL**")), new Boolean(fields_l.contains("rated") || fields_l.contains("**ALL**")), new Boolean(fields_l.contains("extrated") || fields_l.contains("**ALL**")), new Boolean(fields_l.contains("year") || fields_l.contains("**ALL**")), new Boolean(fields_l.contains("misc") || fields_l.contains("**ALL**")),}); } if (isSearchTypeMediaFiles() || isSearchTypeTVFiles()) { // 
only get airings with MediaFile searchResults = SageAPI.call("FilterByMethod", new Object[]{searchResults, "GetMediaFileForAiring", null, Boolean.FALSE}); // convert to MediaFiles int numres = Utility.Size(searchResults); Object newSearchResults = null; for (int i = 0; i < numres; i++) newSearchResults = SageAPI.call( "DataUnion", new Object[]{ newSearchResults, SageAPI.call("GetMediaFileForAiring", new Object[]{Utility.GetElement(searchResults, i)})}); boolean tvFiles = isSearchTypeTVFiles(); /* filter only/out TV files */ searchResults = SageAPI.call("FilterByBoolMethod", new Object[]{newSearchResults, "IsTVFile", new Boolean(tvFiles)}); if (tvFiles) { searchResults = filterTvFiles(searchResults); } if (isSearchTypeMediaFiles()) { searchResults = SageAPI.call("FilterByBoolMethod", new Object[]{ searchResults, (isVideo() ? "IsVideoFile" : "IsAiringObject") + (isDVD() ? "|IsDVD" : "|IsAiringObject") + (isMusic() ? "|IsMusicFile" : "|IsAiringObject") + (isPicture() ? "|IsPictureFile" : "|IsAiringObject"), Boolean.TRUE}); } // no time filter on media files searchResults = filterShowList(searchResults); } else { searchResults = filterShowList(searchResults); } } else { // unknown type searchResults = null; } } else { // no search string -- search for all airings in time range and // sort by title if (isSearchTypeAirings()) { searchResults = SageAPI.call("GetAiringsOnViewableChannelsAtTime", new Object[]{getStarttime(), getEndtime(), Boolean.FALSE}); } else if (isSearchTypeMediaFiles()) { searchResults = SageAPI.call("GetMediaFiles", null); searchResults = SageAPI.call("FilterByBoolMethod", new Object[]{searchResults, "IsTVFile", Boolean.FALSE}); searchResults = SageAPI.call("FilterByBoolMethod", new Object[]{searchResults, "IsLibraryFile", Boolean.TRUE}); searchResults = SageAPI.call("FilterByBoolMethod", new Object[]{ searchResults, (isVideo() ? "IsVideoFile" : "IsAiringObject") + (isDVD() ? "|IsDVD" : "|IsAiringObject") + (isMusic() ? "|IsMusicFile" : "|IsAiringObject") + (isPicture() ? 
"|IsPictureFile" : "|IsAiringObject"), Boolean.TRUE}); } else if (isSearchTypeTVFiles()) { searchResults = SageAPI.call("GetMediaFiles", null); searchResults = SageAPI.call("FilterByBoolMethod", new Object[]{searchResults, "IsTVFile", Boolean.TRUE}); searchResults = filterTvFiles(searchResults); } else { // unknown type searchResults = null; } searchResults = filterShowList(searchResults); } if (searchResults != null) { // search type doesn't matter, filter it if it has a media file searchResults = filterFilenames(searchResults); } } else if (favoriteId != null) { Object favorite = FavoriteAPI.GetFavoriteForID(Integer.parseInt(getFavoriteId())); if (isSearchTypeTVFiles()) { searchResults = FavoriteAPI.GetFavoriteAirings(favorite); // show recorded airings of favorites // filter only tv files searchResults = SageAPI.call("FilterByBoolMethod", new Object[]{searchResults, "IsTVFile", Boolean.TRUE}); // no time filter on recorded airings searchResults = filterShowList(searchResults); } else { // show recorded and non-recorded airings of favorites in // specified time period searchResults = FavoriteAPI.GetFavoriteAirings(favorite); // add airings without media files searchResults = SageAPI.call("FilterByBoolMethod", new Object[]{searchResults, "IsMusicFile", Boolean.FALSE}); searchResults = SageAPI.call("FilterByBoolMethod", new Object[]{searchResults, "IsVideoFile", Boolean.FALSE}); searchResults = SageAPI.call("FilterByBoolMethod", new Object[]{searchResults, "IsDVD", Boolean.FALSE}); searchResults = SageAPI.call("FilterByBoolMethod", new Object[]{searchResults, "IsPictureFile", Boolean.FALSE}); // add airings with TV files Object searchResults2 = FavoriteAPI.GetFavoriteAirings(favorite); searchResults2 = SageAPI.call("FilterByBoolMethod", new Object[]{searchResults2, "IsTVFile", Boolean.TRUE}); searchResults = SageAPI.call("DataUnion", new Object[]{searchResults, searchResults2}); searchResults = filterShowList(searchResults); } } searchResults = sort(searchResults); return searchResults; } private Object filterShowList(Object searchResults) throws Exception { if (manrec != null && !manrec.equals("any") && Utility.Size(searchResults) > 0) { searchResults = SageAPI.call("FilterByBoolMethod", new Object[]{searchResults, "IsManualRecord", new Boolean(manrec.equalsIgnoreCase("set"))}); } if (watched != null && !watched.equals("any") && Utility.Size(searchResults) > 0) { searchResults = SageAPI.call("FilterByBoolMethod", new Object[]{searchResults, "IsWatched", new Boolean(watched.equalsIgnoreCase("set"))}); } if (dontlike != null && !dontlike.equals("any") && Utility.Size(searchResults) > 0) { searchResults = SageAPI.call("FilterByBoolMethod", new Object[]{searchResults, "IsDontLike", new Boolean(dontlike.equalsIgnoreCase("set"))}); } if (favorite != null && !favorite.equals("any") && Utility.Size(searchResults) > 0) { searchResults = SageAPI.call("FilterByBoolMethod", new Object[]{searchResults, "IsFavorite", new Boolean(favorite.equalsIgnoreCase("set"))}); } if (firstRuns != null && !firstRuns.equals("any") && Utility.Size(searchResults) > 0) { searchResults = SageAPI.call("FilterByBoolMethod", new Object[]{searchResults, "IsShowFirstRun", new Boolean(firstRuns.equalsIgnoreCase("set"))}); } if (hdtv != null && !hdtv.equals("any") && Utility.Size(searchResults) > 0) { searchResults = SageAPI.call("FilterByBoolMethod", new Object[]{searchResults, "IsAiringHDTV", new Boolean(hdtv.equalsIgnoreCase("set"))}); } if (isTimeFiltered() && Utility.Size(searchResults) > 0) { searchResults = 
SageAPI.call("FilterByRange", new Object[]{searchResults, "GetAiringEndTime", getStarttime(), getEndtime(), Boolean.TRUE}); } if (getCategories() != null && getCategories().length > 0 && Utility.Size(searchResults) > 0) { // for each category List<String> categories_l = Arrays.asList(getCategories()); if (categories_l.contains("**Any**")) { if (getCategories().length > 1) setCategories(new String[]{"**Any**"}); } else { // filter by each category Object allFiltered = null; for (Iterator<String> it = categories_l.iterator(); it.hasNext(); ) { Object filtered = SageAPI.call("DataUnion", new Object[]{searchResults}); String category = URLDecoder.decode((String) it.next(), charset); filtered = SageAPI.call("FilterByMethod", new Object[]{filtered, "GetShowCategory", category, Boolean.TRUE}); allFiltered = SageAPI.call("DataUnion", new Object[]{allFiltered, filtered}); } searchResults = allFiltered; } } if (getChannels() != null && getChannels().length > 0 && Utility.Size(searchResults) > 0) { // for each category List<String> channels_l = Arrays.asList(getChannels()); if (channels_l.contains("**Any**")) { if (getChannels().length > 1) setChannels(new String[]{"**Any**"}); } else { // filter by each category Object allFiltered = null; for (Iterator<String> it = channels_l.iterator(); it.hasNext(); ) { String channel = (String) it.next(); try { Integer chID = new Integer(channel); Object channelObj = SageAPI.call("GetChannelForStationID", new Object[]{chID}); if (channelObj != null) { Object filtered = SageAPI.call("DataUnion", new Object[]{searchResults}); filtered = SageAPI.call("FilterByMethod", new Object[]{filtered, "GetChannel", channelObj, Boolean.TRUE}); allFiltered = SageAPI.call("DataUnion", new Object[]{allFiltered, filtered}); } } catch (Exception e) { e.printStackTrace(System.out); } } searchResults = allFiltered; } } return searchResults; } private Object filterTvFiles(Object searchResults) throws InvocationTargetException { if (archived != null && !archived.equals("any") && Utility.Size(searchResults) > 0) { searchResults = SageAPI.call("FilterByBoolMethod", new Object[]{searchResults, "IsLibraryFile", new Boolean(archived.equalsIgnoreCase("set"))}); } if (manrec != null && !manrec.equals("any") && Utility.Size(searchResults) > 0) { searchResults = SageAPI.call("FilterByBoolMethod", new Object[]{searchResults, "IsManualRecord", new Boolean(manrec.equalsIgnoreCase("set"))}); } if (autodelete != null && !autodelete.equals("any") && Utility.Size(searchResults) > 0) { // don't show currently recording files searchResults = SageAPI.call("FilterByBoolMethod", new Object[]{searchResults, "IsFileCurrentlyRecording", Boolean.FALSE}); if (autodelete.equals("set")) { // a manual recording is never automatically deleted searchResults = SageAPI.call("FilterByBoolMethod", new Object[]{searchResults, "IsManualRecord", Boolean.FALSE}); // filter out library (archived) files, they are never deleted // by Sage searchResults = SageAPI.call("FilterByBoolMethod", new Object[]{searchResults, "IsLibraryFile", Boolean.FALSE}); } // check for auto-delete and filter only/out int numres = Utility.Size(searchResults); Object newSearchResults = null; for (int i = 0; i < numres; i++) { Object sageAiring = Utility.GetElement(searchResults, i); Object sageFavorite = SageAPI.call("GetFavoriteForAiring", new Object[]{sageAiring}); if (autodelete.equals("set")) { // if it's a favorite, check for auto-delete // if not, it's an intelligent recording (assumption is // valid because of previous filtering) if 
((sageFavorite == null) || (booleanApi("IsAutoDelete", new Object[]{sageFavorite}))) { newSearchResults = SageAPI.call("DataUnion", new Object[]{newSearchResults, sageAiring}); } } else if (autodelete.equals("cleared")) { // a manual recording is never automatically deleted // archived recordings are never automatically deleted // if it's a favorite, check for auto-delete // if not, it's an intelligent recording (assumption is // valid because of previous filtering) and make sure it's // in the library if (((sageFavorite != null) && (!booleanApi("IsAutoDelete", new Object[]{sageFavorite}))) || (booleanApi("IsManualRecord", new Object[]{sageAiring})) || (booleanApi("IsLibraryFile", new Object[]{sageAiring}))) { newSearchResults = SageAPI.call("DataUnion", new Object[]{newSearchResults, sageAiring}); } } } searchResults = newSearchResults; } String partials = getPartials(); if (partials.equals("none")) searchResults = SageAPI.call("FilterByBoolMethod", new Object[]{searchResults, "IsCompleteRecording", Boolean.TRUE}); else if (partials.equals("only")) { searchResults = SageAPI .call("FilterByBoolMethod", new Object[]{searchResults, "IsCompleteRecording", Boolean.FALSE}); searchResults = SageAPI.call("FilterByBoolMethod", new Object[]{searchResults, "IsManualRecord", Boolean.FALSE}); } return searchResults; } private boolean booleanApi(String api, Object[] objects) { try { return (Boolean) SageAPI.call(api, objects); } catch (Exception e) { e.printStackTrace(); } return false; } /** * Converts a windows wildcard pattern to a regex pattern * * @param wildcard - Wildcard pattern containing * and ? * @return - a regex pattern that is equivalent to the windows wildcard * pattern */ public static String toRegex(String wildcard) { if (wildcard == null) { return null; } StringBuffer buffer = new StringBuffer(); char[] chars = wildcard.toCharArray(); for (int i = 0; i < chars.length; ++i) { if (chars[i] == '*') { buffer.append(".*"); } else if (chars[i] == '?') { buffer.append("."); } else if ("+()^$.{}[]|\\".indexOf(chars[i]) != -1) { buffer.append('\\').append(chars[i]); // prefix all // metacharacters with // backslash } else { buffer.append(chars[i]); } } return buffer.toString(); } private Object filterFilenames(Object searchResults) throws InvocationTargetException { if (filename != null && filename.length() > 0 && Utility.Size(searchResults) > 0) { List<Object> newSearchResults = new ArrayList<Object>(); String regex = isRegex() ? filename : toRegex(filename); Pattern pattern = null; try { pattern = Pattern.compile(regex, isCaseSensitive() ? 
0 : (Pattern.CASE_INSENSITIVE | Pattern.UNICODE_CASE)); } catch (PatternSyntaxException e) { // invalid regex, don't return any results return newSearchResults; } int numres = Utility.Size(searchResults); for (int i = 0; i < numres; i++) { Object sageAiring = Utility.GetElement(searchResults, i); File files[] = (File[]) SageAPI.call("GetSegmentFiles", new Object[]{sageAiring}); if (files != null) { for (int j = 0; j < files.length; j++) { Matcher matcher = pattern.matcher(files[j].getAbsolutePath()); if (matcher.matches()) { newSearchResults.add(sageAiring); } } } } searchResults = newSearchResults; } return searchResults; } private Object sort(Object searchResults) throws InvocationTargetException { if (searchResults != null) { int numprogs = Utility.Size(searchResults); if (numprogs > 0) { // Do sorting String sort = getSort2(); Boolean SortOrder = Boolean.FALSE; if (sort != null) { if (sort.endsWith("_desc")) SortOrder = Boolean.TRUE; if (sort.startsWith("airdate_")) { searchResults = SageAPI.call("Sort", new Object[]{searchResults, SortOrder, "GetAiringStartTime"}); } else if (sort.startsWith("origairdate_")) { searchResults = SageAPI.call("Sort", new Object[]{searchResults, SortOrder, "GetOriginalAiringDate"}); } else if (sort.startsWith("title_")) { searchResults = SageAPI.call("Sort", new Object[]{searchResults, SortOrder, "GetAiringTitle"}); } else if (sort.startsWith("episode_")) { searchResults = SageAPI.call("Sort", new Object[]{searchResults, SortOrder, "GetShowEpisode"}); } } sort = getSort1(); if (sort == null) sort = "airdate_asc"; SortOrder = Boolean.FALSE; if (sort.endsWith("_desc")) SortOrder = Boolean.TRUE; if (sort.startsWith("airdate_")) { searchResults = SageAPI.call("Sort", new Object[]{searchResults, SortOrder, "GetAiringStartTime"}); } else if (sort.startsWith("origairdate_")) { searchResults = SageAPI.call("Sort", new Object[]{searchResults, SortOrder, "GetOriginalAiringDate"}); } else if (sort.startsWith("title_")) { searchResults = SageAPI.call("Sort", new Object[]{searchResults, SortOrder, "GetAiringTitle"}); } else if (sort.startsWith("episode_")) { searchResults = SageAPI.call("Sort", new Object[]{searchResults, SortOrder, "GetShowEpisode"}); } else if (sort.startsWith("people_")) { searchResults = SageAPI.call("Sort", new Object[]{searchResults, SortOrder, "GetPeopleInShow"}); } } } return searchResults; } }
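// --------------------------------------------------------------------------------
// Illustrative sketch (not part of the original source): what the toRegex helper
// above produces for typical filename filters. The class name and sample paths are
// made up; the regex literals are the hand-computed output of toRegex for the given
// wildcard patterns, compiled with the same flags filterFilenames uses.
class WildcardToRegexDemo {
    public static void main(String[] args) {
        // toRegex("*.mpg") -> ".*\.mpg": '*' becomes ".*" and the literal '.' is escaped.
        java.util.regex.Pattern p = java.util.regex.Pattern.compile(".*\\.mpg",
                java.util.regex.Pattern.CASE_INSENSITIVE | java.util.regex.Pattern.UNICODE_CASE);
        System.out.println(p.matcher("D:\\Recordings\\Show-Ep1.MPG").matches());      // true
        System.out.println(p.matcher("D:\\Recordings\\Show-Ep1.mpg.part").matches()); // false
        // toRegex("Show?.ts") -> "Show.\.ts": '?' matches exactly one character.
        System.out.println("Show1.ts".matches("Show.\\.ts"));  // true
        System.out.println("Show12.ts".matches("Show.\\.ts")); // false
    }
}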
/* * Copyright 2013-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.netflix.eureka; import java.util.HashMap; import java.util.Map; import java.util.concurrent.CountDownLatch; import java.util.concurrent.atomic.AtomicBoolean; import com.netflix.appinfo.ApplicationInfoManager; import com.netflix.appinfo.HealthCheckHandler; import com.netflix.discovery.EurekaClient; import com.netflix.discovery.EurekaClientConfig; import com.netflix.discovery.shared.transport.jersey.EurekaJerseyClient; import com.sun.jersey.client.apache4.ApacheHttpClient4; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.Test; import org.mockito.Mockito; import org.springframework.aop.framework.Advised; import org.springframework.beans.factory.NoSuchBeanDefinitionException; import org.springframework.boot.SpringApplication; import org.springframework.boot.autoconfigure.AutoConfigurations; import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean; import org.springframework.boot.autoconfigure.condition.SearchStrategy; import org.springframework.boot.autoconfigure.context.PropertyPlaceholderAutoConfiguration; import org.springframework.boot.context.event.ApplicationPreparedEvent; import org.springframework.boot.context.properties.EnableConfigurationProperties; import org.springframework.boot.context.properties.source.ConfigurationPropertySources; import org.springframework.boot.test.context.runner.ApplicationContextRunner; import org.springframework.boot.test.util.TestPropertyValues; import org.springframework.cloud.autoconfigure.RefreshAutoConfiguration; import org.springframework.cloud.client.discovery.DiscoveryClient; import org.springframework.cloud.client.discovery.health.DiscoveryClientHealthIndicator; import org.springframework.cloud.client.serviceregistry.AutoServiceRegistrationProperties; import org.springframework.cloud.commons.util.UtilAutoConfiguration; import org.springframework.cloud.context.refresh.ContextRefresher; import org.springframework.cloud.context.scope.GenericScope; import org.springframework.cloud.netflix.eureka.config.DiscoveryClientOptionalArgsConfiguration; import org.springframework.cloud.netflix.eureka.serviceregistry.EurekaServiceRegistry; import org.springframework.context.ApplicationContext; import org.springframework.context.ApplicationListener; import org.springframework.context.annotation.AnnotationConfigApplicationContext; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Import; import org.springframework.core.env.ConfigurableEnvironment; import org.springframework.core.env.MutablePropertySources; import org.springframework.core.env.SystemEnvironmentPropertySource; import org.springframework.test.util.ReflectionTestUtils; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.AssertionsForClassTypes.fail; /** * @author Spencer Gibb * @author 
Matt Jenkins * @author Olga Maciaszek-Sharma * @author Tim Ysewyn */ class EurekaClientAutoConfigurationTests { private AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(); @AfterEach void after() { if (this.context != null && this.context.isActive()) { this.context.close(); } } private void setupContext(Class<?>... config) { ConfigurationPropertySources.attach(this.context.getEnvironment()); this.context.register(PropertyPlaceholderAutoConfiguration.class, DiscoveryClientOptionalArgsConfiguration.class, EurekaDiscoveryClientConfiguration.class); for (Class<?> value : config) { this.context.register(value); } this.context.register(TestConfiguration.class); this.context.refresh(); } @Test void shouldSetManagementPortInMetadataMapIfEqualToServerPort() throws Exception { TestPropertyValues.of("server.port=8989").applyTo(this.context); setupContext(RefreshAutoConfiguration.class); EurekaInstanceConfigBean instance = this.context.getBean(EurekaInstanceConfigBean.class); assertThat(instance.getMetadataMap().get("management.port")).isEqualTo("8989"); } @Test void shouldNotSetManagementAndJmxPortsInMetadataMap() throws Exception { TestPropertyValues.of("server.port=8989", "management.server.port=0").applyTo(this.context); setupContext(RefreshAutoConfiguration.class); EurekaInstanceConfigBean instance = this.context.getBean(EurekaInstanceConfigBean.class); assertThat(instance.getMetadataMap().get("management.port")).isEqualTo(null); assertThat(instance.getMetadataMap().get("jmx.port")).isEqualTo(null); } @Test void shouldSetManagementAndJmxPortsInMetadataMap() throws Exception { TestPropertyValues.of("management.server.port=9999", "com.sun.management.jmxremote.port=6789") .applyTo(this.context); setupContext(RefreshAutoConfiguration.class); EurekaInstanceConfigBean instance = this.context.getBean(EurekaInstanceConfigBean.class); assertThat(instance.getMetadataMap().get("management.port")).isEqualTo("9999"); assertThat(instance.getMetadataMap().get("jmx.port")).isEqualTo("6789"); } @Test void shouldNotResetManagementAndJmxPortsInMetadataMap() throws Exception { TestPropertyValues.of("management.server.port=9999", "eureka.instance.metadata-map.jmx.port=9898", "eureka.instance.metadata-map.management.port=7878").applyTo(this.context); setupContext(RefreshAutoConfiguration.class); EurekaInstanceConfigBean instance = this.context.getBean(EurekaInstanceConfigBean.class); assertThat(instance.getMetadataMap().get("management.port")).isEqualTo("7878"); assertThat(instance.getMetadataMap().get("jmx.port")).isEqualTo("9898"); } @Test void nonSecurePortPeriods() { testNonSecurePort("server.port"); } @Test void nonSecurePortUnderscores() { testNonSecurePortSystemProp("SERVER_PORT"); } @Test void nonSecurePort() { testNonSecurePortSystemProp("PORT"); assertThat(this.context.getBeanDefinition("eurekaClient").getFactoryMethodName()).isEqualTo("eurekaClient"); } @Test void securePortPeriods() { testSecurePort("server.port"); } @Test void securePortUnderscores() { TestPropertyValues.of("eureka.instance.secure-port-enabled=true").applyTo(this.context); addSystemEnvironment(this.context.getEnvironment(), "SERVER_PORT:8443"); setupContext(); assertThat(getInstanceConfig().getSecurePort()).isEqualTo(8443); } @Test void securePort() { testSecurePort("PORT"); assertThat(this.context.getBeanDefinition("eurekaClient").getFactoryMethodName()).isEqualTo("eurekaClient"); } @Test void managementPort() { TestPropertyValues.of("server.port=8989", 
"management.server.port=9999").applyTo(this.context); setupContext(RefreshAutoConfiguration.class); EurekaInstanceConfigBean instance = this.context.getBean(EurekaInstanceConfigBean.class); assertThat(instance.getStatusPageUrl().contains("9999")).as("Wrong status page: " + instance.getStatusPageUrl()) .isTrue(); } @Test void statusPageUrlPathAndManagementPort() { TestPropertyValues.of("server.port=8989", "management.server.port=9999", "eureka.instance.statusPageUrlPath=/myStatusPage").applyTo(this.context); setupContext(RefreshAutoConfiguration.class); EurekaInstanceConfigBean instance = this.context.getBean(EurekaInstanceConfigBean.class); assertThat(instance.getStatusPageUrl().contains("/myStatusPage")) .as("Wrong status page: " + instance.getStatusPageUrl()).isTrue(); } @Test void healthCheckUrlPathAndManagementPort() { TestPropertyValues.of("server.port=8989", "management.server.port=9999", "eureka.instance.healthCheckUrlPath=/myHealthCheck").applyTo(this.context); setupContext(RefreshAutoConfiguration.class); EurekaInstanceConfigBean instance = this.context.getBean(EurekaInstanceConfigBean.class); assertThat(instance.getHealthCheckUrl().contains("/myHealthCheck")) .as("Wrong health check: " + instance.getHealthCheckUrl()).isTrue(); } @Test void statusPageUrl_and_healthCheckUrl_do_not_contain_server_context_path() throws Exception { TestPropertyValues.of("server.port=8989", "management.server.port=9999", "server.contextPath=/service") .applyTo(this.context); setupContext(RefreshAutoConfiguration.class); EurekaInstanceConfigBean instance = this.context.getBean(EurekaInstanceConfigBean.class); assertThat(instance.getStatusPageUrl().endsWith(":9999/actuator/info")) .as("Wrong status page: " + instance.getStatusPageUrl()).isTrue(); assertThat(instance.getHealthCheckUrl().endsWith(":9999/actuator/health")) .as("Wrong health check: " + instance.getHealthCheckUrl()).isTrue(); } @Test void statusPageUrl_and_healthCheckUrl_contain_management_context_path() throws Exception { TestPropertyValues.of("server.port=8989", "management.server.servlet.context-path=/management") .applyTo(this.context); setupContext(RefreshAutoConfiguration.class); EurekaInstanceConfigBean instance = this.context.getBean(EurekaInstanceConfigBean.class); assertThat(instance.getStatusPageUrl().endsWith(":8989/management/actuator/info")) .as("Wrong status page: " + instance.getStatusPageUrl()).isTrue(); assertThat(instance.getHealthCheckUrl().endsWith(":8989/management/actuator/health")) .as("Wrong health check: " + instance.getHealthCheckUrl()).isTrue(); } @Test void statusPageUrl_and_healthCheckUrl_contain_management_context_path_random_port() throws Exception { TestPropertyValues.of("server.port=0", "management.server.servlet.context-path=/management") .applyTo(this.context); setupContext(RefreshAutoConfiguration.class); EurekaInstanceConfigBean instance = this.context.getBean(EurekaInstanceConfigBean.class); assertThat(instance.getStatusPageUrlPath().equals("/management/actuator/info")) .as("Wrong status page: " + instance.getStatusPageUrlPath()).isTrue(); assertThat(instance.getHealthCheckUrlPath().equals("/management/actuator/health")) .as("Wrong health check: " + instance.getHealthCheckUrlPath()).isTrue(); } @Test void statusPageUrlPathAndManagementPortAndContextPath() { TestPropertyValues.of("server.port=8989", "management.server.port=9999", "management.server.servlet.context-path=/manage", "eureka.instance.status-page-url-path=/myStatusPage") .applyTo(this.context); setupContext(RefreshAutoConfiguration.class); 
EurekaInstanceConfigBean instance = this.context.getBean(EurekaInstanceConfigBean.class); assertThat(instance.getStatusPageUrl().endsWith(":9999/manage/myStatusPage")) .as("Wrong status page: " + instance.getStatusPageUrl()).isTrue(); } @Test void healthCheckUrlPathAndManagementPortAndContextPath() { TestPropertyValues .of("server.port=8989", "management.server.port=9999", "management.server.servlet.context-path=/manage", "eureka.instance.health-check-url-path=/myHealthCheck") .applyTo(this.context); setupContext(RefreshAutoConfiguration.class); EurekaInstanceConfigBean instance = this.context.getBean(EurekaInstanceConfigBean.class); assertThat(instance.getHealthCheckUrl().endsWith(":9999/manage/myHealthCheck")) .as("Wrong health check: " + instance.getHealthCheckUrl()).isTrue(); } @Test void statusPageUrlPathAndManagementPortAndContextPathKebobCase() { TestPropertyValues.of("server.port=8989", "management.server.port=9999", "management.server.servlet.context-path=/manage", "eureka.instance.status-page-url-path=/myStatusPage") .applyTo(this.context); setupContext(RefreshAutoConfiguration.class); EurekaInstanceConfigBean instance = this.context.getBean(EurekaInstanceConfigBean.class); assertThat(instance.getStatusPageUrl().endsWith(":9999/manage/myStatusPage")) .as("Wrong status page: " + instance.getStatusPageUrl()).isTrue(); } @Test void healthCheckUrlPathAndManagementPortAndContextPathKebobCase() { TestPropertyValues .of("server.port=8989", "management.server.port=9999", "management.server.servlet.context-path=/manage", "eureka.instance.health-check-url-path=/myHealthCheck") .applyTo(this.context); setupContext(RefreshAutoConfiguration.class); EurekaInstanceConfigBean instance = this.context.getBean(EurekaInstanceConfigBean.class); assertThat(instance.getHealthCheckUrl().endsWith(":9999/manage/myHealthCheck")) .as("Wrong health check: " + instance.getHealthCheckUrl()).isTrue(); } @Test void statusPageUrl_and_healthCheckUrl_contain_management_base_path() throws Exception { TestPropertyValues.of("server.port=8989", "management.server.base-path=/management").applyTo(this.context); setupContext(RefreshAutoConfiguration.class); EurekaInstanceConfigBean instance = this.context.getBean(EurekaInstanceConfigBean.class); assertThat(instance.getStatusPageUrl().endsWith(":8989/management/actuator/info")) .as("Wrong status page: " + instance.getStatusPageUrl()).isTrue(); assertThat(instance.getHealthCheckUrl().endsWith(":8989/management/actuator/health")) .as("Wrong health check: " + instance.getHealthCheckUrl()).isTrue(); } @Test void statusPageUrl_and_healthCheckUrl_contain_management_base_path_random_port() throws Exception { TestPropertyValues.of("server.port=0", "management.server.base-path=/management").applyTo(this.context); setupContext(RefreshAutoConfiguration.class); EurekaInstanceConfigBean instance = this.context.getBean(EurekaInstanceConfigBean.class); assertThat(instance.getStatusPageUrlPath().equals("/management/actuator/info")) .as("Wrong status page: " + instance.getStatusPageUrlPath()).isTrue(); assertThat(instance.getHealthCheckUrlPath().equals("/management/actuator/health")) .as("Wrong health check: " + instance.getHealthCheckUrlPath()).isTrue(); } @Test void statusPageUrlPathAndManagementPortAndBasePath() { TestPropertyValues.of("server.port=8989", "management.server.port=9999", "management.server.base-path=/manage", "eureka.instance.status-page-url-path=/myStatusPage").applyTo(this.context); setupContext(RefreshAutoConfiguration.class); EurekaInstanceConfigBean instance = 
this.context.getBean(EurekaInstanceConfigBean.class); assertThat(instance.getStatusPageUrl().endsWith(":9999/manage/myStatusPage")) .as("Wrong status page: " + instance.getStatusPageUrl()).isTrue(); } @Test void healthCheckUrlPathAndManagementPortAndBasePath() { TestPropertyValues.of("server.port=8989", "management.server.port=9999", "management.server.base-path=/manage", "eureka.instance.health-check-url-path=/myHealthCheck").applyTo(this.context); setupContext(RefreshAutoConfiguration.class); EurekaInstanceConfigBean instance = this.context.getBean(EurekaInstanceConfigBean.class); assertThat(instance.getHealthCheckUrl().endsWith(":9999/manage/myHealthCheck")) .as("Wrong health check: " + instance.getHealthCheckUrl()).isTrue(); } @Test void statusPageUrlPathAndManagementPortAndBasePathKebobCase() { TestPropertyValues.of("server.port=8989", "management.server.port=9999", "management.server.base-path=/manage", "eureka.instance.status-page-url-path=/myStatusPage").applyTo(this.context); setupContext(RefreshAutoConfiguration.class); EurekaInstanceConfigBean instance = this.context.getBean(EurekaInstanceConfigBean.class); assertThat(instance.getStatusPageUrl().endsWith(":9999/manage/myStatusPage")) .as("Wrong status page: " + instance.getStatusPageUrl()).isTrue(); } @Test void healthCheckUrlPathAndManagementPortAndBasePathKebobCase() { TestPropertyValues.of("server.port=8989", "management.server.port=9999", "management.server.base-path=/manage", "eureka.instance.health-check-url-path=/myHealthCheck").applyTo(this.context); setupContext(RefreshAutoConfiguration.class); EurekaInstanceConfigBean instance = this.context.getBean(EurekaInstanceConfigBean.class); assertThat(instance.getHealthCheckUrl().endsWith(":9999/manage/myHealthCheck")) .as("Wrong health check: " + instance.getHealthCheckUrl()).isTrue(); } @Test void healthCheckUrlPathWithServerPortAndContextPathKebobCase() { TestPropertyValues .of("server.port=8989", "server.servlet.context-path=/servletContextPath", "eureka.instance.health-check-url-path=${server.servlet.context-path:}/myHealthCheck") .applyTo(this.context); setupContext(RefreshAutoConfiguration.class); EurekaInstanceConfigBean instance = this.context.getBean(EurekaInstanceConfigBean.class); assertThat(instance.getHealthCheckUrl()).as("Wrong health check: " + instance.getHealthCheckUrl()) .endsWith(":8989/servletContextPath/myHealthCheck"); } @Test void statusPageUrlPathAndManagementPortKabobCase() { TestPropertyValues.of("server.port=8989", "management.server.port=9999", "eureka.instance.status-page-url-path=/myStatusPage").applyTo(this.context); setupContext(RefreshAutoConfiguration.class); EurekaInstanceConfigBean instance = this.context.getBean(EurekaInstanceConfigBean.class); assertThat(instance.getStatusPageUrl().contains("/myStatusPage")) .as("Wrong status page: " + instance.getStatusPageUrl()).isTrue(); } @Test void statusPageUrlAndPreferIpAddress() { TestPropertyValues.of("server.port=8989", "management.server.port=9999", "eureka.instance.hostname=foo", "eureka.instance.prefer-ip-address:true").applyTo(this.context); setupContext(RefreshAutoConfiguration.class); EurekaInstanceConfigBean instance = this.context.getBean(EurekaInstanceConfigBean.class); assertThat(instance.getStatusPageUrl()).as("statusPageUrl is wrong") .isEqualTo("http://" + instance.getIpAddress() + ":9999/actuator/info"); assertThat(instance.getHealthCheckUrl()).as("healthCheckUrl is wrong") .isEqualTo("http://" + instance.getIpAddress() + ":9999/actuator/health"); } @Test void 
statusPageAndHealthCheckUrlsShouldSetUserDefinedIpAddress() { TestPropertyValues .of("server.port=8989", "management.server.port=9999", "eureka.instance.hostname=foo", "eureka.instance.ip-address:192.168.13.90", "eureka.instance.prefer-ip-address:true") .applyTo(this.context); setupContext(RefreshAutoConfiguration.class); EurekaInstanceConfigBean instance = this.context.getBean(EurekaInstanceConfigBean.class); assertThat(instance.getStatusPageUrl()).as("statusPageUrl is wrong") .isEqualTo("http://192.168.13.90:9999/actuator/info"); assertThat(instance.getHealthCheckUrl()).as("healthCheckUrl is wrong") .isEqualTo("http://192.168.13.90:9999/actuator/health"); } @Test void healthCheckUrlPathAndManagementPortKabobCase() { TestPropertyValues.of("server.port=8989", "management.server.port=9999", "eureka.instance.health-check-url-path=/myHealthCheck").applyTo(this.context); setupContext(RefreshAutoConfiguration.class); EurekaInstanceConfigBean instance = this.context.getBean(EurekaInstanceConfigBean.class); assertThat(instance.getHealthCheckUrl().contains("/myHealthCheck")) .as("Wrong health check: " + instance.getHealthCheckUrl()).isTrue(); } @Test void statusPageUrlPathAndManagementPortUpperCase() { TestPropertyValues.of("server.port=8989", "management.server.port=9999").applyTo(this.context); addSystemEnvironment(this.context.getEnvironment(), "EUREKA_INSTANCE_STATUS_PAGE_URL_PATH=/myStatusPage"); setupContext(RefreshAutoConfiguration.class); EurekaInstanceConfigBean instance = this.context.getBean(EurekaInstanceConfigBean.class); assertThat(instance.getStatusPageUrl().contains("/myStatusPage")) .as("Wrong status page: " + instance.getStatusPageUrl()).isTrue(); } @Test void healthCheckUrlPathAndManagementPortUpperCase() { TestPropertyValues.of("server.port=8989", "management.server.port=9999").applyTo(this.context); addSystemEnvironment(this.context.getEnvironment(), "EUREKA_INSTANCE_HEALTH_CHECK_URL_PATH=/myHealthCheck"); setupContext(RefreshAutoConfiguration.class); EurekaInstanceConfigBean instance = this.context.getBean(EurekaInstanceConfigBean.class); assertThat(instance.getHealthCheckUrl().contains("/myHealthCheck")) .as("Wrong health check: " + instance.getHealthCheckUrl()).isTrue(); } @Test void hostname() { TestPropertyValues.of("server.port=8989", "management.server.port=9999", "eureka.instance.hostname=foo") .applyTo(this.context); setupContext(RefreshAutoConfiguration.class); EurekaInstanceConfigBean instance = this.context.getBean(EurekaInstanceConfigBean.class); assertThat(instance.getStatusPageUrl().contains("foo")).as("Wrong status page: " + instance.getStatusPageUrl()) .isTrue(); } @Test void refreshScopedBeans() { setupContext(RefreshAutoConfiguration.class); assertThat(this.context.getBeanDefinition("eurekaClient").getBeanClassName()) .startsWith(GenericScope.class.getName() + "$LockedScopedProxyFactoryBean"); assertThat(this.context.getBeanDefinition("eurekaApplicationInfoManager").getBeanClassName()) .startsWith(GenericScope.class.getName() + "$LockedScopedProxyFactoryBean"); } @SuppressWarnings({ "unchecked", "rawtypes" }) @Test void shouldReregisterHealthCheckHandlerAfterRefresh() throws Exception { TestPropertyValues .of("eureka.client.healthcheck.enabled=true", "spring.cloud.config.import-check.enabled=false", "spring.cloud.refresh.additionalPropertySourcesToRetain=test") .applyTo(this.context); setupContext(RefreshAutoConfiguration.class, AutoServiceRegistrationConfiguration.class); EurekaClient oldEurekaClient = getLazyInitEurekaClient(); HealthCheckHandler 
healthCheckHandler = this.context.getBean("eurekaHealthCheckHandler", HealthCheckHandler.class); assertThat(healthCheckHandler).isInstanceOf(EurekaHealthCheckHandler.class); assertThat(oldEurekaClient.getHealthCheckHandler()).isSameAs(healthCheckHandler); ContextRefresher refresher = this.context.getBean(ContextRefresher.class); if (refresher instanceof ApplicationListener) { ApplicationListener<ApplicationPreparedEvent> listener = (ApplicationListener) refresher; listener.onApplicationEvent( new ApplicationPreparedEvent(Mockito.mock(SpringApplication.class), new String[0], this.context)); } refresher.refresh(); EurekaClient newEurekaClient = getLazyInitEurekaClient(); HealthCheckHandler newHealthCheckHandler = this.context.getBean("eurekaHealthCheckHandler", HealthCheckHandler.class); assertThat(healthCheckHandler).isSameAs(newHealthCheckHandler); assertThat(oldEurekaClient).isNotSameAs(newEurekaClient); assertThat(newEurekaClient.getHealthCheckHandler()).isSameAs(healthCheckHandler); } @Test void shouldCloseDiscoveryClient() throws Exception { TestPropertyValues.of("eureka.client.healthcheck.enabled=true").applyTo(this.context); setupContext(RefreshAutoConfiguration.class, AutoServiceRegistrationConfiguration.class); AtomicBoolean isShutdown = (AtomicBoolean) ReflectionTestUtils.getField(getLazyInitEurekaClient(), "isShutdown"); assertThat(isShutdown.get()).isFalse(); this.context.close(); assertThat(isShutdown.get()).isTrue(); } @Test void basicAuth() { TestPropertyValues .of("server.port=8989", "eureka.client.serviceUrl.defaultZone=https://user:foo@example.com:80/eureka") .applyTo(this.context); setupContext(MockClientConfiguration.class); // ApacheHttpClient4 http = this.context.getBean(ApacheHttpClient4.class); // Mockito.verify(http).addFilter(Matchers.any(HTTPBasicAuthFilter.class)); } @Test void testDefaultAppName() throws Exception { setupContext(); assertThat(getInstanceConfig().getAppname()).isEqualTo("unknown"); assertThat(getInstanceConfig().getVirtualHostName()).isEqualTo("unknown"); assertThat(getInstanceConfig().getSecureVirtualHostName()).isEqualTo("unknown"); } @Test void testAppName() throws Exception { TestPropertyValues.of("spring.application.name=mytest").applyTo(this.context); setupContext(); assertThat(getInstanceConfig().getAppname()).isEqualTo("mytest"); assertThat(getInstanceConfig().getVirtualHostName()).isEqualTo("mytest"); assertThat(getInstanceConfig().getSecureVirtualHostName()).isEqualTo("mytest"); } @Test void testAppNameUpper() throws Exception { addSystemEnvironment(this.context.getEnvironment(), "SPRING_APPLICATION_NAME=mytestupper"); setupContext(); assertThat(getInstanceConfig().getAppname()).isEqualTo("mytestupper"); assertThat(getInstanceConfig().getVirtualHostName()).isEqualTo("mytestupper"); assertThat(getInstanceConfig().getSecureVirtualHostName()).isEqualTo("mytestupper"); } private void addSystemEnvironment(ConfigurableEnvironment environment, String... pairs) { MutablePropertySources sources = environment.getPropertySources(); Map<String, Object> map = getOrAdd(sources, "testsysenv"); for (String pair : pairs) { int index = getSeparatorIndex(pair); String key = pair.substring(0, index > 0 ? index : pair.length()); String value = index > 0 ? 
pair.substring(index + 1) : ""; map.put(key.trim(), value.trim()); } } @SuppressWarnings("unchecked") private static Map<String, Object> getOrAdd(MutablePropertySources sources, String name) { if (sources.contains(name)) { return (Map<String, Object>) sources.get(name).getSource(); } Map<String, Object> map = new HashMap<>(); sources.addFirst(new SystemEnvironmentPropertySource(name, map)); return map; } private static int getSeparatorIndex(String pair) { int colonIndex = pair.indexOf(":"); int equalIndex = pair.indexOf("="); if (colonIndex == -1) { return equalIndex; } if (equalIndex == -1) { return colonIndex; } return Math.min(colonIndex, equalIndex); } @Test void testInstanceNamePreferred() throws Exception { addSystemEnvironment(this.context.getEnvironment(), "SPRING_APPLICATION_NAME=mytestspringappname"); TestPropertyValues.of("eureka.instance.appname=mytesteurekaappname").applyTo(this.context); setupContext(); assertThat(getInstanceConfig().getAppname()).isEqualTo("mytesteurekaappname"); } @Test void eurekaHealthIndicatorCreated() { setupContext(); this.context.getBean(EurekaHealthIndicator.class); } @Test void eurekaClientClosed() { setupContext(TestEurekaClientConfiguration.class); if (this.context != null) { CountDownLatch latch = this.context.getBean(CountDownLatch.class); this.context.close(); assertThat(latch.getCount()).isEqualTo(0); } } @Test void eurekaConfigNotLoadedWhenDiscoveryClientDisabled() { TestPropertyValues.of("spring.cloud.discovery.enabled=false").applyTo(this.context); setupContext(TestConfiguration.class); assertBeanNotPresent(EurekaClientConfigBean.class); assertBeanNotPresent(EurekaInstanceConfigBean.class); assertBeanNotPresent(DiscoveryClient.class); assertBeanNotPresent(EurekaServiceRegistry.class); assertBeanNotPresent(EurekaClient.class); } @Test void shouldNotHaveDiscoveryClientWhenBlockingDiscoveryDisabled() { new ApplicationContextRunner() .withConfiguration(AutoConfigurations.of(UtilAutoConfiguration.class, DiscoveryClientOptionalArgsConfiguration.class, EurekaClientAutoConfiguration.class, EurekaDiscoveryClientConfiguration.class)) .withPropertyValues("spring.cloud.discovery.blocking.enabled=false").run(context -> { assertThat(context).doesNotHaveBean(DiscoveryClient.class); assertThat(context).doesNotHaveBean(DiscoveryClientHealthIndicator.class); }); } private void assertBeanNotPresent(Class beanClass) { try { context.getBean(beanClass); fail("Bean of type " + beanClass + " should not have been created."); } catch (NoSuchBeanDefinitionException exception) { // expected exception } } private void testNonSecurePortSystemProp(String propName) { addSystemEnvironment(this.context.getEnvironment(), propName + ":8888"); setupContext(); assertThat(getInstanceConfig().getNonSecurePort()).isEqualTo(8888); } private void testNonSecurePort(String propName) { TestPropertyValues.of(propName + ":8888").applyTo(this.context); setupContext(); assertThat(getInstanceConfig().getNonSecurePort()).isEqualTo(8888); } private void testSecurePort(String propName) { TestPropertyValues.of("eureka.instance.secure-port-enabled=true", propName + ":8443").applyTo(this.context); setupContext(); assertThat(getInstanceConfig().getSecurePort()).isEqualTo(8443); } private EurekaInstanceConfigBean getInstanceConfig() { return this.context.getBean(EurekaInstanceConfigBean.class); } private EurekaClient getLazyInitEurekaClient() throws Exception { return (EurekaClient) ((Advised) this.context.getBean("eurekaClient", EurekaClient.class)).getTargetSource() .getTarget(); } 
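}

// "eurekaClient" is registered as a refresh-scoped proxy (see refreshScopedBeans()
// above), so the helper casts the bean to Advised and unwraps its target source to
// reach the actual CloudEurekaClient instance and compare client identity across
// refreshes.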
@Configuration(proxyBeanMethods = false) @EnableConfigurationProperties @Import({ UtilAutoConfiguration.class, EurekaClientAutoConfiguration.class }) protected static class TestConfiguration { } @Configuration protected static class TestEurekaClientConfiguration { @Bean public CountDownLatch countDownLatch() { return new CountDownLatch(1); } @Bean(destroyMethod = "shutdown") @ConditionalOnMissingBean(value = EurekaClient.class, search = SearchStrategy.CURRENT) public EurekaClient eurekaClient(ApplicationInfoManager manager, EurekaClientConfig config, ApplicationContext context) { return new CloudEurekaClient(manager, config, null, context) { @Override public synchronized void shutdown() { CountDownLatch latch = countDownLatch(); if (latch.getCount() == 1) { latch.countDown(); } super.shutdown(); } }; } } @Configuration(proxyBeanMethods = false) protected static class MockClientConfiguration { @Bean public EurekaJerseyClient jerseyClient() { EurekaJerseyClient mock = Mockito.mock(EurekaJerseyClient.class); Mockito.when(mock.getClient()).thenReturn(apacheClient()); return mock; } @Bean public ApacheHttpClient4 apacheClient() { return Mockito.mock(ApacheHttpClient4.class); } } @Configuration(proxyBeanMethods = false) @EnableConfigurationProperties(AutoServiceRegistrationProperties.class) public static class AutoServiceRegistrationConfiguration { } }
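// --------------------------------------------------------------------------------
// Illustrative sketch (not part of the original tests): the UPPER_SNAKE_CASE cases
// above rely on SystemEnvironmentPropertySource, which translates relaxed property
// names (server.port -> SERVER_PORT) before looking them up. A minimal,
// self-contained demonstration:
class SystemEnvironmentBindingDemo {
    public static void main(String[] args) {
        java.util.Map<String, Object> fakeEnv = new java.util.HashMap<>();
        fakeEnv.put("SERVER_PORT", "8443");
        org.springframework.core.env.SystemEnvironmentPropertySource source =
                new org.springframework.core.env.SystemEnvironmentPropertySource("demo", fakeEnv);
        // The lookup name is converted to upper snake case internally before matching.
        System.out.println(source.getProperty("server.port")); // prints 8443
    }
}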
/* * Copyright (C) 2013 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.purdue.CampusFeed.Utils; import android.annotation.TargetApi; import android.content.Context; import android.content.SharedPreferences; import android.os.AsyncTask; import android.os.Build; import android.os.StrictMode; import android.util.Log; import com.google.android.gms.common.ConnectionResult; import com.google.android.gms.common.GooglePlayServicesUtil; import com.google.android.gms.gcm.GoogleCloudMessaging; import com.nostra13.universalimageloader.core.DisplayImageOptions; import com.nostra13.universalimageloader.core.ImageLoader; import com.nostra13.universalimageloader.core.ImageLoaderConfiguration; import com.purdue.CampusFeed.Activities.ContactDetailActivity; import com.purdue.CampusFeed.Activities.ContactsListActivity; import java.io.IOException; /** * This class contains static utility methods. */ public class Utils { public final static String[] categories = new String[]{"Recreation", "Social", "Education", "University Event", "Charity"}; //---------GCM variables------------------ public static final String TAG = "CampusFeed"; /** * Tag used on log messages. */ static final String GCM_DEBUG_TAG = "GCMDemo"; private final static String USER_ID = "USER_ID", ACCESS_TOKEN = "ACCESS_TOKEN"; public static String gcmRegid; public static String facebook_userID, facebook_accessToken; /** * Substitute your own sender ID here. This is the project number you got * from the API Console, as described in "Getting Started." */ static String SENDER_ID = "872065754556"; static GoogleCloudMessaging gcm; private static ImageLoader mImageLoader; private Utils() { } /** * Registers the application with GCM servers asynchronously. * <p/> * Stores the registration ID in the static {@code gcmRegid} field. */ @SuppressWarnings("unchecked") private static void registerInBackground(final Context context) { new AsyncTask() { protected Object doInBackground(Object[] params) { String msg = ""; try { if (gcm == null) { gcm = GoogleCloudMessaging.getInstance(context); } gcmRegid = gcm.register(SENDER_ID); msg = "Device registered, registration ID=" + gcmRegid; // You should send the registration ID to your server over HTTP, // so it can use GCM/HTTP or CCS to send messages to your app. // The request to your server should be authenticated if your app // is using accounts. //sendRegistrationIdToBackend(); } catch (IOException ex) { msg = "Error: " + ex.getMessage(); gcmRegid = "FAIL"; Log.d("PRANAV", ex.toString()); // If there is an error, don't just keep trying to register. // Require the user to click a button again, or perform // exponential back-off. } return msg; } }.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR, null, null, null); } /** * Sends the registration ID to your server over HTTP, so it can use GCM/HTTP * or CCS to send messages to your app. 
Not needed for this demo since the * device sends upstream messages to a server that echoes back the message * using the 'from' address in the message. */ private static void sendRegistrationIdToBackend() { // Your implementation here. } public static boolean checkPlayServices(Context context) { return GooglePlayServicesUtil.isGooglePlayServicesAvailable(context) == ConnectionResult.SUCCESS; } public static void init(Context c) { //init contact stuff DisplayImageOptions defaultOptions = new DisplayImageOptions.Builder() .cacheInMemory(true).cacheOnDisc(true).build(); ImageLoaderConfiguration config = new ImageLoaderConfiguration.Builder( c).defaultDisplayImageOptions(defaultOptions).build(); mImageLoader = ImageLoader.getInstance(); mImageLoader.init(config); //init GCM stuff (added by Pranav) if (checkPlayServices(c)) { //register for GCM everytime, crappy implementation //will change later gcm = GoogleCloudMessaging.getInstance(c); registerInBackground(c); } else Log.i(GCM_DEBUG_TAG, "No valid Google Play Services APK found."); } public static ImageLoader getImageLoader() { return mImageLoader; } /** * Enables strict mode. This should only be called when debugging the * application and is useful for finding some potential bugs or best * practice violations. */ @TargetApi(11) public static void enableStrictMode() { // Strict mode is only available on gingerbread or later if (Utils.hasGingerbread()) { // Enable all thread strict mode policies StrictMode.ThreadPolicy.Builder threadPolicyBuilder = new StrictMode.ThreadPolicy.Builder() .detectAll().penaltyLog(); // Enable all VM strict mode policies StrictMode.VmPolicy.Builder vmPolicyBuilder = new StrictMode.VmPolicy.Builder() .detectAll().penaltyLog(); // Honeycomb introduced some additional strict mode features if (Utils.hasHoneycomb()) { // Flash screen when thread policy is violated threadPolicyBuilder.penaltyFlashScreen(); // For each activity class, set an instance limit of 1. Any more // instances and // there could be a memory leak. vmPolicyBuilder.setClassInstanceLimit( ContactsListActivity.class, 1).setClassInstanceLimit( ContactDetailActivity.class, 1); } // Use builders to enable strict mode policies StrictMode.setThreadPolicy(threadPolicyBuilder.build()); StrictMode.setVmPolicy(vmPolicyBuilder.build()); } } /** * Uses static final constants to detect if the device's platform version is * Gingerbread or later. */ public static boolean hasGingerbread() { return Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD; } /** * Uses static final constants to detect if the device's platform version is * Honeycomb or later. */ public static boolean hasHoneycomb() { return Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB; } /** * Uses static final constants to detect if the device's platform version is * Honeycomb MR1 or later. */ public static boolean hasHoneycombMR1() { return Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB_MR1; } /** * Uses static final constants to detect if the device's platform version is * ICS or later. 
*/ public static boolean hasICS() { return Build.VERSION.SDK_INT >= Build.VERSION_CODES.ICE_CREAM_SANDWICH; } public static boolean getLoginCredentials(Context c) { SharedPreferences prefs = getSharedPreferences(c); facebook_userID = prefs.getString(USER_ID, ""); facebook_accessToken = prefs.getString(ACCESS_TOKEN, ""); if (facebook_userID.isEmpty() || facebook_accessToken.isEmpty()) { facebook_userID = null; facebook_accessToken = null; return false; } return true; } public static void saveLoginCredential(Context c) { SharedPreferences prefs = getSharedPreferences(c); SharedPreferences.Editor editor = prefs.edit(); editor.putString(ACCESS_TOKEN, facebook_accessToken); editor.putString(USER_ID, facebook_userID); editor.commit(); } public static void removeLoginCredentials(Context c) { SharedPreferences prefs = getSharedPreferences(c); SharedPreferences.Editor editor = prefs.edit(); Utils.facebook_userID = null; Utils.facebook_accessToken = null; editor.remove(ACCESS_TOKEN); editor.remove(USER_ID); editor.commit(); } private static SharedPreferences getSharedPreferences(Context c) { return c.getSharedPreferences(TAG, Context.MODE_PRIVATE); } }
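// --------------------------------------------------------------------------------
// Illustrative sketch (not part of the original file): one possible shape for the
// sendRegistrationIdToBackend() stub above. The endpoint URL and the "regId" form
// parameter are hypothetical; the original project never specifies a backend API.
class GcmBackendUploadSketch {
    static void sendRegistrationIdToBackend(String regId) throws java.io.IOException {
        java.net.URL url = new java.net.URL("https://example.com/gcm/register"); // hypothetical endpoint
        java.net.HttpURLConnection conn = (java.net.HttpURLConnection) url.openConnection();
        conn.setRequestMethod("POST");
        conn.setDoOutput(true);
        byte[] body = ("regId=" + java.net.URLEncoder.encode(regId, "UTF-8")).getBytes("UTF-8");
        conn.getOutputStream().write(body);
        // A real client would authenticate this request and retry with exponential
        // back-off on failure, as the comments in registerInBackground() suggest.
        if (conn.getResponseCode() != java.net.HttpURLConnection.HTTP_OK) {
            throw new java.io.IOException("Registration upload failed: " + conn.getResponseCode());
        }
        conn.disconnect();
    }
}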
/* * Copyright 2020 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.swarm.tokenization.beam; import com.google.auto.value.AutoValue; import com.google.cloud.dlp.v2.DlpServiceClient; import com.google.privacy.dlp.v2.ContentItem; import com.google.privacy.dlp.v2.DeidentifyConfig; import com.google.privacy.dlp.v2.FieldId; import com.google.privacy.dlp.v2.InspectConfig; import com.google.privacy.dlp.v2.ReidentifyContentRequest; import com.google.privacy.dlp.v2.ReidentifyContentResponse; import com.google.privacy.dlp.v2.Table; import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.stream.Collectors; import javax.annotation.Nullable; import org.apache.beam.sdk.annotations.Experimental; import org.apache.beam.sdk.transforms.DoFn; import org.apache.beam.sdk.transforms.PTransform; import org.apache.beam.sdk.transforms.ParDo; import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.PCollectionView; /** * A {@link PTransform} connecting to Cloud DLP (https://cloud.google.com/dlp/docs/libraries) and * inspecting text for identifying data according to provided settings. * * <p>The transform supports both delimited columnar input data and unstructured input. * * <p>If the headerColumns property is set and a side input with headers is added to the PTransform, * the delimiter must also be set, otherwise the results will be incorrect. If headerColumns is * neither set nor passed as a side input, the input is assumed to be unstructured. * * <p>Batch size defines how large the batches sent to DLP in a single request are, in bytes. * * <p>The transform consumes {@link KV} of {@link String}s (assumed to be filename as key and * contents as value) and outputs {@link KV} of {@link String} (e.g. filename) and {@link * ReidentifyContentResponse}, which will contain {@link Table} of results for the user to consume. * * <p>Either reidentifyTemplateName {@link String} or reidentifyConfig {@link DeidentifyConfig} needs * to be set. inspectConfig {@link InspectConfig} and inspectTemplateName {@link String} are * optional. */ @Experimental @AutoValue public abstract class DLPReidentifyText extends PTransform< PCollection<KV<String, Table.Row>>, PCollection<KV<String, ReidentifyContentResponse>>> { public static final Integer DLP_PAYLOAD_LIMIT_BYTES = 524000; /** @return Template name for data inspection. */ @Nullable public abstract String getInspectTemplateName(); /** @return Template name for data reidentification. */ @Nullable public abstract String getReidentifyTemplateName(); /** * @return Configuration object for data inspection. If present, supersedes the template settings. */ @Nullable public abstract InspectConfig getInspectConfig(); /** @return Configuration object for reidentification. If present, supersedes the template. 
*/ @Nullable public abstract DeidentifyConfig getReidentifyConfig(); /** @return Delimiter to be used when splitting values from input strings into columns. */ @Nullable public abstract Character getColumnDelimiter(); /** @return List of column names if the input KV value is a delimited row. */ @Nullable public abstract PCollectionView<Map<String, List<String>>> getHeaderColumns(); /** @return Size of input elements batch to be sent to Cloud DLP service in one request. */ public abstract Integer getBatchSizeBytes(); /** @return ID of Google Cloud project to be used when reidentifying data. */ public abstract String getProjectId(); @AutoValue.Builder public abstract static class Builder { /** @param inspectTemplateName Template name for data inspection. */ public abstract DLPReidentifyText.Builder setInspectTemplateName(String inspectTemplateName); /** * @param inspectConfig Configuration object for data inspection. If present, supersedes the * template settings. */ public abstract DLPReidentifyText.Builder setInspectConfig(InspectConfig inspectConfig); /** * @param reidentifyConfig Configuration object for data reidentification. If present, * supersedes the template settings. */ public abstract DLPReidentifyText.Builder setReidentifyConfig( DeidentifyConfig reidentifyConfig); /** @param reidentifyTemplateName Template name for data reidentification. */ public abstract DLPReidentifyText.Builder setReidentifyTemplateName( String reidentifyTemplateName); /** * @param batchSize Size of input elements batch to be sent to Cloud DLP service in one request. */ public abstract DLPReidentifyText.Builder setBatchSizeBytes(Integer batchSize); /** * @param headerColumns Map, keyed by table reference, of the column-name lists to use when the * input KV value is a delimited row. */ public abstract DLPReidentifyText.Builder setHeaderColumns( PCollectionView<Map<String, List<String>>> headerColumns); /** * @param delimiter Delimiter to be used when splitting values from input strings into columns. */ public abstract DLPReidentifyText.Builder setColumnDelimiter(Character delimiter); /** @param projectId ID of Google Cloud project to be used when reidentifying data. */ public abstract DLPReidentifyText.Builder setProjectId(String projectId); abstract DLPReidentifyText autoBuild(); public DLPReidentifyText build() { DLPReidentifyText dlpReidentifyText = autoBuild(); if (dlpReidentifyText.getReidentifyConfig() == null && dlpReidentifyText.getReidentifyTemplateName() == null) { throw new IllegalArgumentException( "Either reidentifyConfig or reidentifyTemplateName needs to be set!"); } if (dlpReidentifyText.getBatchSizeBytes() > DLP_PAYLOAD_LIMIT_BYTES) { throw new IllegalArgumentException( String.format( "Batch size is too large! It must be less than or equal to %d.", DLP_PAYLOAD_LIMIT_BYTES)); } if (dlpReidentifyText.getColumnDelimiter() == null && dlpReidentifyText.getHeaderColumns() != null) { throw new IllegalArgumentException( "Column delimiter should be set if headers are present."); } if (dlpReidentifyText.getHeaderColumns() == null && dlpReidentifyText.getColumnDelimiter() != null) { throw new IllegalArgumentException( "Column headers should be supplied when delimiter is present."); } return dlpReidentifyText; } } public static DLPReidentifyText.Builder newBuilder() { return new AutoValue_DLPReidentifyText.Builder(); } /** * The transform converts the contents of input PCollection into {@link Table.Row}s and then calls * Cloud DLP service to perform the reidentification according to provided settings. 
* * @param input input PCollection * @return PCollection after transformations */ @Override public PCollection<KV<String, ReidentifyContentResponse>> expand( PCollection<KV<String, Table.Row>> input) { return input .apply("Shard Contents", new ShardRows()) .apply("Batch Contents", ParDo.of(new BatchRequestForDLP(getBatchSizeBytes()))) .apply("Unshard Contents", ParDo.of(new UnshardRows())) .apply( "DLPReidentify", ParDo.of( new DLPReidentifyText.ReidentifyText( getProjectId(), getInspectTemplateName(), getReidentifyTemplateName(), getInspectConfig(), getReidentifyConfig(), getHeaderColumns())) .withSideInputs(getHeaderColumns())); } /** Performs the calls to Cloud DLP service on GCP. */ static class ReidentifyText extends DoFn<KV<String, Iterable<Table.Row>>, KV<String, ReidentifyContentResponse>> { private final String projectId; private final String inspectTemplateName; private final String reidentifyTemplateName; private final InspectConfig inspectConfig; private final DeidentifyConfig reidentifyConfig; private final PCollectionView<Map<String, List<String>>> headerColumns; private transient ReidentifyContentRequest.Builder requestBuilder; private transient DlpServiceClient dlpServiceClient; @Setup public void setup() throws IOException { requestBuilder = ReidentifyContentRequest.newBuilder().setParent(projectId); if (inspectTemplateName != null) { requestBuilder.setInspectTemplateName(inspectTemplateName); } if (inspectConfig != null) { requestBuilder.setInspectConfig(inspectConfig); } if (reidentifyConfig != null) { requestBuilder.setReidentifyConfig(reidentifyConfig); } if (reidentifyTemplateName != null) { requestBuilder.setReidentifyTemplateName(reidentifyTemplateName); } dlpServiceClient = DlpServiceClient.create(); } @Teardown public void teardown() { dlpServiceClient.close(); } /** * @param projectId ID of GCP project that should be used for deidentification. * @param inspectTemplateName Template name for inspection. Optional. * @param reidentifyTemplateName Template name for reidentification. Either this or * reidentifyConfig is required. * @param inspectConfig Configuration object for inspection. Optional. * @param reidentifyConfig Reidentification config containing data transformations. Either this * or reidentifyTemplateName is required. * @param headerColumns Header row of the table if applicable. */ public ReidentifyText( String projectId, String inspectTemplateName, String reidentifyTemplateName, InspectConfig inspectConfig, DeidentifyConfig reidentifyConfig, PCollectionView<Map<String, List<String>>> headerColumns) { this.projectId = projectId; this.inspectTemplateName = inspectTemplateName; this.reidentifyTemplateName = reidentifyTemplateName; this.inspectConfig = inspectConfig; this.reidentifyConfig = reidentifyConfig; this.headerColumns = headerColumns; } @ProcessElement public void processElement(ProcessContext context) throws IOException { String tableRef = context.element().getKey(); List<FieldId> tableHeaders; if (headerColumns != null) { Map<String, List<String>> headersByTableRefMap = context.sideInput(headerColumns); List<String> columns = headersByTableRefMap.get(tableRef); if (columns == null) { throw new RuntimeException( "Unable to find " + tableRef + " in the map with table references " + headersByTableRefMap.keySet()); } tableHeaders = columns.stream() .map(header -> FieldId.newBuilder().setName(header).build()) .collect(Collectors.toList()); } else { // handle unstructured input. 
tableHeaders = new ArrayList<>(); tableHeaders.add(FieldId.newBuilder().setName("value").build()); } Table table = Table.newBuilder() .addAllHeaders(tableHeaders) .addAllRows(context.element().getValue()) .build(); ContentItem contentItem = ContentItem.newBuilder().setTable(table).build(); this.requestBuilder.setItem(contentItem); ReidentifyContentResponse response = dlpServiceClient.reidentifyContent(requestBuilder.build()); context.output(KV.of(tableRef, response)); } } }
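Below is a minimal usage sketch for the transform above (illustrative only, not part of the original file). It assumes the caller has already parsed its input into Table.Row values and built a header side input keyed by table reference; the project ID and template name are hypothetical placeholders.

class DLPReidentifyTextExample {
  // Wires DLPReidentifyText into a pipeline; all builder values here are assumptions.
  static PCollection<KV<String, ReidentifyContentResponse>> reidentify(
      PCollection<KV<String, Table.Row>> rows,
      PCollectionView<Map<String, List<String>>> headers) {
    return rows.apply(
        "Reidentify",
        DLPReidentifyText.newBuilder()
            .setProjectId("my-gcp-project") // hypothetical project ID
            .setReidentifyTemplateName(
                "projects/my-gcp-project/deidentifyTemplates/my-template") // hypothetical name
            .setBatchSizeBytes(512000) // must not exceed DLP_PAYLOAD_LIMIT_BYTES (524000)
            .setColumnDelimiter(',') // required because header columns are supplied
            .setHeaderColumns(headers)
            .build());
  }
}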
package mil.nga.geopackage.tiles.features; import java.awt.image.BufferedImage; import java.util.Collection; import java.util.Collections; import java.util.LinkedHashSet; import java.util.Set; import mil.nga.geopackage.BoundingBox; import mil.nga.geopackage.GeoPackage; import mil.nga.geopackage.GeoPackageException; import mil.nga.geopackage.db.CoreSQLUtils; import mil.nga.geopackage.features.user.FeatureDao; import mil.nga.geopackage.features.user.FeatureResultSet; import mil.nga.geopackage.tiles.TileBoundingBoxUtils; import mil.nga.proj.Projection; import mil.nga.proj.ProjectionConstants; import mil.nga.proj.ProjectionFactory; /** * Feature Preview for drawing a preview tile from a feature table * * @author osbornb * @since 3.5.0 */ public class FeaturePreview { /** * GeoPackage */ private final GeoPackage geoPackage; /** * Feature Tiles for drawing */ private final FeatureTiles featureTiles; /** * Manual bounding box query flag for non indexed and empty contents bounds * feature tables */ private boolean manual = false; /** * Buffer percentage for drawing empty non features edges (greater than or * equal to 0.0 and less than 0.5) */ private double bufferPercentage = 0.0; /** * Query columns */ private Set<String> columns = new LinkedHashSet<>(); /** * Where clause */ private String where; /** * Where clause arguments */ private String[] whereArgs = null; /** * Query feature limit */ private Integer limit = null; /** * Constructor * * @param geoPackage * GeoPackage * @param featureTable * feature table */ public FeaturePreview(GeoPackage geoPackage, String featureTable) { this(geoPackage, geoPackage.getFeatureDao(featureTable)); } /** * Constructor * * @param geoPackage * GeoPackage * @param featureDao * feature DAO */ public FeaturePreview(GeoPackage geoPackage, FeatureDao featureDao) { this(geoPackage, new DefaultFeatureTiles(geoPackage, featureDao)); } /** * Constructor * * @param geoPackage * GeoPackage * @param featureTiles * feature tiles */ public FeaturePreview(GeoPackage geoPackage, FeatureTiles featureTiles) { this.geoPackage = geoPackage; this.featureTiles = featureTiles; FeatureDao featureDao = featureTiles.getFeatureDao(); columns.add(featureDao.getIdColumnName()); columns.add(featureDao.getGeometryColumnName()); where = CoreSQLUtils.quoteWrap(featureDao.getGeometryColumnName()) + " IS NOT NULL"; } /** * Get the GeoPackage * * @return GeoPackage */ public GeoPackage getGeoPackage() { return geoPackage; } /** * Get the feature tiles * * @return feature tiles */ public FeatureTiles getFeatureTiles() { return featureTiles; } /** * Is manual bounding box query enabled for non indexed and empty contents * bounds feature tables * * @return manual flag */ public boolean isManual() { return manual; } /** * Set the manual bounding box query flag for non indexed and empty contents * bounds feature tables * * @param manual * manual flag */ public void setManual(boolean manual) { this.manual = manual; } /** * Get the buffer percentage for drawing empty non features edges (i.e. 0.1 * equals 10% buffer edges) * * @return buffer percentage (greater than or equal to 0.0 and less than * 0.5) */ public double getBufferPercentage() { return bufferPercentage; } /** * Set the buffer percentage for drawing empty non features edges (i.e. 
0.1 * equals 10% buffer edges) * * @param bufferPercentage * buffer percentage (greater than or equal to 0.0 and less than * 0.5) */ public void setBufferPercentage(double bufferPercentage) { if (bufferPercentage < 0.0 || bufferPercentage >= 0.5) { throw new GeoPackageException( "Buffer percentage must be in the range: 0.0 <= bufferPercentage < 0.5. invalid value: " + bufferPercentage); } this.bufferPercentage = bufferPercentage; } /** * Get the query columns * * @return columns */ public Set<String> getColumns() { return Collections.unmodifiableSet(columns); } /** * Add query columns * * @param columns * columns */ public void addColumns(Collection<String> columns) { this.columns.addAll(columns); } /** * Add query columns * * @param columns * columns */ public void addColumns(String[] columns) { for (String column : columns) { addColumn(column); } } /** * Add a query column * * @param column * column */ public void addColumn(String column) { columns.add(column); } /** * Get the where clause * * @return where */ public String getWhere() { return where; } /** * Set the where clause * * @param where * where */ public void setWhere(String where) { this.where = where; } /** * Append to the where clause * * @param where * where */ public void appendWhere(String where) { this.where = (this.where != null ? this.where + " AND " : "") + where; } /** * Get the where arguments * * @return where args */ public String[] getWhereArgs() { return whereArgs; } /** * Set the where arguments * * @param whereArgs * where arguments */ public void setWhereArgs(String[] whereArgs) { this.whereArgs = whereArgs; } /** * Get the feature query limit * * @return limit */ public Integer getLimit() { return limit; } /** * Set the feature query limit * * @param limit * limit */ public void setLimit(Integer limit) { this.limit = limit; } /** * Draw a preview image * * @return preview image */ public BufferedImage draw() { BufferedImage image = null; FeatureDao featureDao = featureTiles.getFeatureDao(); String table = featureDao.getTableName(); Projection webMercator = ProjectionFactory .getProjection(ProjectionConstants.EPSG_WEB_MERCATOR); BoundingBox boundingBox = geoPackage.getFeatureBoundingBox(webMercator, table, false); if (boundingBox == null) { boundingBox = geoPackage.getContentsBoundingBox(webMercator, table); } if (boundingBox == null && manual) { boundingBox = geoPackage.getFeatureBoundingBox(webMercator, table, manual); } if (boundingBox != null) { boundingBox = TileBoundingBoxUtils .boundWebMercatorBoundingBox(boundingBox); BoundingBox expandedBoundingBox = boundingBox .squareExpand(bufferPercentage); expandedBoundingBox = TileBoundingBoxUtils .boundWebMercatorBoundingBox(expandedBoundingBox); int zoom = TileBoundingBoxUtils.getZoomLevel(expandedBoundingBox); FeatureResultSet results = featureDao.query( columns.toArray(new String[] {}), where, whereArgs, null, null, null, limit != null ? limit.toString() : null); image = featureTiles.drawTile(zoom, expandedBoundingBox, results); } return image; } }
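A short usage sketch for FeaturePreview (illustrative only, not part of the original file). It assumes an already-open GeoPackage; the feature table name is a hypothetical placeholder.

class FeaturePreviewExample {
  // Draws a preview tile for a feature table using only the setters defined above.
  static BufferedImage preview(GeoPackage geoPackage) {
    FeaturePreview preview = new FeaturePreview(geoPackage, "features"); // hypothetical table name
    preview.setManual(true);          // fall back to a manual bounds query for non-indexed tables
    preview.setBufferPercentage(0.1); // keep a 10% feature-free buffer around the tile edges
    preview.setLimit(5000);           // cap the number of features queried for the preview
    return preview.draw();            // returns null when no bounding box could be determined
  }
}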
package com.planet_ink.coffee_mud.Abilities.Druid;
import com.planet_ink.coffee_mud.core.interfaces.*;
import com.planet_ink.coffee_mud.core.*;
import com.planet_ink.coffee_mud.core.collections.*;
import com.planet_ink.coffee_mud.Abilities.interfaces.*;
import com.planet_ink.coffee_mud.Areas.interfaces.*;
import com.planet_ink.coffee_mud.Behaviors.interfaces.*;
import com.planet_ink.coffee_mud.CharClasses.interfaces.*;
import com.planet_ink.coffee_mud.Commands.interfaces.*;
import com.planet_ink.coffee_mud.Common.interfaces.*;
import com.planet_ink.coffee_mud.Exits.interfaces.*;
import com.planet_ink.coffee_mud.Items.interfaces.*;
import com.planet_ink.coffee_mud.Libraries.interfaces.*;
import com.planet_ink.coffee_mud.Locales.interfaces.*;
import com.planet_ink.coffee_mud.MOBS.interfaces.*;
import com.planet_ink.coffee_mud.Races.interfaces.*;

import java.util.*;

/*
   Copyright 2002-2022 Bo Zimmerman

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
*/
public class Chant_FeelCold extends Chant
{
	@Override
	public String ID()
	{
		return "Chant_FeelCold";
	}

	private final static String localizedName = CMLib.lang().L("Feel Cold");

	@Override
	public String name()
	{
		return localizedName;
	}

	private final static String localizedStaticDisplay = CMLib.lang().L("(Feel Cold)");

	@Override
	public String displayText()
	{
		return localizedStaticDisplay;
	}

	@Override
	public int classificationCode()
	{
		return Ability.ACODE_CHANT|Ability.DOMAIN_ENDURING;
	}

	@Override
	public int abstractQuality()
	{
		return Ability.QUALITY_MALICIOUS;
	}

	@Override
	protected int canAffectCode()
	{
		return CAN_MOBS;
	}

	@Override
	protected int canTargetCode()
	{
		return CAN_MOBS;
	}

	@Override
	public void unInvoke()
	{
		// undo the effects of this spell
		if(!(affected instanceof MOB))
			return;
		final MOB mob=(MOB)affected;
		if(canBeUninvoked())
			mob.tell(L("Your cold feeling is gone."));
		super.unInvoke();
	}

	@Override
	public boolean okMessage(final Environmental myHost, final CMMsg msg)
	{
		if(!super.okMessage(myHost,msg))
			return false;
		if(!(affected instanceof MOB))
			return true;
		final MOB mob=(MOB)affected;
		if((msg.amITarget(mob))&&(msg.targetMinor()==CMMsg.TYP_DAMAGE)
		&&(msg.sourceMinor()==CMMsg.TYP_COLD))
		{
			// amplify incoming cold damage by 50%, plus 5% per expertise level
			final int recovery=(int)Math.round(CMath.mul(msg.value(),0.5+CMath.mul(super.getXLEVELLevel(mob), 0.05)));
			msg.setValue(msg.value()+recovery);
		}
		return true;
	}

	@Override
	public void affectCharStats(final MOB affectedMOB, final CharStats affectedStats)
	{
		super.affectCharStats(affectedMOB,affectedStats);
		// a -100 penalty to the cold saving throw makes the weather damage below far more likely
		affectedStats.setStat(CharStats.STAT_SAVE_COLD,affectedStats.getStat(CharStats.STAT_SAVE_COLD)-100);
	}

	@Override
	public boolean tick(final Tickable ticking, final int tickID)
	{
		if(!super.tick(ticking,tickID))
			return false;
		if(tickID!=Tickable.TICKID_MOB)
			return false;
		if((affecting()!=null)&&(affecting() instanceof MOB))
		{
			final MOB M=(MOB)affecting();
			final Room room=M.location();
			if(room!=null)
			{
				final MOB invoker=(invoker()!=null) ?
invoker() : M; if((room.getArea().getClimateObj().weatherType(room)==Climate.WEATHER_WINDY) &&((room.getClimateType()&Places.CLIMASK_COLD)>0) &&(CMLib.dice().rollPercentage()>M.charStats().getSave(CharStats.STAT_SAVE_COLD))) CMLib.combat().postDamage(invoker,M,null,1,CMMsg.MASK_MALICIOUS|CMMsg.MASK_ALWAYS|CMMsg.TYP_COLD,Weapon.TYPE_FROSTING,L("The cold biting wind <DAMAGE> <T-NAME>!")); else if((room.getArea().getClimateObj().weatherType(room)==Climate.WEATHER_WINTER_COLD) &&(CMLib.dice().rollPercentage()>M.charStats().getSave(CharStats.STAT_SAVE_COLD))) CMLib.combat().postDamage(invoker,M,null,1,CMMsg.MASK_MALICIOUS|CMMsg.MASK_ALWAYS|CMMsg.TYP_COLD,Weapon.TYPE_FROSTING,L("The biting cold <DAMAGE> <T-NAME>!")); else if((room.getArea().getClimateObj().weatherType(room)==Climate.WEATHER_SNOW) &&(CMLib.dice().rollPercentage()>M.charStats().getSave(CharStats.STAT_SAVE_COLD))) { final int damage=CMLib.dice().roll(1,8,0); CMLib.combat().postDamage(invoker,M,null,damage,CMMsg.MASK_MALICIOUS|CMMsg.MASK_ALWAYS|CMMsg.TYP_COLD,Weapon.TYPE_FROSTING,L("The blistering snow <DAMAGE> <T-NAME>!")); } else if((room.getArea().getClimateObj().weatherType(room)==Climate.WEATHER_BLIZZARD) &&(CMLib.dice().rollPercentage()>M.charStats().getSave(CharStats.STAT_SAVE_COLD))) { final int damage=CMLib.dice().roll(1,16,0); CMLib.combat().postDamage(invoker,M,null,damage,CMMsg.MASK_MALICIOUS|CMMsg.MASK_ALWAYS|CMMsg.TYP_COLD,Weapon.TYPE_FROSTING,L("The blizzard <DAMAGE> <T-NAME>!")); } else if((room.getArea().getClimateObj().weatherType(room)==Climate.WEATHER_HAIL) &&(CMLib.dice().rollPercentage()>M.charStats().getSave(CharStats.STAT_SAVE_COLD))) { final int damage=CMLib.dice().roll(1,8,0); CMLib.combat().postDamage(invoker,M,null,damage,CMMsg.MASK_MALICIOUS|CMMsg.MASK_ALWAYS|CMMsg.TYP_COLD,Weapon.TYPE_FROSTING,L("The biting hail <DAMAGE> <T-NAME>!")); } else return true; CMLib.combat().postRevengeAttack(M, invoker); } } return true; } @Override public boolean invoke(final MOB mob, final List<String> commands, final Physical givenTarget, final boolean auto, final int asLevel) { final MOB target=this.getTarget(mob,commands,givenTarget); if(target==null) return false; if(!super.invoke(mob,commands,givenTarget,auto,asLevel)) return false; final boolean success=proficiencyCheck(mob,0,auto); if(success) { invoker=mob; final CMMsg msg=CMClass.getMsg(mob,target,this,verbalCastCode(mob,target,auto),auto?"":L("^S<S-NAME> chant(s) to <T-NAMESELF>.^?")); if(mob.location().okMessage(mob,msg)) { mob.location().send(mob,msg); if(msg.value()<=0) { mob.location().show(target,null,CMMsg.MSG_OK_VISUAL,L("<S-NAME> feel(s) very cold.")); maliciousAffect(mob,target,asLevel,0,-1); } } } else return maliciousFizzle(mob,target,L("<S-NAME> chant(s) to <T-NAMESELF>, but the magic fades.")); // return whether it worked return success; } }
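For concreteness, a worked example of the damage amplification performed in okMessage above (illustrative only; the incoming damage and expertise level are hypothetical values):

class FeelColdExample {
	// Mirrors okMessage(): extra = round(incoming * (0.5 + 0.05 * expertiseLevel)).
	static int amplifiedColdDamage(final int incoming, final int expertiseLevel)
	{
		final int extra = (int) Math.round(incoming * (0.5 + 0.05 * expertiseLevel));
		return incoming + extra; // e.g. a 20-point cold hit at expertise level 2 becomes 20 + 12 = 32
	}
}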
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * Autogenerated by Thrift Compiler (0.9.3) * * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING * @generated */ package org.apache.storm.generated; import org.apache.thrift.scheme.IScheme; import org.apache.thrift.scheme.SchemeFactory; import org.apache.thrift.scheme.StandardScheme; import org.apache.thrift.scheme.TupleScheme; import org.apache.thrift.protocol.TTupleProtocol; import org.apache.thrift.protocol.TProtocolException; import org.apache.thrift.EncodingUtils; import org.apache.thrift.TException; import org.apache.thrift.async.AsyncMethodCallback; import org.apache.thrift.server.AbstractNonblockingServer.*; import java.util.List; import java.util.ArrayList; import java.util.Map; import java.util.HashMap; import java.util.EnumMap; import java.util.Set; import java.util.HashSet; import java.util.EnumSet; import java.util.Collections; import java.util.BitSet; import java.nio.ByteBuffer; import java.util.Arrays; import javax.annotation.Generated; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"}) @Generated(value = "Autogenerated by Thrift Compiler (0.9.3)") public class BoltStats implements org.apache.thrift.TBase<BoltStats, BoltStats._Fields>, java.io.Serializable, Cloneable, Comparable<BoltStats> { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("BoltStats"); private static final org.apache.thrift.protocol.TField ACKED_FIELD_DESC = new org.apache.thrift.protocol.TField("acked", org.apache.thrift.protocol.TType.MAP, (short)1); private static final org.apache.thrift.protocol.TField FAILED_FIELD_DESC = new org.apache.thrift.protocol.TField("failed", org.apache.thrift.protocol.TType.MAP, (short)2); private static final org.apache.thrift.protocol.TField PROCESS_MS_AVG_FIELD_DESC = new org.apache.thrift.protocol.TField("process_ms_avg", org.apache.thrift.protocol.TType.MAP, (short)3); private static final org.apache.thrift.protocol.TField EXECUTED_FIELD_DESC = new org.apache.thrift.protocol.TField("executed", org.apache.thrift.protocol.TType.MAP, (short)4); private static final org.apache.thrift.protocol.TField EXECUTE_MS_AVG_FIELD_DESC = new org.apache.thrift.protocol.TField("execute_ms_avg", org.apache.thrift.protocol.TType.MAP, (short)5); private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? 
extends IScheme>, SchemeFactory>(); static { schemes.put(StandardScheme.class, new BoltStatsStandardSchemeFactory()); schemes.put(TupleScheme.class, new BoltStatsTupleSchemeFactory()); } private Map<String,Map<GlobalStreamId,Long>> acked; // required private Map<String,Map<GlobalStreamId,Long>> failed; // required private Map<String,Map<GlobalStreamId,Double>> process_ms_avg; // required private Map<String,Map<GlobalStreamId,Long>> executed; // required private Map<String,Map<GlobalStreamId,Double>> execute_ms_avg; // required /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */ public enum _Fields implements org.apache.thrift.TFieldIdEnum { ACKED((short)1, "acked"), FAILED((short)2, "failed"), PROCESS_MS_AVG((short)3, "process_ms_avg"), EXECUTED((short)4, "executed"), EXECUTE_MS_AVG((short)5, "execute_ms_avg"); private static final Map<String, _Fields> byName = new HashMap<String, _Fields>(); static { for (_Fields field : EnumSet.allOf(_Fields.class)) { byName.put(field.getFieldName(), field); } } /** * Find the _Fields constant that matches fieldId, or null if its not found. */ public static _Fields findByThriftId(int fieldId) { switch(fieldId) { case 1: // ACKED return ACKED; case 2: // FAILED return FAILED; case 3: // PROCESS_MS_AVG return PROCESS_MS_AVG; case 4: // EXECUTED return EXECUTED; case 5: // EXECUTE_MS_AVG return EXECUTE_MS_AVG; default: return null; } } /** * Find the _Fields constant that matches fieldId, throwing an exception * if it is not found. */ public static _Fields findByThriftIdOrThrow(int fieldId) { _Fields fields = findByThriftId(fieldId); if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!"); return fields; } /** * Find the _Fields constant that matches name, or null if its not found. 
*/ public static _Fields findByName(String name) { return byName.get(name); } private final short _thriftId; private final String _fieldName; _Fields(short thriftId, String fieldName) { _thriftId = thriftId; _fieldName = fieldName; } public short getThriftFieldId() { return _thriftId; } public String getFieldName() { return _fieldName; } } // isset id assignments public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap; static { Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class); tmpMap.put(_Fields.ACKED, new org.apache.thrift.meta_data.FieldMetaData("acked", org.apache.thrift.TFieldRequirementType.REQUIRED, new org.apache.thrift.meta_data.MapMetaData(org.apache.thrift.protocol.TType.MAP, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING), new org.apache.thrift.meta_data.MapMetaData(org.apache.thrift.protocol.TType.MAP, new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, GlobalStreamId.class), new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I64))))); tmpMap.put(_Fields.FAILED, new org.apache.thrift.meta_data.FieldMetaData("failed", org.apache.thrift.TFieldRequirementType.REQUIRED, new org.apache.thrift.meta_data.MapMetaData(org.apache.thrift.protocol.TType.MAP, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING), new org.apache.thrift.meta_data.MapMetaData(org.apache.thrift.protocol.TType.MAP, new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, GlobalStreamId.class), new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I64))))); tmpMap.put(_Fields.PROCESS_MS_AVG, new org.apache.thrift.meta_data.FieldMetaData("process_ms_avg", org.apache.thrift.TFieldRequirementType.REQUIRED, new org.apache.thrift.meta_data.MapMetaData(org.apache.thrift.protocol.TType.MAP, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING), new org.apache.thrift.meta_data.MapMetaData(org.apache.thrift.protocol.TType.MAP, new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, GlobalStreamId.class), new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.DOUBLE))))); tmpMap.put(_Fields.EXECUTED, new org.apache.thrift.meta_data.FieldMetaData("executed", org.apache.thrift.TFieldRequirementType.REQUIRED, new org.apache.thrift.meta_data.MapMetaData(org.apache.thrift.protocol.TType.MAP, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING), new org.apache.thrift.meta_data.MapMetaData(org.apache.thrift.protocol.TType.MAP, new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, GlobalStreamId.class), new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I64))))); tmpMap.put(_Fields.EXECUTE_MS_AVG, new org.apache.thrift.meta_data.FieldMetaData("execute_ms_avg", org.apache.thrift.TFieldRequirementType.REQUIRED, new org.apache.thrift.meta_data.MapMetaData(org.apache.thrift.protocol.TType.MAP, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING), new org.apache.thrift.meta_data.MapMetaData(org.apache.thrift.protocol.TType.MAP, new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, GlobalStreamId.class), new 
org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.DOUBLE))))); metaDataMap = Collections.unmodifiableMap(tmpMap); org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(BoltStats.class, metaDataMap); } public BoltStats() { } public BoltStats( Map<String,Map<GlobalStreamId,Long>> acked, Map<String,Map<GlobalStreamId,Long>> failed, Map<String,Map<GlobalStreamId,Double>> process_ms_avg, Map<String,Map<GlobalStreamId,Long>> executed, Map<String,Map<GlobalStreamId,Double>> execute_ms_avg) { this(); this.acked = acked; this.failed = failed; this.process_ms_avg = process_ms_avg; this.executed = executed; this.execute_ms_avg = execute_ms_avg; } /** * Performs a deep copy on <i>other</i>. */ public BoltStats(BoltStats other) { if (other.is_set_acked()) { Map<String,Map<GlobalStreamId,Long>> __this__acked = new HashMap<String,Map<GlobalStreamId,Long>>(other.acked.size()); for (Map.Entry<String, Map<GlobalStreamId,Long>> other_element : other.acked.entrySet()) { String other_element_key = other_element.getKey(); Map<GlobalStreamId,Long> other_element_value = other_element.getValue(); String __this__acked_copy_key = other_element_key; Map<GlobalStreamId,Long> __this__acked_copy_value = new HashMap<GlobalStreamId,Long>(other_element_value.size()); for (Map.Entry<GlobalStreamId, Long> other_element_value_element : other_element_value.entrySet()) { GlobalStreamId other_element_value_element_key = other_element_value_element.getKey(); Long other_element_value_element_value = other_element_value_element.getValue(); GlobalStreamId __this__acked_copy_value_copy_key = new GlobalStreamId(other_element_value_element_key); Long __this__acked_copy_value_copy_value = other_element_value_element_value; __this__acked_copy_value.put(__this__acked_copy_value_copy_key, __this__acked_copy_value_copy_value); } __this__acked.put(__this__acked_copy_key, __this__acked_copy_value); } this.acked = __this__acked; } if (other.is_set_failed()) { Map<String,Map<GlobalStreamId,Long>> __this__failed = new HashMap<String,Map<GlobalStreamId,Long>>(other.failed.size()); for (Map.Entry<String, Map<GlobalStreamId,Long>> other_element : other.failed.entrySet()) { String other_element_key = other_element.getKey(); Map<GlobalStreamId,Long> other_element_value = other_element.getValue(); String __this__failed_copy_key = other_element_key; Map<GlobalStreamId,Long> __this__failed_copy_value = new HashMap<GlobalStreamId,Long>(other_element_value.size()); for (Map.Entry<GlobalStreamId, Long> other_element_value_element : other_element_value.entrySet()) { GlobalStreamId other_element_value_element_key = other_element_value_element.getKey(); Long other_element_value_element_value = other_element_value_element.getValue(); GlobalStreamId __this__failed_copy_value_copy_key = new GlobalStreamId(other_element_value_element_key); Long __this__failed_copy_value_copy_value = other_element_value_element_value; __this__failed_copy_value.put(__this__failed_copy_value_copy_key, __this__failed_copy_value_copy_value); } __this__failed.put(__this__failed_copy_key, __this__failed_copy_value); } this.failed = __this__failed; } if (other.is_set_process_ms_avg()) { Map<String,Map<GlobalStreamId,Double>> __this__process_ms_avg = new HashMap<String,Map<GlobalStreamId,Double>>(other.process_ms_avg.size()); for (Map.Entry<String, Map<GlobalStreamId,Double>> other_element : other.process_ms_avg.entrySet()) { String other_element_key = other_element.getKey(); Map<GlobalStreamId,Double> other_element_value = other_element.getValue(); 
String __this__process_ms_avg_copy_key = other_element_key; Map<GlobalStreamId,Double> __this__process_ms_avg_copy_value = new HashMap<GlobalStreamId,Double>(other_element_value.size()); for (Map.Entry<GlobalStreamId, Double> other_element_value_element : other_element_value.entrySet()) { GlobalStreamId other_element_value_element_key = other_element_value_element.getKey(); Double other_element_value_element_value = other_element_value_element.getValue(); GlobalStreamId __this__process_ms_avg_copy_value_copy_key = new GlobalStreamId(other_element_value_element_key); Double __this__process_ms_avg_copy_value_copy_value = other_element_value_element_value; __this__process_ms_avg_copy_value.put(__this__process_ms_avg_copy_value_copy_key, __this__process_ms_avg_copy_value_copy_value); } __this__process_ms_avg.put(__this__process_ms_avg_copy_key, __this__process_ms_avg_copy_value); } this.process_ms_avg = __this__process_ms_avg; } if (other.is_set_executed()) { Map<String,Map<GlobalStreamId,Long>> __this__executed = new HashMap<String,Map<GlobalStreamId,Long>>(other.executed.size()); for (Map.Entry<String, Map<GlobalStreamId,Long>> other_element : other.executed.entrySet()) { String other_element_key = other_element.getKey(); Map<GlobalStreamId,Long> other_element_value = other_element.getValue(); String __this__executed_copy_key = other_element_key; Map<GlobalStreamId,Long> __this__executed_copy_value = new HashMap<GlobalStreamId,Long>(other_element_value.size()); for (Map.Entry<GlobalStreamId, Long> other_element_value_element : other_element_value.entrySet()) { GlobalStreamId other_element_value_element_key = other_element_value_element.getKey(); Long other_element_value_element_value = other_element_value_element.getValue(); GlobalStreamId __this__executed_copy_value_copy_key = new GlobalStreamId(other_element_value_element_key); Long __this__executed_copy_value_copy_value = other_element_value_element_value; __this__executed_copy_value.put(__this__executed_copy_value_copy_key, __this__executed_copy_value_copy_value); } __this__executed.put(__this__executed_copy_key, __this__executed_copy_value); } this.executed = __this__executed; } if (other.is_set_execute_ms_avg()) { Map<String,Map<GlobalStreamId,Double>> __this__execute_ms_avg = new HashMap<String,Map<GlobalStreamId,Double>>(other.execute_ms_avg.size()); for (Map.Entry<String, Map<GlobalStreamId,Double>> other_element : other.execute_ms_avg.entrySet()) { String other_element_key = other_element.getKey(); Map<GlobalStreamId,Double> other_element_value = other_element.getValue(); String __this__execute_ms_avg_copy_key = other_element_key; Map<GlobalStreamId,Double> __this__execute_ms_avg_copy_value = new HashMap<GlobalStreamId,Double>(other_element_value.size()); for (Map.Entry<GlobalStreamId, Double> other_element_value_element : other_element_value.entrySet()) { GlobalStreamId other_element_value_element_key = other_element_value_element.getKey(); Double other_element_value_element_value = other_element_value_element.getValue(); GlobalStreamId __this__execute_ms_avg_copy_value_copy_key = new GlobalStreamId(other_element_value_element_key); Double __this__execute_ms_avg_copy_value_copy_value = other_element_value_element_value; __this__execute_ms_avg_copy_value.put(__this__execute_ms_avg_copy_value_copy_key, __this__execute_ms_avg_copy_value_copy_value); } __this__execute_ms_avg.put(__this__execute_ms_avg_copy_key, __this__execute_ms_avg_copy_value); } this.execute_ms_avg = __this__execute_ms_avg; } } public BoltStats deepCopy() { return 
new BoltStats(this); } @Override public void clear() { this.acked = null; this.failed = null; this.process_ms_avg = null; this.executed = null; this.execute_ms_avg = null; } public int get_acked_size() { return (this.acked == null) ? 0 : this.acked.size(); } public void put_to_acked(String key, Map<GlobalStreamId,Long> val) { if (this.acked == null) { this.acked = new HashMap<String,Map<GlobalStreamId,Long>>(); } this.acked.put(key, val); } public Map<String,Map<GlobalStreamId,Long>> get_acked() { return this.acked; } public void set_acked(Map<String,Map<GlobalStreamId,Long>> acked) { this.acked = acked; } public void unset_acked() { this.acked = null; } /** Returns true if field acked is set (has been assigned a value) and false otherwise */ public boolean is_set_acked() { return this.acked != null; } public void set_acked_isSet(boolean value) { if (!value) { this.acked = null; } } public int get_failed_size() { return (this.failed == null) ? 0 : this.failed.size(); } public void put_to_failed(String key, Map<GlobalStreamId,Long> val) { if (this.failed == null) { this.failed = new HashMap<String,Map<GlobalStreamId,Long>>(); } this.failed.put(key, val); } public Map<String,Map<GlobalStreamId,Long>> get_failed() { return this.failed; } public void set_failed(Map<String,Map<GlobalStreamId,Long>> failed) { this.failed = failed; } public void unset_failed() { this.failed = null; } /** Returns true if field failed is set (has been assigned a value) and false otherwise */ public boolean is_set_failed() { return this.failed != null; } public void set_failed_isSet(boolean value) { if (!value) { this.failed = null; } } public int get_process_ms_avg_size() { return (this.process_ms_avg == null) ? 0 : this.process_ms_avg.size(); } public void put_to_process_ms_avg(String key, Map<GlobalStreamId,Double> val) { if (this.process_ms_avg == null) { this.process_ms_avg = new HashMap<String,Map<GlobalStreamId,Double>>(); } this.process_ms_avg.put(key, val); } public Map<String,Map<GlobalStreamId,Double>> get_process_ms_avg() { return this.process_ms_avg; } public void set_process_ms_avg(Map<String,Map<GlobalStreamId,Double>> process_ms_avg) { this.process_ms_avg = process_ms_avg; } public void unset_process_ms_avg() { this.process_ms_avg = null; } /** Returns true if field process_ms_avg is set (has been assigned a value) and false otherwise */ public boolean is_set_process_ms_avg() { return this.process_ms_avg != null; } public void set_process_ms_avg_isSet(boolean value) { if (!value) { this.process_ms_avg = null; } } public int get_executed_size() { return (this.executed == null) ? 0 : this.executed.size(); } public void put_to_executed(String key, Map<GlobalStreamId,Long> val) { if (this.executed == null) { this.executed = new HashMap<String,Map<GlobalStreamId,Long>>(); } this.executed.put(key, val); } public Map<String,Map<GlobalStreamId,Long>> get_executed() { return this.executed; } public void set_executed(Map<String,Map<GlobalStreamId,Long>> executed) { this.executed = executed; } public void unset_executed() { this.executed = null; } /** Returns true if field executed is set (has been assigned a value) and false otherwise */ public boolean is_set_executed() { return this.executed != null; } public void set_executed_isSet(boolean value) { if (!value) { this.executed = null; } } public int get_execute_ms_avg_size() { return (this.execute_ms_avg == null) ? 
0 : this.execute_ms_avg.size(); } public void put_to_execute_ms_avg(String key, Map<GlobalStreamId,Double> val) { if (this.execute_ms_avg == null) { this.execute_ms_avg = new HashMap<String,Map<GlobalStreamId,Double>>(); } this.execute_ms_avg.put(key, val); } public Map<String,Map<GlobalStreamId,Double>> get_execute_ms_avg() { return this.execute_ms_avg; } public void set_execute_ms_avg(Map<String,Map<GlobalStreamId,Double>> execute_ms_avg) { this.execute_ms_avg = execute_ms_avg; } public void unset_execute_ms_avg() { this.execute_ms_avg = null; } /** Returns true if field execute_ms_avg is set (has been assigned a value) and false otherwise */ public boolean is_set_execute_ms_avg() { return this.execute_ms_avg != null; } public void set_execute_ms_avg_isSet(boolean value) { if (!value) { this.execute_ms_avg = null; } } public void setFieldValue(_Fields field, Object value) { switch (field) { case ACKED: if (value == null) { unset_acked(); } else { set_acked((Map<String,Map<GlobalStreamId,Long>>)value); } break; case FAILED: if (value == null) { unset_failed(); } else { set_failed((Map<String,Map<GlobalStreamId,Long>>)value); } break; case PROCESS_MS_AVG: if (value == null) { unset_process_ms_avg(); } else { set_process_ms_avg((Map<String,Map<GlobalStreamId,Double>>)value); } break; case EXECUTED: if (value == null) { unset_executed(); } else { set_executed((Map<String,Map<GlobalStreamId,Long>>)value); } break; case EXECUTE_MS_AVG: if (value == null) { unset_execute_ms_avg(); } else { set_execute_ms_avg((Map<String,Map<GlobalStreamId,Double>>)value); } break; } } public Object getFieldValue(_Fields field) { switch (field) { case ACKED: return get_acked(); case FAILED: return get_failed(); case PROCESS_MS_AVG: return get_process_ms_avg(); case EXECUTED: return get_executed(); case EXECUTE_MS_AVG: return get_execute_ms_avg(); } throw new IllegalStateException(); } /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */ public boolean isSet(_Fields field) { if (field == null) { throw new IllegalArgumentException(); } switch (field) { case ACKED: return is_set_acked(); case FAILED: return is_set_failed(); case PROCESS_MS_AVG: return is_set_process_ms_avg(); case EXECUTED: return is_set_executed(); case EXECUTE_MS_AVG: return is_set_execute_ms_avg(); } throw new IllegalStateException(); } @Override public boolean equals(Object that) { if (that == null) return false; if (that instanceof BoltStats) return this.equals((BoltStats)that); return false; } public boolean equals(BoltStats that) { if (that == null) return false; boolean this_present_acked = true && this.is_set_acked(); boolean that_present_acked = true && that.is_set_acked(); if (this_present_acked || that_present_acked) { if (!(this_present_acked && that_present_acked)) return false; if (!this.acked.equals(that.acked)) return false; } boolean this_present_failed = true && this.is_set_failed(); boolean that_present_failed = true && that.is_set_failed(); if (this_present_failed || that_present_failed) { if (!(this_present_failed && that_present_failed)) return false; if (!this.failed.equals(that.failed)) return false; } boolean this_present_process_ms_avg = true && this.is_set_process_ms_avg(); boolean that_present_process_ms_avg = true && that.is_set_process_ms_avg(); if (this_present_process_ms_avg || that_present_process_ms_avg) { if (!(this_present_process_ms_avg && that_present_process_ms_avg)) return false; if (!this.process_ms_avg.equals(that.process_ms_avg)) return false; } 
boolean this_present_executed = true && this.is_set_executed(); boolean that_present_executed = true && that.is_set_executed(); if (this_present_executed || that_present_executed) { if (!(this_present_executed && that_present_executed)) return false; if (!this.executed.equals(that.executed)) return false; } boolean this_present_execute_ms_avg = true && this.is_set_execute_ms_avg(); boolean that_present_execute_ms_avg = true && that.is_set_execute_ms_avg(); if (this_present_execute_ms_avg || that_present_execute_ms_avg) { if (!(this_present_execute_ms_avg && that_present_execute_ms_avg)) return false; if (!this.execute_ms_avg.equals(that.execute_ms_avg)) return false; } return true; } @Override public int hashCode() { List<Object> list = new ArrayList<Object>(); boolean present_acked = true && (is_set_acked()); list.add(present_acked); if (present_acked) list.add(acked); boolean present_failed = true && (is_set_failed()); list.add(present_failed); if (present_failed) list.add(failed); boolean present_process_ms_avg = true && (is_set_process_ms_avg()); list.add(present_process_ms_avg); if (present_process_ms_avg) list.add(process_ms_avg); boolean present_executed = true && (is_set_executed()); list.add(present_executed); if (present_executed) list.add(executed); boolean present_execute_ms_avg = true && (is_set_execute_ms_avg()); list.add(present_execute_ms_avg); if (present_execute_ms_avg) list.add(execute_ms_avg); return list.hashCode(); } @Override public int compareTo(BoltStats other) { if (!getClass().equals(other.getClass())) { return getClass().getName().compareTo(other.getClass().getName()); } int lastComparison = 0; lastComparison = Boolean.valueOf(is_set_acked()).compareTo(other.is_set_acked()); if (lastComparison != 0) { return lastComparison; } if (is_set_acked()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.acked, other.acked); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(is_set_failed()).compareTo(other.is_set_failed()); if (lastComparison != 0) { return lastComparison; } if (is_set_failed()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.failed, other.failed); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(is_set_process_ms_avg()).compareTo(other.is_set_process_ms_avg()); if (lastComparison != 0) { return lastComparison; } if (is_set_process_ms_avg()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.process_ms_avg, other.process_ms_avg); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(is_set_executed()).compareTo(other.is_set_executed()); if (lastComparison != 0) { return lastComparison; } if (is_set_executed()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.executed, other.executed); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(is_set_execute_ms_avg()).compareTo(other.is_set_execute_ms_avg()); if (lastComparison != 0) { return lastComparison; } if (is_set_execute_ms_avg()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.execute_ms_avg, other.execute_ms_avg); if (lastComparison != 0) { return lastComparison; } } return 0; } public _Fields fieldForId(int fieldId) { return _Fields.findByThriftId(fieldId); } public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException { schemes.get(iprot.getScheme()).getScheme().read(iprot, this); } public void write(org.apache.thrift.protocol.TProtocol oprot) throws 
org.apache.thrift.TException { schemes.get(oprot.getScheme()).getScheme().write(oprot, this); } @Override public String toString() { StringBuilder sb = new StringBuilder("BoltStats("); boolean first = true; sb.append("acked:"); if (this.acked == null) { sb.append("null"); } else { sb.append(this.acked); } first = false; if (!first) sb.append(", "); sb.append("failed:"); if (this.failed == null) { sb.append("null"); } else { sb.append(this.failed); } first = false; if (!first) sb.append(", "); sb.append("process_ms_avg:"); if (this.process_ms_avg == null) { sb.append("null"); } else { sb.append(this.process_ms_avg); } first = false; if (!first) sb.append(", "); sb.append("executed:"); if (this.executed == null) { sb.append("null"); } else { sb.append(this.executed); } first = false; if (!first) sb.append(", "); sb.append("execute_ms_avg:"); if (this.execute_ms_avg == null) { sb.append("null"); } else { sb.append(this.execute_ms_avg); } first = false; sb.append(")"); return sb.toString(); } public void validate() throws org.apache.thrift.TException { // check for required fields if (!is_set_acked()) { throw new org.apache.thrift.protocol.TProtocolException("Required field 'acked' is unset! Struct:" + toString()); } if (!is_set_failed()) { throw new org.apache.thrift.protocol.TProtocolException("Required field 'failed' is unset! Struct:" + toString()); } if (!is_set_process_ms_avg()) { throw new org.apache.thrift.protocol.TProtocolException("Required field 'process_ms_avg' is unset! Struct:" + toString()); } if (!is_set_executed()) { throw new org.apache.thrift.protocol.TProtocolException("Required field 'executed' is unset! Struct:" + toString()); } if (!is_set_execute_ms_avg()) { throw new org.apache.thrift.protocol.TProtocolException("Required field 'execute_ms_avg' is unset! 
Struct:" + toString()); } // check for sub-struct validity } private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException { try { write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out))); } catch (org.apache.thrift.TException te) { throw new java.io.IOException(te); } } private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException { try { read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in))); } catch (org.apache.thrift.TException te) { throw new java.io.IOException(te); } } private static class BoltStatsStandardSchemeFactory implements SchemeFactory { public BoltStatsStandardScheme getScheme() { return new BoltStatsStandardScheme(); } } private static class BoltStatsStandardScheme extends StandardScheme<BoltStats> { public void read(org.apache.thrift.protocol.TProtocol iprot, BoltStats struct) throws org.apache.thrift.TException { org.apache.thrift.protocol.TField schemeField; iprot.readStructBegin(); while (true) { schemeField = iprot.readFieldBegin(); if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { break; } switch (schemeField.id) { case 1: // ACKED if (schemeField.type == org.apache.thrift.protocol.TType.MAP) { { org.apache.thrift.protocol.TMap _map132 = iprot.readMapBegin(); struct.acked = new HashMap<String,Map<GlobalStreamId,Long>>(2*_map132.size); String _key133; Map<GlobalStreamId,Long> _val134; for (int _i135 = 0; _i135 < _map132.size; ++_i135) { _key133 = iprot.readString(); { org.apache.thrift.protocol.TMap _map136 = iprot.readMapBegin(); _val134 = new HashMap<GlobalStreamId,Long>(2*_map136.size); GlobalStreamId _key137; long _val138; for (int _i139 = 0; _i139 < _map136.size; ++_i139) { _key137 = new GlobalStreamId(); _key137.read(iprot); _val138 = iprot.readI64(); _val134.put(_key137, _val138); } iprot.readMapEnd(); } struct.acked.put(_key133, _val134); } iprot.readMapEnd(); } struct.set_acked_isSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 2: // FAILED if (schemeField.type == org.apache.thrift.protocol.TType.MAP) { { org.apache.thrift.protocol.TMap _map140 = iprot.readMapBegin(); struct.failed = new HashMap<String,Map<GlobalStreamId,Long>>(2*_map140.size); String _key141; Map<GlobalStreamId,Long> _val142; for (int _i143 = 0; _i143 < _map140.size; ++_i143) { _key141 = iprot.readString(); { org.apache.thrift.protocol.TMap _map144 = iprot.readMapBegin(); _val142 = new HashMap<GlobalStreamId,Long>(2*_map144.size); GlobalStreamId _key145; long _val146; for (int _i147 = 0; _i147 < _map144.size; ++_i147) { _key145 = new GlobalStreamId(); _key145.read(iprot); _val146 = iprot.readI64(); _val142.put(_key145, _val146); } iprot.readMapEnd(); } struct.failed.put(_key141, _val142); } iprot.readMapEnd(); } struct.set_failed_isSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 3: // PROCESS_MS_AVG if (schemeField.type == org.apache.thrift.protocol.TType.MAP) { { org.apache.thrift.protocol.TMap _map148 = iprot.readMapBegin(); struct.process_ms_avg = new HashMap<String,Map<GlobalStreamId,Double>>(2*_map148.size); String _key149; Map<GlobalStreamId,Double> _val150; for (int _i151 = 0; _i151 < _map148.size; ++_i151) { _key149 = iprot.readString(); { org.apache.thrift.protocol.TMap _map152 = iprot.readMapBegin(); _val150 = new HashMap<GlobalStreamId,Double>(2*_map152.size); GlobalStreamId 
_key153; double _val154; for (int _i155 = 0; _i155 < _map152.size; ++_i155) { _key153 = new GlobalStreamId(); _key153.read(iprot); _val154 = iprot.readDouble(); _val150.put(_key153, _val154); } iprot.readMapEnd(); } struct.process_ms_avg.put(_key149, _val150); } iprot.readMapEnd(); } struct.set_process_ms_avg_isSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 4: // EXECUTED if (schemeField.type == org.apache.thrift.protocol.TType.MAP) { { org.apache.thrift.protocol.TMap _map156 = iprot.readMapBegin(); struct.executed = new HashMap<String,Map<GlobalStreamId,Long>>(2*_map156.size); String _key157; Map<GlobalStreamId,Long> _val158; for (int _i159 = 0; _i159 < _map156.size; ++_i159) { _key157 = iprot.readString(); { org.apache.thrift.protocol.TMap _map160 = iprot.readMapBegin(); _val158 = new HashMap<GlobalStreamId,Long>(2*_map160.size); GlobalStreamId _key161; long _val162; for (int _i163 = 0; _i163 < _map160.size; ++_i163) { _key161 = new GlobalStreamId(); _key161.read(iprot); _val162 = iprot.readI64(); _val158.put(_key161, _val162); } iprot.readMapEnd(); } struct.executed.put(_key157, _val158); } iprot.readMapEnd(); } struct.set_executed_isSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 5: // EXECUTE_MS_AVG if (schemeField.type == org.apache.thrift.protocol.TType.MAP) { { org.apache.thrift.protocol.TMap _map164 = iprot.readMapBegin(); struct.execute_ms_avg = new HashMap<String,Map<GlobalStreamId,Double>>(2*_map164.size); String _key165; Map<GlobalStreamId,Double> _val166; for (int _i167 = 0; _i167 < _map164.size; ++_i167) { _key165 = iprot.readString(); { org.apache.thrift.protocol.TMap _map168 = iprot.readMapBegin(); _val166 = new HashMap<GlobalStreamId,Double>(2*_map168.size); GlobalStreamId _key169; double _val170; for (int _i171 = 0; _i171 < _map168.size; ++_i171) { _key169 = new GlobalStreamId(); _key169.read(iprot); _val170 = iprot.readDouble(); _val166.put(_key169, _val170); } iprot.readMapEnd(); } struct.execute_ms_avg.put(_key165, _val166); } iprot.readMapEnd(); } struct.set_execute_ms_avg_isSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; default: org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } iprot.readFieldEnd(); } iprot.readStructEnd(); struct.validate(); } public void write(org.apache.thrift.protocol.TProtocol oprot, BoltStats struct) throws org.apache.thrift.TException { struct.validate(); oprot.writeStructBegin(STRUCT_DESC); if (struct.acked != null) { oprot.writeFieldBegin(ACKED_FIELD_DESC); { oprot.writeMapBegin(new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.MAP, struct.acked.size())); for (Map.Entry<String, Map<GlobalStreamId,Long>> _iter172 : struct.acked.entrySet()) { oprot.writeString(_iter172.getKey()); { oprot.writeMapBegin(new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRUCT, org.apache.thrift.protocol.TType.I64, _iter172.getValue().size())); for (Map.Entry<GlobalStreamId, Long> _iter173 : _iter172.getValue().entrySet()) { _iter173.getKey().write(oprot); oprot.writeI64(_iter173.getValue()); } oprot.writeMapEnd(); } } oprot.writeMapEnd(); } oprot.writeFieldEnd(); } if (struct.failed != null) { oprot.writeFieldBegin(FAILED_FIELD_DESC); { oprot.writeMapBegin(new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.MAP, struct.failed.size())); 
for (Map.Entry<String, Map<GlobalStreamId,Long>> _iter174 : struct.failed.entrySet()) { oprot.writeString(_iter174.getKey()); { oprot.writeMapBegin(new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRUCT, org.apache.thrift.protocol.TType.I64, _iter174.getValue().size())); for (Map.Entry<GlobalStreamId, Long> _iter175 : _iter174.getValue().entrySet()) { _iter175.getKey().write(oprot); oprot.writeI64(_iter175.getValue()); } oprot.writeMapEnd(); } } oprot.writeMapEnd(); } oprot.writeFieldEnd(); } if (struct.process_ms_avg != null) { oprot.writeFieldBegin(PROCESS_MS_AVG_FIELD_DESC); { oprot.writeMapBegin(new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.MAP, struct.process_ms_avg.size())); for (Map.Entry<String, Map<GlobalStreamId,Double>> _iter176 : struct.process_ms_avg.entrySet()) { oprot.writeString(_iter176.getKey()); { oprot.writeMapBegin(new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRUCT, org.apache.thrift.protocol.TType.DOUBLE, _iter176.getValue().size())); for (Map.Entry<GlobalStreamId, Double> _iter177 : _iter176.getValue().entrySet()) { _iter177.getKey().write(oprot); oprot.writeDouble(_iter177.getValue()); } oprot.writeMapEnd(); } } oprot.writeMapEnd(); } oprot.writeFieldEnd(); } if (struct.executed != null) { oprot.writeFieldBegin(EXECUTED_FIELD_DESC); { oprot.writeMapBegin(new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.MAP, struct.executed.size())); for (Map.Entry<String, Map<GlobalStreamId,Long>> _iter178 : struct.executed.entrySet()) { oprot.writeString(_iter178.getKey()); { oprot.writeMapBegin(new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRUCT, org.apache.thrift.protocol.TType.I64, _iter178.getValue().size())); for (Map.Entry<GlobalStreamId, Long> _iter179 : _iter178.getValue().entrySet()) { _iter179.getKey().write(oprot); oprot.writeI64(_iter179.getValue()); } oprot.writeMapEnd(); } } oprot.writeMapEnd(); } oprot.writeFieldEnd(); } if (struct.execute_ms_avg != null) { oprot.writeFieldBegin(EXECUTE_MS_AVG_FIELD_DESC); { oprot.writeMapBegin(new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.MAP, struct.execute_ms_avg.size())); for (Map.Entry<String, Map<GlobalStreamId,Double>> _iter180 : struct.execute_ms_avg.entrySet()) { oprot.writeString(_iter180.getKey()); { oprot.writeMapBegin(new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRUCT, org.apache.thrift.protocol.TType.DOUBLE, _iter180.getValue().size())); for (Map.Entry<GlobalStreamId, Double> _iter181 : _iter180.getValue().entrySet()) { _iter181.getKey().write(oprot); oprot.writeDouble(_iter181.getValue()); } oprot.writeMapEnd(); } } oprot.writeMapEnd(); } oprot.writeFieldEnd(); } oprot.writeFieldStop(); oprot.writeStructEnd(); } } private static class BoltStatsTupleSchemeFactory implements SchemeFactory { public BoltStatsTupleScheme getScheme() { return new BoltStatsTupleScheme(); } } private static class BoltStatsTupleScheme extends TupleScheme<BoltStats> { @Override public void write(org.apache.thrift.protocol.TProtocol prot, BoltStats struct) throws org.apache.thrift.TException { TTupleProtocol oprot = (TTupleProtocol) prot; { oprot.writeI32(struct.acked.size()); for (Map.Entry<String, Map<GlobalStreamId,Long>> _iter182 : struct.acked.entrySet()) { oprot.writeString(_iter182.getKey()); { oprot.writeI32(_iter182.getValue().size()); for 
(Map.Entry<GlobalStreamId, Long> _iter183 : _iter182.getValue().entrySet()) { _iter183.getKey().write(oprot); oprot.writeI64(_iter183.getValue()); } } } } { oprot.writeI32(struct.failed.size()); for (Map.Entry<String, Map<GlobalStreamId,Long>> _iter184 : struct.failed.entrySet()) { oprot.writeString(_iter184.getKey()); { oprot.writeI32(_iter184.getValue().size()); for (Map.Entry<GlobalStreamId, Long> _iter185 : _iter184.getValue().entrySet()) { _iter185.getKey().write(oprot); oprot.writeI64(_iter185.getValue()); } } } } { oprot.writeI32(struct.process_ms_avg.size()); for (Map.Entry<String, Map<GlobalStreamId,Double>> _iter186 : struct.process_ms_avg.entrySet()) { oprot.writeString(_iter186.getKey()); { oprot.writeI32(_iter186.getValue().size()); for (Map.Entry<GlobalStreamId, Double> _iter187 : _iter186.getValue().entrySet()) { _iter187.getKey().write(oprot); oprot.writeDouble(_iter187.getValue()); } } } } { oprot.writeI32(struct.executed.size()); for (Map.Entry<String, Map<GlobalStreamId,Long>> _iter188 : struct.executed.entrySet()) { oprot.writeString(_iter188.getKey()); { oprot.writeI32(_iter188.getValue().size()); for (Map.Entry<GlobalStreamId, Long> _iter189 : _iter188.getValue().entrySet()) { _iter189.getKey().write(oprot); oprot.writeI64(_iter189.getValue()); } } } } { oprot.writeI32(struct.execute_ms_avg.size()); for (Map.Entry<String, Map<GlobalStreamId,Double>> _iter190 : struct.execute_ms_avg.entrySet()) { oprot.writeString(_iter190.getKey()); { oprot.writeI32(_iter190.getValue().size()); for (Map.Entry<GlobalStreamId, Double> _iter191 : _iter190.getValue().entrySet()) { _iter191.getKey().write(oprot); oprot.writeDouble(_iter191.getValue()); } } } } } @Override public void read(org.apache.thrift.protocol.TProtocol prot, BoltStats struct) throws org.apache.thrift.TException { TTupleProtocol iprot = (TTupleProtocol) prot; { org.apache.thrift.protocol.TMap _map192 = new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.MAP, iprot.readI32()); struct.acked = new HashMap<String,Map<GlobalStreamId,Long>>(2*_map192.size); String _key193; Map<GlobalStreamId,Long> _val194; for (int _i195 = 0; _i195 < _map192.size; ++_i195) { _key193 = iprot.readString(); { org.apache.thrift.protocol.TMap _map196 = new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRUCT, org.apache.thrift.protocol.TType.I64, iprot.readI32()); _val194 = new HashMap<GlobalStreamId,Long>(2*_map196.size); GlobalStreamId _key197; long _val198; for (int _i199 = 0; _i199 < _map196.size; ++_i199) { _key197 = new GlobalStreamId(); _key197.read(iprot); _val198 = iprot.readI64(); _val194.put(_key197, _val198); } } struct.acked.put(_key193, _val194); } } struct.set_acked_isSet(true); { org.apache.thrift.protocol.TMap _map200 = new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.MAP, iprot.readI32()); struct.failed = new HashMap<String,Map<GlobalStreamId,Long>>(2*_map200.size); String _key201; Map<GlobalStreamId,Long> _val202; for (int _i203 = 0; _i203 < _map200.size; ++_i203) { _key201 = iprot.readString(); { org.apache.thrift.protocol.TMap _map204 = new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRUCT, org.apache.thrift.protocol.TType.I64, iprot.readI32()); _val202 = new HashMap<GlobalStreamId,Long>(2*_map204.size); GlobalStreamId _key205; long _val206; for (int _i207 = 0; _i207 < _map204.size; ++_i207) { _key205 = new GlobalStreamId(); _key205.read(iprot); _val206 = 
iprot.readI64(); _val202.put(_key205, _val206); } } struct.failed.put(_key201, _val202); } } struct.set_failed_isSet(true); { org.apache.thrift.protocol.TMap _map208 = new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.MAP, iprot.readI32()); struct.process_ms_avg = new HashMap<String,Map<GlobalStreamId,Double>>(2*_map208.size); String _key209; Map<GlobalStreamId,Double> _val210; for (int _i211 = 0; _i211 < _map208.size; ++_i211) { _key209 = iprot.readString(); { org.apache.thrift.protocol.TMap _map212 = new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRUCT, org.apache.thrift.protocol.TType.DOUBLE, iprot.readI32()); _val210 = new HashMap<GlobalStreamId,Double>(2*_map212.size); GlobalStreamId _key213; double _val214; for (int _i215 = 0; _i215 < _map212.size; ++_i215) { _key213 = new GlobalStreamId(); _key213.read(iprot); _val214 = iprot.readDouble(); _val210.put(_key213, _val214); } } struct.process_ms_avg.put(_key209, _val210); } } struct.set_process_ms_avg_isSet(true); { org.apache.thrift.protocol.TMap _map216 = new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.MAP, iprot.readI32()); struct.executed = new HashMap<String,Map<GlobalStreamId,Long>>(2*_map216.size); String _key217; Map<GlobalStreamId,Long> _val218; for (int _i219 = 0; _i219 < _map216.size; ++_i219) { _key217 = iprot.readString(); { org.apache.thrift.protocol.TMap _map220 = new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRUCT, org.apache.thrift.protocol.TType.I64, iprot.readI32()); _val218 = new HashMap<GlobalStreamId,Long>(2*_map220.size); GlobalStreamId _key221; long _val222; for (int _i223 = 0; _i223 < _map220.size; ++_i223) { _key221 = new GlobalStreamId(); _key221.read(iprot); _val222 = iprot.readI64(); _val218.put(_key221, _val222); } } struct.executed.put(_key217, _val218); } } struct.set_executed_isSet(true); { org.apache.thrift.protocol.TMap _map224 = new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.MAP, iprot.readI32()); struct.execute_ms_avg = new HashMap<String,Map<GlobalStreamId,Double>>(2*_map224.size); String _key225; Map<GlobalStreamId,Double> _val226; for (int _i227 = 0; _i227 < _map224.size; ++_i227) { _key225 = iprot.readString(); { org.apache.thrift.protocol.TMap _map228 = new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRUCT, org.apache.thrift.protocol.TType.DOUBLE, iprot.readI32()); _val226 = new HashMap<GlobalStreamId,Double>(2*_map228.size); GlobalStreamId _key229; double _val230; for (int _i231 = 0; _i231 < _map228.size; ++_i231) { _key229 = new GlobalStreamId(); _key229.read(iprot); _val230 = iprot.readDouble(); _val226.put(_key229, _val230); } } struct.execute_ms_avg.put(_key225, _val226); } } struct.set_execute_ms_avg_isSet(true); } } }
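/*
 * Illustrative only (not part of the generated Thrift file): a minimal round
 * trip of BoltStats through the tuple scheme above. TSerializer, TDeserializer
 * and TTupleProtocol.Factory are standard libthrift classes; note that the
 * tuple scheme writes all five maps unconditionally, so each map must be set
 * (non-null) before serializing.
 */
class BoltStatsTupleRoundTripSketch {
   static BoltStats roundTrip(BoltStats stats) throws org.apache.thrift.TException {
      // serialize with the compact tuple encoding (sizes only, no field headers)
      byte[] bytes = new org.apache.thrift.TSerializer(
            new org.apache.thrift.protocol.TTupleProtocol.Factory()).serialize(stats);
      // deserialize into a fresh struct with the matching protocol factory
      BoltStats copy = new BoltStats();
      new org.apache.thrift.TDeserializer(
            new org.apache.thrift.protocol.TTupleProtocol.Factory()).deserialize(copy, bytes);
      return copy;
   }
}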
/*******************************************************************************
**
** Advanced Distributed Learning Co-Laboratory (ADL Co-Lab) Hub grants you
** ("Licensee") a non-exclusive, royalty free, license to use, modify and
** redistribute this software in source and binary code form, provided that
** i) this copyright notice and license appear on all copies of the software;
** and ii) Licensee does not utilize the software in a manner which is
** disparaging to ADL Co-Lab Hub.
**
** This software is provided "AS IS," without a warranty of any kind. ALL
** EXPRESS OR IMPLIED CONDITIONS, REPRESENTATIONS AND WARRANTIES, INCLUDING
** ANY IMPLIED WARRANTY OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE
** OR NON-INFRINGEMENT, ARE HEREBY EXCLUDED. ADL Co-Lab Hub AND ITS LICENSORS
** SHALL NOT BE LIABLE FOR ANY DAMAGES SUFFERED BY LICENSEE AS A RESULT OF
** USING, MODIFYING OR DISTRIBUTING THE SOFTWARE OR ITS DERIVATIVES. IN NO
** EVENT WILL ADL Co-Lab Hub OR ITS LICENSORS BE LIABLE FOR ANY LOST REVENUE,
** PROFIT OR DATA, OR FOR DIRECT, INDIRECT, SPECIAL, CONSEQUENTIAL,
** INCIDENTAL OR PUNITIVE DAMAGES, HOWEVER CAUSED AND REGARDLESS OF THE
** THEORY OF LIABILITY, ARISING OUT OF THE USE OF OR INABILITY TO USE
** SOFTWARE, EVEN IF ADL Co-Lab Hub HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
** DAMAGES.
**
*******************************************************************************/

package org.adl.validator.contentpackage;

// native java imports
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Logger;

import org.adl.parsers.dom.DOMTreeUtility;
import org.w3c.dom.Attr;
import org.w3c.dom.Document;
import org.w3c.dom.NamedNodeMap;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;

/**
 * <strong>Filename: </strong><br>ManifestHandler.java<br><br>
 *
 * <strong>Description: </strong>This class tracks, stores and retrieves the
 * Launch Data information of SCOs and the Metadata information, all of which
 * is found or referenced from within the content package test subject.
 *
 * @author ADL Technical Team
 */
public class ManifestHandler implements Serializable {

   /**
    * Serialization support.
    */
   private static final long serialVersionUID = 1L;

   /**
    * This method returns the default organization node that is flagged
    * by the default attribute. This method serves as a helper method.
    *
    * @param iRootNode root node of test subject dom.
    *
    * @return Node default organization
    */
   public static Node getDefaultOrganizationNode(Node iRootNode) {
      Node result = null;

      // find the value of the "default" attribute of the <organizations> node
      Node organizationsNode = DOMTreeUtility.getNode(iRootNode, "organizations");
      if (organizationsNode != null) {
         NamedNodeMap attrList = organizationsNode.getAttributes();
         String defaultIDValue = (attrList.getNamedItem("default")).getNodeValue();

         // traverse the <organization> nodes and find the matching default ID
         NodeList children = organizationsNode.getChildNodes();
         if (children != null) {
            int numChildren = children.getLength();
            for (int i = 0; i < numChildren; i++) {
               Node currentChild = children.item(i);
               String currentChildName = currentChild.getLocalName();
               // null-safe comparison: text nodes return a null local name
               if ("organization".equals(currentChildName)) {
                  // find the value of the "identifier" attribute of the
                  // <organization> node
                  NamedNodeMap orgAttrList = currentChild.getAttributes();
                  String idValue = (orgAttrList.getNamedItem("identifier")).getNodeValue();
                  if (idValue.equals(defaultIDValue)) {
                     result = currentChild;
                     break;
                  }
               }
            }
         }
      }
      return result;
   }

   /**
    * This method retrieves all the organization nodes from the content package
    * manifest dom. This method serves as a helper for retrieving SCO
    * launch data.
    *
    * @param iDefaultOrganizationOnly boolean describing the scope of the
    * organization that should be traversed for SCO launch data. Specific
    * to SRTE uses - will no longer be needed in future development.
    *
    * @param iRootNode root node of test subject dom.
    *
    * @return List containing a list of organization nodes.
    */
   public static List<Node> getOrganizationNodes(Node iRootNode, boolean iDefaultOrganizationOnly) {
      //mLogger.entering("ManifestHandler", "getOrganizationNodes()");
      List<Node> result = new ArrayList<Node>();
      if (iDefaultOrganizationOnly) {
         result.add(getDefaultOrganizationNode(iRootNode));
      } else {
         // get the list of organization nodes
         Node organizationsNode = DOMTreeUtility.getNode(iRootNode, "organizations");
         if (organizationsNode != null) {
            NodeList children = organizationsNode.getChildNodes();
            if (children != null) {
               int numChildren = children.getLength();
               for (int i = 0; i < numChildren; i++) {
                  Node currentChild = children.item(i);
                  String currentChildName = currentChild.getLocalName();
                  if ("organization".equals(currentChildName)) {
                     // add the organization node to the resulting list
                     result.add(currentChild);
                  }
               }
            }
         }
      }
      return result;
   }

   /**
    * This method returns the list of resources related to SSP.
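    * (In this validator, a &lt;resource&gt; is treated as SSP-related when it
    * declares a &lt;bucket&gt; child element, as checked below.)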
    * <br>
    *
    * @param iRootNode Node object that contains SSP resources
    * @return List of SSP resources<br>
    */
   public static List<Node> getSSPResourceList(Node iRootNode) {
      List<Node> result = new ArrayList<Node>();

      // get the list of resource nodes
      Node resourcesNode = DOMTreeUtility.getNode(iRootNode, "resources");
      List<Node> resource = DOMTreeUtility.getNodes(resourcesNode, "resource");
      if (resource != null) {
         int numResources = resource.size();
         for (int i = 0; i < numResources; i++) {
            Node child = resource.get(i);
            NodeList children = child.getChildNodes();
            int numChildren = children.getLength();
            for (int j = 0; j < numChildren; j++) {
               Node currentChild = children.item(j);
               String currentChildName = currentChild.getLocalName();
               if ("bucket".equals(currentChildName)) {
                  // add the resource node to the resulting list
                  result.add(child);
                  break;
               }
            }
         }
      }
      return result;
   }

   /**
    * This attribute serves as the Logger object used for debug logging.
    */
   private transient Logger mLogger = Logger.getLogger("org.adl.util.debug.validator");

   /**
    * This attribute describes whether or not SCO launch data has been
    * tracked.
    */
   private boolean mLaunchDataTracked;

   /**
    * This attribute describes whether or not the metadata information was
    * tracked.
    */
   private boolean mMetadataTracked;

   /**
    * This attribute serves as the storage list of the tracked metadata
    * information. This list will contain the following information: If inline
    * metadata, then the root node will be stored here along with the
    * metadata application profile type. If external metadata, then the URI to
    * the metadata will be stored along with the metadata application profile
    * type.
    */
   private List<MetadataData> mMetadataDataList;

   /**
    * This attribute serves as the storage list of the tracked SCO launch data.
    * This list uses the default organization and does not comply with given
    * sequencing rules. This list can be used for default behavior and
    * testing purposes.
    */
   private List<LaunchData> mLaunchDataList;

   /**
    * This attribute contains the xml:base value created from the &lt;manifest&gt;
    * and &lt;resources&gt; elements.
    */
   private String mManifestResourcesXMLBase;

   /**
    * This attribute contains the xml:base value created from each &lt;resource&gt;
    * element. It will complete the xml:base value after being appended to
    * the mManifestResourcesXMLBase attribute.
    */
   private String mResourceXMLBase;

   /**
    * This attribute contains a list of the metadata as referenced in the
    * IMS Manifest by the adlcp:location element.
    */
   private List<String> mLocationList;

   /**
    * Used in all areas where an empty string was checked for, or a string was
    * set to "".
    */
   private String mEMPTY_STRING = "";

   /**
    * Default Constructor. Sets the attributes to their initial values.
    */
   public ManifestHandler() {
      mLaunchDataTracked = false;
      mMetadataTracked = false;
      mMetadataDataList = new ArrayList<MetadataData>();
      mLaunchDataList = new ArrayList<LaunchData>();
      mManifestResourcesXMLBase = "";
      mResourceXMLBase = "";
      mLocationList = new ArrayList<String>();
   }

   /**
    * This method retrieves the information described by the &lt;item&gt;
    * element and saves it for SCO launch data information. This method
    * traverses the &lt;item&gt;s of the &lt;organization&gt; recursively and
    * retrieves the identifiers, the referenced identifier references and the
    * corresponding parameters from the &lt;resources&gt; element.
    *
    * @param iNode The organization node.
    *
    * @param iOrgID The ID of the organization.
    */
   private void addItemInfo(Node iNode, String iOrgID) {
      mLogger.entering("ManifestHandler", "addItemInfo()");
      if (iNode == null) return;
      int type = iNode.getNodeType();
      String orgID = iOrgID;
      switch (type) {
         // document node
         // this is a fail safe case to handle an error where a document node
         // is passed
         case Node.DOCUMENT_NODE: {
            Node rootNode = ((Document) iNode).getDocumentElement();
            addItemInfo(rootNode, orgID);
            break;
         }
         // element node
         case Node.ELEMENT_NODE: {
            String nodeName = iNode.getLocalName();
            // get the needed values of the attributes
            if (nodeName.equals("item")) {
               String orgIdentifier = mEMPTY_STRING;
               String identifier = mEMPTY_STRING;
               String identifierref = mEMPTY_STRING;
               String parameters = mEMPTY_STRING;
               String title = mEMPTY_STRING;
               String dataFromLMS = mEMPTY_STRING;
               String timeLimitAction = mEMPTY_STRING;
               String completionThreshold = mEMPTY_STRING;
               String objectiveslist = mEMPTY_STRING;
               boolean previous = false;
               boolean shouldContinue = false;
               boolean exit = false;
               boolean exitAll = false;
               boolean abandon = false;
               boolean suspendAll = false;

               // assign orgIdentifier the value of the parameter iOrgID
               orgIdentifier = iOrgID;

               // get the value of the following attributes:
               // - identifier
               // - identifierref
               // - parameters
               //
               // leave the value at "" if the attribute does not exist
               NamedNodeMap attrList = iNode.getAttributes();
               int numAttr = attrList.getLength();
               Attr currentAttrNode;
               String currentNodeName;
               // loop through the attributes and get their values assuming that
               // the multiplicity of each attribute is 1 and only 1.
               for (int i = 0; i < numAttr; i++) {
                  currentAttrNode = (Attr) attrList.item(i);
                  currentNodeName = currentAttrNode.getLocalName();
                  // store the value of the attribute
                  if (currentNodeName.equalsIgnoreCase("identifier")) {
                     identifier = currentAttrNode.getValue();
                  } else if (currentNodeName.equalsIgnoreCase("identifierref")) {
                     identifierref = currentAttrNode.getValue();
                  } else if (currentNodeName.equalsIgnoreCase("parameters")) {
                     parameters = currentAttrNode.getValue();
                  }
               }

               // get the value of the title element
               // assume that there is 1 and only 1 child named title
               title = DOMTreeUtility.getNodeValue(DOMTreeUtility.getNode(iNode, "title"));
               // get the value of the dataFromLMS element
               dataFromLMS = DOMTreeUtility.getNodeValue(DOMTreeUtility.getNode(iNode, "dataFromLMS"));
               // get the value of the timeLimitAction element
               timeLimitAction = DOMTreeUtility.getNodeValue(DOMTreeUtility.getNode(iNode, "timeLimitAction"));
               // get the value of the completionThreshold element
               completionThreshold = DOMTreeUtility.getNodeValue(DOMTreeUtility.getNode(iNode, "completionThreshold"));
               // get the sequencing objectives list for this item
               objectiveslist = getObjectivesList(DOMTreeUtility.getNode(iNode, "sequencing"));

               // get the hideLMSUI elements and set the previous, continue,
               // exit, exitAll, abandon and suspendAll variables accordingly.
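               // (each <adlnav:hideLMSUI> value names one LMS-provided
               // navigation control to hide while this item is active)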
               Node presentationNode = DOMTreeUtility.getNode(iNode, "presentation");
               if (presentationNode != null) {
                  Node navInterfaceNode = DOMTreeUtility.getNode(presentationNode, "navigationInterface");
                  if (navInterfaceNode != null) {
                     NodeList children = navInterfaceNode.getChildNodes();
                     if (children != null) {
                        int numChildren = children.getLength();
                        for (int i = 0; i < numChildren; i++) {
                           Node currentChild = children.item(i);
                           String currentChildName = currentChild.getLocalName();
                           // null-safe comparison: text nodes return a null local name
                           if ("hideLMSUI".equals(currentChildName)) {
                              String currentChildValue = DOMTreeUtility.getNodeValue(currentChild);
                              if (currentChildValue.equals("previous")) {
                                 previous = true;
                              } else if (currentChildValue.equals("continue")) {
                                 shouldContinue = true;
                              } else if (currentChildValue.equals("exit")) {
                                 exit = true;
                              } else if (currentChildValue.equals("exitAll")) {
                                 exitAll = true;
                              } else if (currentChildValue.equals("abandon")) {
                                 abandon = true;
                              } else if (currentChildValue.equals("suspendAll")) {
                                 suspendAll = true;
                              }
                           }
                        }
                     }
                  }
               }

               // make sure this item actually points to a <resource>
               if (!identifierref.equals(mEMPTY_STRING)) {
                  // create an instance of the LaunchData data structure and
                  // add it to the LaunchDataList
                  LaunchData launchData = new LaunchData();
                  launchData.setOrganizationIdentifier(orgIdentifier);
                  launchData.setItemIdentifier(identifier);
                  launchData.setResourceIdentifier(identifierref);
                  launchData.setParameters(parameters);
                  launchData.setItemTitle(title);
                  launchData.setDataFromLMS(dataFromLMS);
                  launchData.setTimeLimitAction(timeLimitAction);
                  launchData.setCompletionThreshold(completionThreshold);
                  launchData.setPrevious(previous);
                  launchData.setContinue(shouldContinue);
                  launchData.setExit(exit);
                  launchData.setExitAll(exitAll);
                  launchData.setAbandon(abandon);
                  launchData.setSuspendAll(suspendAll);
                  launchData.setMinNormalizedMeasure(getMinNormalizedMeasure(iNode));
                  launchData.setAttemptAbsoluteDurationLimit(getAttemptAbsoluteDurationLimit(iNode));
                  launchData.setObjectivesList(objectiveslist);
                  mLaunchDataList.add(launchData);
               }
            }
            // get the child nodes and add their items info
            NodeList children = iNode.getChildNodes();
            if (children != null) {
               int numChildren = children.getLength();
               Node currentChild;
               for (int z = 0; z < numChildren; z++) {
                  currentChild = children.item(z);
                  addItemInfo(currentChild, orgID);
               }
            }
         }
         // handle all other node types
         default: {
            break;
         }
      }
   }

   /**
    * This method uses the information stored in the SCO Launch Data List
    * to get the associated Resource level data.
    *
    * @param iRootNode root node of the DOM.
    *
    * @param iRemoveAssets boolean representing whether or not the assets should
    * be removed. (The Sample RTE will never want to remove the assets,
    * whereas the TestSuite will.)
    */
   private void addResourceInfo(Node iRootNode, boolean iRemoveAssets) {
      // get the <resources> node
      Node resourcesNode = DOMTreeUtility.getNode(iRootNode, "resources");
      String scormType = mEMPTY_STRING;
      String location = mEMPTY_STRING;
      String xmlBase = mEMPTY_STRING;

      // launch data processing stuff
      int size = mLaunchDataList.size();
      LaunchData currentLaunchData;
      String resourceIdentifier = mEMPTY_STRING;
      String persistState = mEMPTY_STRING;
      Node matchingResourceNode = null;

      // here we are dealing with a content aggregation package
      for (int i = 0; i < size; i++) {
         currentLaunchData = mLaunchDataList.get(i);
         resourceIdentifier = currentLaunchData.getResourceIdentifier();
         matchingResourceNode = getResourceNodeWithIdentifier(resourcesNode, resourceIdentifier);
         // ensure the resource node exists
         if (matchingResourceNode != null) {
            // get the value of the following attributes:
            // - adlcp:scormtype
            // - href
            // - xml:base
            //
            // leave the value at "" if the attribute does not exist
            scormType = DOMTreeUtility.getAttributeValue(matchingResourceNode, "scormType");
            location = DOMTreeUtility.getAttributeValue(matchingResourceNode, "href");
            xmlBase = DOMTreeUtility.getAttributeValue(matchingResourceNode, "base");
            persistState = DOMTreeUtility.getAttributeValue(matchingResourceNode, "persistState");
         }
         // populate the current Launch Data with the resource level values
         currentLaunchData.setSCORMType(scormType);
         currentLaunchData.setLocation(location);
         currentLaunchData.setResourceXMLBase(xmlBase);
         currentLaunchData.setPersistState(persistState);
         try {
            mLaunchDataList.set(i, currentLaunchData);
         } catch (ArrayIndexOutOfBoundsException aioobe) {
            System.out.println("ArrayIndexOutOfBoundsException caught on " +
                               "List currentLaunchData. Attempted index " +
                               "access is " + i + ", size of List is " + mLaunchDataList.size());
         }
      }

      if (size == 0) // then we are dealing with a resource package
      {
         // loop through the children of <resources> to retrieve all
         // resource information
         NodeList children = resourcesNode.getChildNodes();
         if (children != null) {
            int childrenSize = children.getLength();
            for (int z = 0; z < childrenSize; z++) {
               Node currentNode = children.item(z);
               String currentNodeName = currentNode.getLocalName();
               if ("resource".equals(currentNodeName)) {
                  // create an instance of the LaunchData data structure and
                  // add it to the LaunchDataList
                  LaunchData launchData = new LaunchData();
                  // get the value of the adlcp:scormtype, href and base attributes;
                  // leave the value at "" if the attribute does not exist
                  scormType = DOMTreeUtility.getAttributeValue(currentNode, "scormType");
                  location = DOMTreeUtility.getAttributeValue(currentNode, "href");
                  xmlBase = DOMTreeUtility.getAttributeValue(currentNode, "base");
                  resourceIdentifier = DOMTreeUtility.getAttributeValue(currentNode, "identifier");
                  // populate the Launch Data with the resource level values
                  launchData.setSCORMType(scormType);
                  launchData.setLocation(location);
                  launchData.setResourceXMLBase(xmlBase);
                  launchData.setResourceIdentifier(resourceIdentifier);
                  mLaunchDataList.add(launchData);
               } // end if current node == resource
            } // end looping over children
         } // end if children != null
      } // end if size == 0

      if (iRemoveAssets) {
         removeAssetsFromLaunchDataList();
      }
   }

   /**
    * This method collects all the metadata that is referenced via the
    * &lt;adlcp:location&gt; element in an IMS Manifest and stores it in the
    * location list.
    *
    * @param iNode element nodes traversed for metadata element.
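    *
    * <p>Illustrative manifest fragment only (names and paths are examples):
    * <pre>
    * &lt;metadata&gt;
    *    &lt;adlcp:location&gt;metadata/sco1.xml&lt;/adlcp:location&gt;
    * &lt;/metadata&gt;
    * </pre>
    * Each stored value is the location prefixed with the accumulated xml:base
    * values of the enclosing &lt;manifest&gt;, &lt;resources&gt; and
    * &lt;resource&gt; elements.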
    */
   private void checkForAdlcpLocationMD(Node iNode) {
      if (iNode != null) {
         String nodeName = iNode.getLocalName();
         if (nodeName != null) {
            if (nodeName.equals("manifest")) {
               // set and retrieve xml:base of manifest if it exists
               // must first clear out xml:base values if dealing with a sub-manifest
               if (!mManifestResourcesXMLBase.equals(mEMPTY_STRING)) {
                  mManifestResourcesXMLBase = mEMPTY_STRING;
                  mResourceXMLBase = mEMPTY_STRING;
               }
               String manifestXMLBase = DOMTreeUtility.getAttributeValue(iNode, "base");
               if (!manifestXMLBase.equals(mEMPTY_STRING)) {
                  mManifestResourcesXMLBase = manifestXMLBase;
               }
               getAdlcpLocationMD(iNode);
            } else if (nodeName.equals("organization")) {
               getAdlcpLocationMD(iNode);
            } else if (nodeName.equals("item")) {
               getAdlcpLocationMD(iNode);
            } else if (nodeName.equals("resources")) {
               // set and retrieve xml:base of resources if it exists
               String resourcesXMLBase = DOMTreeUtility.getAttributeValue(iNode, "base");
               if (!resourcesXMLBase.equals(mEMPTY_STRING)) {
                  mManifestResourcesXMLBase = mManifestResourcesXMLBase + resourcesXMLBase;
               }
            } else if (nodeName.equals("resource")) {
               // retrieve xml:base of resource if it exists; it is not
               // accumulated because it applies to the specified resource only
               mResourceXMLBase = DOMTreeUtility.getAttributeValue(iNode, "base");
               getAdlcpLocationMD(iNode);
            } else if (nodeName.equals("file")) {
               getAdlcpLocationMD(iNode);
            }
            NodeList nodeChildren = iNode.getChildNodes();
            if (nodeChildren != null) {
               int size = nodeChildren.getLength();
               for (int i = 0; i < size; i++) {
                  checkForAdlcpLocationMD(nodeChildren.item(i));
               }
            }
         }
      }
   }

   /**
    * This method retrieves only the adlcp:location value and stores it in a
    * List.
    *
    * @param iNode - element nodes traversed for metadata element
    */
   private void getAdlcpLocationMD(Node iNode) {
      Node metadataNode = DOMTreeUtility.getNode(iNode, "metadata");
      if (metadataNode != null) {
         // get all the location metadata
         List<Node> locationNodeList = DOMTreeUtility.getNodes(metadataNode, "location");
         // iterate through the List and get the attribute names and values
         int locationNodeListSize = locationNodeList.size();
         for (int i = 0; i < locationNodeListSize; i++) {
            // get the location value of each node
            String locationValue = DOMTreeUtility.getNodeValue(locationNodeList.get(i));
            locationValue = mManifestResourcesXMLBase + mResourceXMLBase + locationValue;
            mLocationList.add(locationValue);
         }
      }
   }

   /**
    * This method retrieves the attemptAbsoluteDurationLimit element from the
    * parent sequencing element.
    *
    * @param iNode node to be manipulated for attemptAbsoluteDurationLimit
    * value.
    *
    * @return String containing the attemptAbsoluteDurationLimit value.
    */
   private String getAttemptAbsoluteDurationLimit(Node iNode) {
      String attemptAbsoluteDurationLimit = mEMPTY_STRING;
      String nodeName = iNode.getLocalName();
      if (nodeName.equals("item")) {
         Node sequencingNode = DOMTreeUtility.getNode(iNode, "sequencing");
         if (sequencingNode != null) {
            Node limitConditionsNode = DOMTreeUtility.getNode(sequencingNode, "limitConditions");
            if (limitConditionsNode != null) {
               attemptAbsoluteDurationLimit =
                  DOMTreeUtility.getAttributeValue(limitConditionsNode, "attemptAbsoluteDurationLimit");
            }
         }
      }
      return attemptAbsoluteDurationLimit;
   }

   /**
    * This method initiates the retrieval of the SCO launch data
    * information, if this information exists in the content package test
    * subject.
    *
    * @param iRootNode root node manipulated for retrieval of launch data.
    *
    * @param iDefaultOrganizationOnly boolean describing the scope of the
    * organization that should be traversed for SCO launch data. Specific
    * to SRTE uses - will no longer be needed in future development.
    *
    * @param iRemoveAssets boolean describing whether or not to remove assets
    * from the LaunchData list. The SRTE needs this to be false in
    * order to import assets as well.
    *
    * @return List containing the launch data information for SCOs.
    */
   public List<LaunchData> getLaunchData(Node iRootNode, boolean iDefaultOrganizationOnly, boolean iRemoveAssets) {
      mLogger.entering("ManifestHandler", "getLaunchData(iRootNode)");
      if (!mLaunchDataTracked) {
         setLaunchData(iRootNode, iDefaultOrganizationOnly, iRemoveAssets);
      }
      return mLaunchDataList;
   }

   /**
    * This method initiates the retrieval of a list of adlcp:location element
    * values only.
    *
    * @param iRootNode root node manipulated for retrieval of adlcp:location
    * metadata info.
    *
    * @return List containing the adlcp:location metadata information.
    */
   public List<String> getLocationMD(Node iRootNode) {
      checkForAdlcpLocationMD(iRootNode);
      return mLocationList;
   }

   /**
    * This method initiates the retrieval of the metadata information,
    * if this information exists in the content package test subject.
    *
    * @param iRootNode root node manipulated for retrieval of metadata info.
    *
    * @param iBaseDirectory base directory for location of test subject
    *
    * @return List containing the metadata information.
    */
   public List<MetadataData> getMetadata(Node iRootNode, String iBaseDirectory) {
      if (!mMetadataTracked) {
         setMetadata(iRootNode, iBaseDirectory);
      }
      return mMetadataDataList;
   }

   /**
    * This method retrieves the minNormalizedMeasure element from the parent
    * sequencing element.
    *
    * @param iNode node to be manipulated for the minNormalizedMeasure value.
    *
    * @return String containing the minNormalizedMeasure value.
    */
   private String getMinNormalizedMeasure(Node iNode) {
      String minNormalizedMeasure = mEMPTY_STRING;
      String nodeName = iNode.getLocalName();
      if (nodeName.equals("item")) {
         Node sequencingNode = DOMTreeUtility.getNode(iNode, "sequencing");
         if (sequencingNode != null) {
            Node objectivesNode = DOMTreeUtility.getNode(sequencingNode, "objectives");
            if (objectivesNode != null) {
               Node primaryObjectiveNode = DOMTreeUtility.getNode(objectivesNode, "primaryObjective");
               if (primaryObjectiveNode != null) {
                  String satisfiedByMeasureValue =
                     DOMTreeUtility.getAttributeValue(primaryObjectiveNode, "satisfiedByMeasure");
                  if (satisfiedByMeasureValue.equals("true")) {
                     Node minNormalizedMeasureNode =
                        DOMTreeUtility.getNode(primaryObjectiveNode, "minNormalizedMeasure");
                     if (minNormalizedMeasureNode != null) {
                        minNormalizedMeasure = DOMTreeUtility.getNodeValue(minNormalizedMeasureNode);
                        if (minNormalizedMeasure.trim().equals(mEMPTY_STRING)) {
                           minNormalizedMeasure = "1.0";
                        }
                     } else {
                        minNormalizedMeasure = "1.0";
                     }
                  }
               }
            }
         }
      }
      return minNormalizedMeasure;
   }

   /**
    * This method gets all the sequencing objectives associated with the
    * current item.
    *
    * @param iNode root item node.
    *
    * @return String - returns a string containing the objectives data
    */
   private String getObjectivesList(Node iNode) {
      int j, k;
      NamedNodeMap attributesList = null;
      String result = mEMPTY_STRING;
      // get to the objectives node, if one exists
      if (iNode != null) {
         Node objNode = DOMTreeUtility.getNode(iNode, "objectives");
         if (objNode != null) {
            // get the primary objective id
            Node primaryObjNode = DOMTreeUtility.getNode(objNode, "primaryObjective");
            if (primaryObjNode != null) {
               attributesList = primaryObjNode.getAttributes();
               // iterate through the NamedNodeMap and get the attribute names and values
               for (j = 0; j < attributesList.getLength(); j++) {
                  // find the objectiveID attribute and record its value
                  if (attributesList.item(j).getLocalName().equalsIgnoreCase("objectiveID")) {
                     result = attributesList.item(j).getNodeValue();
                  }
               }
            }
            // get all objective ids
            List<Node> objNodes = DOMTreeUtility.getNodes(objNode, "objective");
            for (j = 0; j < objNodes.size(); j++) {
               Node currNode = objNodes.get(j);
               attributesList = currNode.getAttributes();
               // iterate through the NamedNodeMap and get the attribute names and values
               for (k = 0; k < attributesList.getLength(); k++) {
                  // find the objectiveID attribute and append its value
                  if (attributesList.item(k).getLocalName().equalsIgnoreCase("objectiveID")) {
                     result = result + "," + attributesList.item(k).getNodeValue();
                  }
               }
            } // end looping over nodes
         } // end if objNode != null
      } // end if iNode != null
      // return the objective list, if it was found
      return result;
   }

   /**
    * This method retrieves the resource node that matches the passed in
    * identifier value. This method serves as a helper method.
    *
    * @param iResourcesNode Parent resources node of the resource elements.
    *
    * @param iResourceIdentifier identifier value of the resource node being
    * retrieved.
    *
    * @return Node resource element node that matches the identifier value.
    */
   private Node getResourceNodeWithIdentifier(Node iResourcesNode, String iResourceIdentifier) {
      Node result = null;
      if (iResourcesNode != null) {
         // loop through the children of <resources>
         NodeList children = iResourcesNode.getChildNodes();
         if (children != null) {
            int numChildren = children.getLength();
            Node currentChild = null;
            String currentChildName = mEMPTY_STRING;
            String currentResourceIdentifier = mEMPTY_STRING;
            for (int i = 0; i < numChildren; i++) {
               currentChild = children.item(i);
               currentChildName = currentChild.getLocalName();
               // locate the <resource> nodes
               if ("resource".equals(currentChildName)) {
                  // get the identifier attribute of the current <resource>
                  currentResourceIdentifier = DOMTreeUtility.getAttributeValue(currentChild, "identifier");
                  // match the identifier attributes and get the missing data
                  if (currentResourceIdentifier.equals(iResourceIdentifier)) {
                     result = currentChild;
                     break;
                  }
               } // end if currentChildName == resource
            } // end looping over children
         } // end if children != null
      } // end if iResourcesNode != null
      return result;
   }

   /**
    * This method removes the asset information from the launch data list.
    * Assets are not launchable resources.
    */
   private void removeAssetsFromLaunchDataList() {
      int size = mLaunchDataList.size();
      LaunchData currentLaunchData;
      for (int i = 0; i < size;) {
         currentLaunchData = mLaunchDataList.get(i);
         String scormType = currentLaunchData.getSCORMType();
         if (scormType.equals("asset")) {
            mLaunchDataList.remove(i);
            size = mLaunchDataList.size();
         } else {
            i++;
         }
      }
   }

   /**
    * This method removes the duplicate LaunchData elements that are stored in
    * the list during tracking.
    * This removal is based on matching Resource Identifiers and Item
    * Identifiers.
    */
   private void removeDuplicateLaunchData() {
      int size = mLaunchDataList.size();
      LaunchData ldA;
      LaunchData ldB;
      String ldAid;
      String ldBid;
      String ldAll;
      String ldBll;
      for (int i = 0; i < size; i++) {
         ldA = mLaunchDataList.get(i);
         ldAid = ldA.getResourceIdentifier();
         for (int j = i + 1; j < size; j++) {
            ldB = mLaunchDataList.get(j);
            ldBid = ldB.getResourceIdentifier();
            if (ldBid.equals(ldAid)) {
               ldAll = ldA.getItemIdentifier();
               ldBll = ldB.getItemIdentifier();
               if (ldBll.equals(ldAll)) {
                  mLaunchDataList.remove(j);
                  j--;
                  size = mLaunchDataList.size();
               }
            }
         }
      }
   }

   /**
    * This method performs the actual retrieval of the SCO launch data
    * information, if this information exists in the content package test
    * subject. This method walks through the test subject dom, storing all the
    * SCO launch data information to the LaunchData data structure.
    *
    * @param iRootNode root node of test subject dom.
    *
    * @param iDefaultOrganizationOnly boolean describing the scope of the
    * organization that should be traversed for SCO launch data. Specific
    * to SRTE uses - will no longer be needed in future development.
    *
    * @param iRemoveAssets boolean describing whether or not to remove assets
    * from the LaunchData list. The SRTE needs this to be false in
    * order to get LaunchData for assets as well.
    */
   private void setLaunchData(Node iRootNode, boolean iDefaultOrganizationOnly, boolean iRemoveAssets) {
      mLogger.entering("ManifestHandler", "setLaunchData(iRootNode)");
      List<Node> organizationNodes = getOrganizationNodes(iRootNode, iDefaultOrganizationOnly);
      int size = organizationNodes.size();
      // populate the Launch Data for the Organization level
      for (int i = 0; i < size; i++) {
         Node currentOrganization = organizationNodes.get(i);
         String orgIdentifier = DOMTreeUtility.getAttributeValue(currentOrganization, "identifier");
         addItemInfo(currentOrganization, orgIdentifier);
      }

      Node xmlBaseNode = null;
      String manifestXMLBase = mEMPTY_STRING;
      String resourcesXMLBase = mEMPTY_STRING;

      // calculate the <manifest>'s xml:base
      NamedNodeMap attributes = iRootNode.getAttributes();
      xmlBaseNode = attributes.getNamedItem("xml:base");
      if (xmlBaseNode != null) {
         manifestXMLBase = xmlBaseNode.getNodeValue();
      }

      // calculate the <resources> xml:base
      Node resources = DOMTreeUtility.getNode(iRootNode, "resources");
      if (resources != null) {
         attributes = resources.getAttributes();
         xmlBaseNode = attributes.getNamedItem("xml:base");
         if (xmlBaseNode != null) {
            resourcesXMLBase = xmlBaseNode.getNodeValue();
         }
      }

      // populate all Launch Data with the xml:base values
      size = mLaunchDataList.size();
      LaunchData currentLaunchData = null;
      for (int j = 0; j < size; j++) {
         currentLaunchData = mLaunchDataList.get(j);
         // update the xml:base data
         currentLaunchData.setManifestXMLBase(manifestXMLBase);
         currentLaunchData.setResourcesXMLBase(resourcesXMLBase);
         // replace the old LaunchData Object with the updated one
         mLaunchDataList.set(j, currentLaunchData);
      }

      // populate the Launch Data for the Resource level
      addResourceInfo(iRootNode, iRemoveAssets);
      removeDuplicateLaunchData();
      mLaunchDataTracked = true;
   }

   /**
    * This method performs the actual retrieval of the metadata information,
    * if this information exists in the content package test
    * subject. This method walks through the test subject dom, storing all
    * metadata information to the MetadataData data structure. xml:base is
    * also being tracked for the &lt;adlcp:location&gt; element.
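    *
    * (Example of the xml:base tracking: a manifest xml:base of "content/"
    * and a resource xml:base of "sco1/" turn an adlcp:location value of
    * "meta.xml" into "content/sco1/meta.xml".)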
    *
    * @param iNode element nodes traversed for metadata element.
    *
    * @param iBaseDirectory base directory for location of test subject
    */
   private void setMetadata(Node iNode, String iBaseDirectory) {
      if (iNode != null) {
         String nodeName = iNode.getLocalName();
         if (nodeName != null) {
            if (nodeName.equals("manifest")) {
               // set and retrieve xml:base of manifest if it exists
               // must first clear out xml:base values if dealing with a sub-manifest
               if (!mManifestResourcesXMLBase.equals(mEMPTY_STRING)) {
                  mManifestResourcesXMLBase = mEMPTY_STRING;
                  mResourceXMLBase = mEMPTY_STRING;
               }
               String manifestXMLBase = DOMTreeUtility.getAttributeValue(iNode, "base");
               if (!manifestXMLBase.equals(mEMPTY_STRING)) {
                  mManifestResourcesXMLBase = manifestXMLBase;
               }
               trackMetadata(iNode, "adlreg", iBaseDirectory);
            } else if (nodeName.equals("organization")) {
               trackMetadata(iNode, "adlreg", iBaseDirectory);
            } else if (nodeName.equals("item")) {
               trackMetadata(iNode, "adlreg", iBaseDirectory);
            } else if (nodeName.equals("resources")) {
               // set and retrieve xml:base of resources if it exists
               String resourcesXMLBase = DOMTreeUtility.getAttributeValue(iNode, "base");
               if (!resourcesXMLBase.equals(mEMPTY_STRING)) {
                  mManifestResourcesXMLBase = mManifestResourcesXMLBase + resourcesXMLBase;
               }
            } else if (nodeName.equals("resource")) {
               // retrieve xml:base of resource if it exists; it is not
               // accumulated because it applies to the specified resource only
               mResourceXMLBase = DOMTreeUtility.getAttributeValue(iNode, "base");
               trackMetadata(iNode, "adlreg", iBaseDirectory);
            } else if (nodeName.equals("file")) {
               trackMetadata(iNode, "adlreg", iBaseDirectory);
            }
            NodeList nodeChildren = iNode.getChildNodes();
            if (nodeChildren != null) {
               int size = nodeChildren.getLength();
               for (int i = 0; i < size; i++) {
                  // special check for SCORM 3rd Edition to keep a
                  // (sub)manifest from having its metadata set
                  String childNodeName = nodeChildren.item(i).getLocalName();
                  if (childNodeName != null) {
                     if (!childNodeName.equals("manifest")) {
                        setMetadata(nodeChildren.item(i), iBaseDirectory);
                     }
                  } else {
                     setMetadata(nodeChildren.item(i), iBaseDirectory);
                  }
               }
            }
         }
         mMetadataTracked = true;
      }
   }

   /**
    * This method tracks the metadata information contained in the
    * metadata element and saves the information in the MetadataData object.
    * Such information saved includes the metadata application profile type,
    * the URI if the metadata is external stand-alone metadata, or the root
    * node if the metadata is inline in the form of extensions to the content
    * package manifest.
    *
    * @param iNode - node tracked for Metadata
    *
    * @param iApplicationProfileType Metadata Application Profile Type (asset,
    * sco, activity, contentaggregation).
* * @param iBaseDirectory - base directory for location of test subject * */ private void trackMetadata(Node iNode, String iApplicationProfileType, String iBaseDirectory) { Node metadataNode = DOMTreeUtility.getNode(iNode, "metadata"); if (metadataNode != null) { String identifier = DOMTreeUtility.getAttributeValue(iNode, "identifier"); //Gets all the location metadata List<Node> locationNodeList = DOMTreeUtility.getNodes(metadataNode, "location"); // iterate through the List and get the attribute names and values int locationNodeListSize = locationNodeList.size(); for (int i = 0; i < locationNodeListSize; i++) { MetadataData metadataData = new MetadataData(); metadataData.setApplicationProfileType(iApplicationProfileType); //Gets the location value of each node String locationValue = DOMTreeUtility.getNodeValue(locationNodeList.get(i)); locationValue = mManifestResourcesXMLBase + mResourceXMLBase + locationValue; metadataData.setIdentifier(identifier); metadataData.setLocation(locationValue); mMetadataDataList.add(metadataData); } //Gets all the inline metadata from the current node List<Node> lomNodelist = DOMTreeUtility.getNodes(metadataNode, "lom"); // iterate through the List and get the attribute names and values int lomNodeListSize = lomNodelist.size(); for (int j = 0; j < lomNodeListSize; j++) { MetadataData metadataData = new MetadataData(); metadataData.setApplicationProfileType(iApplicationProfileType); //Gets the location value of each node metadataData.setIdentifier(identifier); Node lomNode = lomNodelist.get(j); metadataData.setRootLOMNode(lomNode); metadataData.setLocation("inline"); mMetadataDataList.add(metadataData); } //metadataData.printToConsole(); } } }
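/*
 * Illustrative usage sketch only (not part of the ADL sources): drives
 * ManifestHandler over a parsed imsmanifest.xml. The file name, the JAXP
 * setup and the printed fields are assumptions for the example; the
 * namespace-aware parse matches the getLocalName() calls used above.
 */
class ManifestHandlerUsageSketch {
   static void dumpLaunchData(java.io.File manifestFile) throws Exception {
      javax.xml.parsers.DocumentBuilderFactory factory =
         javax.xml.parsers.DocumentBuilderFactory.newInstance();
      // getLocalName() returns null on a non-namespace-aware parse
      factory.setNamespaceAware(true);
      org.w3c.dom.Document dom = factory.newDocumentBuilder().parse(manifestFile);
      ManifestHandler handler = new ManifestHandler();
      // walk every <organization> (false) and keep asset rows (false)
      List<LaunchData> launchData =
         handler.getLaunchData(dom.getDocumentElement(), false, false);
      for (LaunchData ld : launchData) {
         System.out.println(ld.getItemIdentifier() + " -> " + ld.getResourceIdentifier());
      }
   }
}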
package org.koala.runnersFramework.runners.bot;

import java.util.logging.Level;
import java.util.logging.Logger;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.HashMap;
import java.util.Random;
import java.util.Timer;
import java.util.TimerTask;
import ibis.ipl.ConnectionFailedException;
import ibis.ipl.IbisIdentifier;
import ibis.ipl.IbisProperties;
import ibis.ipl.ReadMessage;
import ibis.ipl.ReceiveTimedOutException;
import ibis.ipl.SendPort;
import ibis.ipl.SendPortIdentifier;
import ibis.ipl.WriteMessage;
import java.io.File;
import java.io.FileOutputStream;
import java.io.ObjectOutputStream;
import java.util.HashSet;
import java.util.Iterator;
import org.koala.runnersFramework.runners.bot.listener.BatsServiceApiImpl;

/**
 * This is the estimator: the master of the sampling phase, which runs a small
 * sample of the bag of tasks on every cluster in order to estimate task
 * runtimes and propose execution schedules.
 */
public class SamplingPhaseMaster extends Master {

   protected HashMap<String, HashMap<Integer, HashMap<String, Job>>> categories;
   private ArrayList<Job> replicatedTasks;
   Timer timer;
   private boolean selectedReferenceCluster = false;
   private int totalSamplingPointsCounter = 0;
   private int totalSamplingSubmittedCounter = 0;
   private double sampleCost;
   private long sampleMakespan; /* expressed in seconds */
   public ArrayList<Schedule> schedules;

   protected SamplingPhaseMaster(BoTRunner bot) throws Exception {
      super(bot);
      schedules = new ArrayList<Schedule>();

      // sample size for a bag of N tasks:
      //    n = ceil(N * zeta^2 / (zeta^2 + 2 * (N - 1) * delta^2))
      // so a larger bag or a looser accuracy requirement (delta) shrinks
      // the sampled fraction
      double zeta_sq = bot.zeta * bot.zeta;
      bot.noSampleJobs = (int) Math.ceil(bot.tasks.size() * zeta_sq /
            (zeta_sq + 2 * (bot.tasks.size() - 1) * bot.delta * bot.delta));
      System.out.println("Sample size is: " + bot.noSampleJobs);
      if (bot.noSampleJobs < bot.noReplicatedJobs) {
         System.out.println("Bag too small!");
         shutdownIbis();
         throw new RuntimeException("Bag too small!");
      }

      bot.finishedTasks = new HashSet<Job>();
      replicatedTasks = new ArrayList<Job>();
      // draw the replicated tasks with a fixed seed so runs are repeatable
      Random randomSample = new Random(1111111111L);
      for (int i = 0; i < bot.noReplicatedJobs; i++) {
         replicatedTasks.add(bot.tasks.remove(randomSample.nextInt(bot.tasks.size())));
      }

      Collection<Cluster> clusters = bot.Clusters.values();
      if (bot.noSampleJobs * bot.Clusters.size() > 0.5 * totalNumberTasks) {
         System.out.println("Size of the BoT too small for the number of clusters");
         shutdownIbis();
         throw new RuntimeException("Size of the BoT too small for the number of clusters");
      }

      try {
         bot.noInitialWorkers = Integer.parseInt(System.getenv().get("SAMPLING_WORKERS"));
      } catch (Exception e) {
         bot.noInitialWorkers = 1;
      }

      Cluster cheapest = findCheapest();
      bot.minCostATU = Integer.MAX_VALUE;
      bot.maxCostATU = 0;
      for (Cluster cluster : clusters) {
         HashMap<String, WorkerStats> workersCluster = new HashMap<String, WorkerStats>();
         workers.put(cluster.alias, workersCluster);
         cluster.setCrtNodes(0);
         cluster.setPendingNodes(computeRatio(cluster.costUnit, cheapest.costUnit, true));
         cluster.setNecNodes(bot.noInitialWorkers);
         bot.minCostATU = (int) Math.min(bot.minCostATU, cluster.costUnit);
         bot.maxCostATU += cluster.maxNodes * cluster.costUnit;
      }
      timer = new Timer();
   }

   private int computeRatio(double c_i, double cheapest, boolean uniform) {
      int initialWorkers = bot.noInitialWorkers;
      if (!uniform) {
         double ratio = c_i / cheapest;
         if (ratio > bot.noInitialWorkers) {
            ratio = bot.noInitialWorkers;
         }
         initialWorkers = (int) Math.floor(bot.noInitialWorkers / ratio);
      }
      return initialWorkers;
   }

   private Cluster findCheapest() {
      Cluster cheapestCluster = null;
      double cheapest = Double.MAX_VALUE;
      for (Cluster cluster : bot.Clusters.values()) {
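         // prefer the lower cost per ATU; ties are broken by the smaller
         // estimated task runtime Ti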
if(cluster.costUnit < cheapest) { cheapest = cluster.costUnit; cheapestCluster = cluster; } else if (cluster.costUnit == cheapest) { if(cluster.Ti < cheapestCluster.Ti) { cheapestCluster = cluster; } } } return cheapestCluster; } @Override protected boolean areWeDone() { handleLostConnections(); for(Cluster cluster : bot.Clusters.values()) { if(cluster.replicatedTasksCounter != bot.noReplicatedJobs) { return false; /* not used in all-compute-sample mode * if(cluster.isReferenceCluster) { if(cluster.samplingPointsCounter + cluster.replicatedTasksCounter != bot.noSampleJobs) { return false; } }*/ } } if((totalSamplingPointsCounter+bot.noReplicatedJobs) != bot.noSampleJobs) { return false; } masterRP.disableConnections(); /*first check whether more workers are connected*/ for (SendPortIdentifier spi : masterRP.connectedTo()) { String node = spi.ibisIdentifier().location().getLevel(0); String cl = spi.ibisIdentifier().location().getParent().toString(); /*node connected but didn't manage to send a job request, either because it died or because it * was slower than the other nodes*/ if ((workers.get(cl).get(node) == null) || /*node did not report job result back yet*/ (workers.get(cl).get(node).isFinished() == false)) { return false; } } return true; } @Override protected Job handleJobRequest(IbisIdentifier from) { String clusterName = from.location().getParent().toString(); String node = from.location().getLevel(0); Cluster cluster = bot.Clusters.get(clusterName); /*DEBUG*/ System.err.println("job request from node " + from.location().toString() + " in cluster " + clusterName); HashMap<String, WorkerStats> thisCluster = workers.get(clusterName); if(thisCluster == null) { System.out.println("Cannot find the cluster with alias " + clusterName); shutdownIbis(); throw new RuntimeException("Cannot find the cluster with alias " + clusterName); } WorkerStats reacquiredMachine = thisCluster.get(node); if(reacquiredMachine == null) { workers.get(clusterName).put(node, new WorkerStats(node, System.currentTimeMillis(), from)); } else { reacquiredMachine.reacquire(cluster.timeUnit, System.currentTimeMillis()); } cluster.setCrtNodes(cluster.getCrtNodes()+1); cluster.setPendingNodes(cluster.getPendingNodes()-1); //decideReferenceCluster(); return findNextJob(cluster, from); } @Override protected Job handleJobResult(JobResult received, IbisIdentifier from) { String node = from.location().getLevel(0); String clusterName = from.location().getParent().toString(); Cluster cluster = bot.Clusters.get(clusterName); System.err.println(from.location().toString() + " returned result of job " + received.getJobID() + " executed for (sec)" + received.getStats().getRuntime()/1000000000); /*find the job*/ Job doneJob = cluster.regressionPoints.get(received.getJobID()); if(doneJob == null) { doneJob = cluster.samplingPoints.get(received.getJobID()); if(doneJob == null) { doneJob = cluster.extraPoints.get(received.getJobID()); } else { cluster.samplingPointsCounter ++; totalSamplingPointsCounter ++; } } else { cluster.replicatedTasksCounter ++; } doneJob.runtimes.put(clusterName, new Double(received.getStats().getRuntime()/1000000000)) ; doneJob.done = true; bot.finishedTasks.add(doneJob); workers.get(clusterName).get(node).addJobStats(received.getStats().getRuntime()); //decideReferenceCluster(); return findNextJob(cluster,from); } private void decideReferenceCluster() { if(selectedReferenceCluster == false) { /*if this is the last regression point to be executed on this cluster * this cluster should become reference * 
       * replace later by better condition, since some clusters might start
       * somewhat later, though workers are faster, hence having a handicap
       * which translates into selection of the "wrong" reference cluster; also,
       * if we do not have the same number of initial workers on each
       * cluster, the current condition becomes wrong */
      Double minRT = Double.MAX_VALUE;
      Cluster ref = null;
      for (Cluster cluster : bot.Clusters.values()) {
         if (cluster.replicatedTasksCounter != bot.noReplicatedJobs) {
            return;
         } else {
            if (replicatedTasks.get(0).runtimes.get(cluster.alias).doubleValue() < minRT) {
               minRT = replicatedTasks.get(0).runtimes.get(cluster.alias).doubleValue();
               ref = cluster;
            } else if (replicatedTasks.get(0).runtimes.get(cluster.alias).doubleValue() == minRT) {
               if (ref.costUnit > cluster.costUnit) {
                  ref = cluster;
               }
            }
         }
      }
      ref.isReferenceCluster = true;
      selectedReferenceCluster = true;
   }
}

   private Job findNextJob(Cluster cluster, IbisIdentifier from) {
      String clusterName = cluster.alias;
      String node = from.location().getLevel(0);
      Job nextJob = null;
      if (cluster.regressionPoints.size() < bot.noReplicatedJobs) {
         /* should select next unsubmitted task, to deal with failed replicated jobs */
         nextJob = replicatedTasks.get(cluster.regressionPoints.size());
         cluster.regressionPoints.put(nextJob.jobID, nextJob);
      } else {
         /* if(cluster.isReferenceCluster && (cluster.samplingPoints.size() < bot.noSampleJobs-bot.noReplicatedJobs)) { */
         if (totalSamplingSubmittedCounter < bot.noSampleJobs - bot.noReplicatedJobs) {
            nextJob = bot.tasks.remove(random.nextInt(bot.tasks.size()));
            cluster.samplingPoints.put(nextJob.jobID, nextJob);
            totalSamplingSubmittedCounter++;
         } else {
            WorkerStats ws = workers.get(clusterName).get(node);
            long timeLeftATU = cluster.timeUnit * 60000 - ws.getUptime() % (cluster.timeUnit * 60000) - 60000;
            //System.out.println("timeLeftATU = " + timeLeftATU);
            if (timeLeftATU > 0) {
               try {
                  nextJob = bot.tasks.remove(random.nextInt(bot.tasks.size()));
                  cluster.extraPoints.put(nextJob.jobID, nextJob);
                  ws.setTimeLeftATU(timeLeftATU);
                  terminateWorker(cluster, ws, " sampling finished, end of current ATU");
               } catch (Exception e) {
                  if (bot.tasks.size() == 0) {
                     System.out.println("Out of tasks");
                     return sayGB(from);
                  } else {
                     // report the error instead of silently discarding the message
                     System.err.println("Unknown error in sampling: " + e.getLocalizedMessage());
                  }
               }
            } else {
               System.out.println("Time is up");
               return sayGB(from);
            }
         }
      }
      return nextJob;
   }

   public void terminateWorker(Cluster cluster, WorkerStats ws, String reason) {
      // schedule a shutdown message for this worker at the end of its current
      // ATU; 'reason' is currently unused
      TimerTask tt = new MyTimerTask(cluster, ws.getIbisIdentifier(), myIbis);
      timer.schedule(tt, ws.timeLeftATU);
   }

   private Job sayGB(IbisIdentifier to) {
      System.err.println("We say goodbye to " + to.location().toString() + " from " + this.getClass().getName());
      String cluster = to.location().getParent().toString();
      String node = to.location().getLevel(0);
      workers.get(cluster).get(node).workerFinished(System.currentTimeMillis());
      workers.get(cluster).get(node).setLatestJobStartTime(0);
      bot.Clusters.get(cluster).setCrtNodes(bot.Clusters.get(cluster).getCrtNodes() - 1);
      return new NoJob();
   }

   @Override
   protected void handleLostConnections() {
      String clusterName;
      String node;
      Cluster cluster;
      for (SendPortIdentifier lost : masterRP.lostConnections()) {
         cluster = bot.Clusters.get(lost.ibisIdentifier().location().getParent().toString());
         clusterName = cluster.alias;
         node = lost.ibisIdentifier().location().getLevel(0);
         if (!workers.get(clusterName).get(node).isFinished()) {
            // findFailedJob() re-registers the job the lost worker was running
            String jobID = findFailedJob(clusterName, node);
            workers.get(clusterName).get(node).workerFinished(System.currentTimeMillis());
            cluster.setCrtNodes(cluster.getCrtNodes() - 1);
         }
      }
   }

   @Override
   public void run() {
      boolean undone = true;
      timeout = (long) (BoTRunner.INITIAL_TIMEOUT_PERCENT * bot.deadline * 60000);
      System.err.println("Timeout is now " + timeout);
      long actualStartTime = System.currentTimeMillis();
      while (undone) {
         try {
            ReadMessage rm = masterRP.receive(30000);
            Object received = rm.readObject();
            IbisIdentifier from = rm.origin().ibisIdentifier();
            rm.finish();
            Job nextJob = null;
            if (received instanceof JobRequest) {
               nextJob = handleJobRequest(from);
            } else if (received instanceof JobResult) {
               nextJob = handleJobResult((JobResult) received, from);
            } else {
               shutdownIbis();
               throw new RuntimeException("received an object which is not JobRequest or JobResult: " + received);
            }
            nextJob.setNode(from.location().getLevel(0));
            /*begin for hpdc tests
            if(! (nextJob instanceof NoJob)) {
               //long sleep = Long.parseLong(nextJob.args[0]);
               if(from.location().getParent().toString().compareTo(bot.CLUSTER2) == 0) {
                  //nextJob.args[0] = new Long(2* sleep / 3).toString();
                  ((HPDCJob)nextJob).setArg(2);
               } else ((HPDCJob)nextJob).setArg(1);
            }
            /*end for hpdc tests*/
            SendPort workReplyPort = myIbis.createSendPort(masterReplyPortType);
            workReplyPort.connect(from, "worker");
            WriteMessage wm = workReplyPort.newMessage();
            wm.writeObject(nextJob);
            wm.finish();
            workReplyPort.close();
            undone = !areWeDone();
         } catch (ReceiveTimedOutException rtoe) {
            decide();
            System.err.println("I timed out!");
            undone = !areWeDone();
         } catch (ConnectionFailedException cfe) {
            /* !!! don't forget to decrease the number of crt nodes */
            String clusterName = cfe.ibisIdentifier().location().getParent().toString();
            Cluster cluster = bot.Clusters.get(clusterName);
            String node = cfe.ibisIdentifier().location().getLevel(0);
            String jobID = findFailedJob(clusterName, node);
            workers.get(clusterName).get(node).workerFinished(System.currentTimeMillis());
            cluster.setCrtNodes(cluster.getCrtNodes() - 1);
            Date d = new Date();
            System.err.println(d.toString() + ": Node " + cfe.ibisIdentifier().location().toString()
                  + " failed before receiving job " + jobID);
         } catch (IOException ioe) {
            ioe.printStackTrace();
            undone = !areWeDone();
         } catch (ClassNotFoundException e) {
            e.printStackTrace();
         }

         // update progress info in the cache object
         double price = 0;
         for (Cluster cluster : bot.Clusters.values()) {
            Collection<WorkerStats> wss = workers.get(cluster.alias).values();
            for (WorkerStats ws : wss) {
               price += Math.ceil((double) ws.getUptime() / 60000 / cluster.timeUnit) * cluster.costUnit;
            }
         }
         double oldvalue = 0;
         BatsServiceApiImpl.serviceState.noCompletedTasks = bot.finishedTasks.size();
         oldvalue = BatsServiceApiImpl.serviceState.moneySpent;
         BatsServiceApiImpl.serviceState.moneySpent = price;
         bot.decrementUserCredit(BatsServiceApiImpl.serviceState.moneySpent - oldvalue);
         BatsServiceApiImpl.serviceState.moneySpentSampling = price;
         if (BatsServiceApiImpl.serviceState.noCompletedTasks == BatsServiceApiImpl.serviceState.noTotalTasks) {
            BatsServiceApiImpl.serviceState.phase = BatsServiceApiImpl.serviceState.PHASE_FINISHED_WHILE_SAMPLING;
         } else {
            if (!undone) {
               BatsServiceApiImpl.serviceState.phase = BatsServiceApiImpl.serviceState.PHASE_SAMPLING_READY;
            }
         }
      }

      // select the last cluster as the base for sampling points normalization
      ArrayList<Cluster> clusterList = new ArrayList<Cluster>(bot.Clusters.values());
      int baseIndex = clusterList.size() - 1;
      Cluster base = clusterList.get(baseIndex);
      base.isReferenceCluster = true;
      base.beta0 = 0;
      base.beta1 = 1;
      for (int i = 0; i < baseIndex; i++) {
         Cluster cluster = clusterList.get(i);
         cluster.linearRegression(base);
         for (Job j : cluster.samplingPoints.values()) {
            double t = j.runtimes.get(cluster.alias).doubleValue();
            // map this cluster's runtime onto the base cluster via the
            // fitted line t = beta0 + beta1 * tbase
            double tbase = (t - cluster.beta0) / cluster.beta1;
            j.runtimes.put(base.alias, new Double(tbase));
            base.samplingPoints.put(j.jobID, j);
            base.samplingPointsCounter++;
         }
         System.out.println("cluster " + cluster.alias + ": beta1=" + cluster.beta1 + ", beta0=" + cluster.beta0);
      }
      base.estimateX();
      base.estimateTi();
      for (Job j : base.samplingPoints.values()) {
         double tbase = j.runtimes.get(base.alias).doubleValue();
         base.noDoneJobs++;
         base.totalRuntimeSampleJobs += tbase;
         base.totalRuntimeDoneJobs += tbase;
         // convert seconds to nanoseconds before truncating to long
         base.orderedSampleResultsSet.add(new JobResult(j.jobID, new JobStats((long) (tbase * 1000000000L))));
      }
      for (Job j : base.regressionPoints.values()) {
         double tbase = j.runtimes.get(base.alias).doubleValue();
         base.samplingPoints.put(j.jobID, j);
         base.samplingPointsCounter++;
         base.noDoneJobs++;
         base.totalRuntimeSampleJobs += tbase;
         base.totalRuntimeDoneJobs += tbase;
         base.orderedSampleResultsSet.add(new JobResult(j.jobID, new JobStats((long) (tbase * 1000000000L))));
      }
      for (Job j : base.extraPoints.values()) {
         double tbase = j.runtimes.get(base.alias).doubleValue();
         base.noDoneJobs++;
         base.totalRuntimeDoneJobs += tbase;
      }
      System.out.println("cluster " + base.alias + " has " + base.samplingPointsCounter + " samples;"
            + " size of sampling points array is " + base.samplingPoints.size());
      System.out.println("base cluster is " + base.alias + ": mean=" + base.meanX + ", variance=" + base.varXsq);
      for (Cluster cluster : bot.Clusters.values()) {
         if (!cluster.isReferenceCluster) {
            cluster.estimateX(base);
            cluster.estimateTi(base);
            for (Job j : base.samplingPoints.values()) {
               double tbase = j.runtimes.get(base.alias).doubleValue();
               double t = tbase * cluster.beta1 + cluster.beta0;
               j.runtimes.put(cluster.alias, new Double(t));
               if (cluster.samplingPoints.put(j.jobID, j) == null)
                  cluster.samplingPointsCounter++;
               cluster.noDoneJobs++;
               cluster.totalRuntimeSampleJobs += t;
               cluster.totalRuntimeDoneJobs += t;
               cluster.orderedSampleResultsSet.add(new JobResult(j.jobID, new JobStats((long) (t * 1000000000L))));
            }
            for (Job j : cluster.extraPoints.values()) {
               double t = j.runtimes.get(cluster.alias).doubleValue();
               cluster.noDoneJobs++;
               cluster.totalRuntimeDoneJobs += t;
            }
            System.out.println("cluster " + cluster.alias + " has " + cluster.samplingPointsCounter + " samples;"
                  + " size of sampling points array is " + cluster.samplingPoints.size());
            System.out.println("cluster " + cluster.alias + ": mean=" + cluster.meanX + ", variance=" + cluster.varXsq
                  + ", beta1=" + cluster.beta1 + ", beta0=" + cluster.beta0);
         }
      }

      double price = 0;
      for (Cluster cluster : bot.Clusters.values()) {
         Collection<WorkerStats> wss = workers.get(cluster.alias).values();
         System.out.println("Cluster " + cluster.alias + " stats =>");
         for (WorkerStats ws : wss) {
            ws.printStats();
            price += Math.ceil((double) ws.getUptime() / 60000 / cluster.timeUnit) * cluster.costUnit;
            System.out.println("Unused fraction of ATU: "
                  + (Math.ceil((double) ws.getUptime() / 60000 / cluster.timeUnit)
                  * cluster.timeUnit * 60000 - ws.getUptime() % (cluster.timeUnit * 60000)));
         }
      }

      // update the cache object - this is probably redundant, but just making sure
      double oldvalue;
      oldvalue = BatsServiceApiImpl.serviceState.moneySpent;
      BatsServiceApiImpl.serviceState.moneySpent = price;
      bot.decrementUserCredit(BatsServiceApiImpl.serviceState.moneySpent - oldvalue);
      System.out.println("Due amount sampling " + price);
      sampleCost = price;

      long totalTime = (System.currentTimeMillis() - actualStartTime) / 1000;
      sampleMakespan = totalTime;
      System.out.println("Sampling phase took " + totalTime + " (sec), which is about "
            + totalTime / 60 + "m" + totalTime % 60 + "s");

      Cluster mostProfitable = selectMostProfitable();
      System.out.println("Most profitable machine type: " + mostProfitable.alias + ", cost: "
            + mostProfitable.costUnit + ", Ti: " + mostProfitable.Ti + " minutes");
      for (Cluster cluster : bot.Clusters.values()) {
         cluster.computeJobsPerATU();
      }

      System.out.println(" Solutions follow ");
      System.out.println("\tB\tC\tM");
      int jobsLeft = bot.tasks.size(); // if jobsLeft == 0 the BoT completed in the sampling phase
      double makespanBmin = Math.ceil((jobsLeft * mostProfitable.Ti) / bot.timeUnit);
      double Bmin = makespanBmin * mostProfitable.costUnit;
      double maxSpeed = 0.0;
      long costMaxSpeed = 0;
      System.out.println("1.\t" + Bmin + "\t" + Bmin + "\t" + makespanBmin);

      ArrayList<Item> items = new ArrayList<Item>();
      items.add(new Item(0, 0, ""));
      for (Cluster cluster : bot.Clusters.values()) {
         for (int i = 0; i < cluster.maxNodes; i++)
            items.add(new Item(1 / cluster.Ti, (int) cluster.costUnit, cluster.alias));
         maxSpeed += (double) (cluster.maxNodes / cluster.Ti);
         costMaxSpeed += cluster.maxNodes * cluster.costUnit;
      }
      double makespanMin = Math.ceil((jobsLeft / maxSpeed) / bot.timeUnit);
      double BmakespanMin = makespanMin * costMaxSpeed;
      System.out.println("---------------------");
      // selectedSchedule = 0;
      Knapsack mooCheapest = new Knapsack(items.toArray(new Item[0]), (long) Bmin, jobsLeft,
            bot.minCostATU, bot.maxCostATU, (int) bot.timeUnit);
      HashMap<String, Integer> cheapestSol = mooCheapest.findSol();
      int aini = 0;
      for (Cluster cluster : bot.Clusters.values()) {
         if (cheapestSol.get(cluster.alias) != null)
            aini += cheapestSol.get(cluster.alias).intValue()
                  * Math.floor(mooCheapest.noATUPlan * bot.timeUnit / cluster.Ti);
      }
      int deltaN = jobsLeft - aini;
      int x = 0;
      boolean success = true;
      // grow the budget by 1% of Bmin at a time until the cheapest schedule
      // covers all remaining jobs
      while (deltaN > 0) {
         System.out.println("deltaN=" + deltaN);
         deltaN = 0;
         x++;
         System.out.println("adding 1 percent extra, x=" + x);
         mooCheapest = new Knapsack(items.toArray(new Item[0]),
               (long) Math.ceil(Bmin + (double) (x * Bmin / 100)), jobsLeft,
               bot.minCostATU, bot.maxCostATU, (int) bot.timeUnit);
         cheapestSol = mooCheapest.findSol();
         aini = 0;
         for (Cluster cluster : bot.Clusters.values()) {
            if (cheapestSol.get(cluster.alias) != null)
               aini += cheapestSol.get(cluster.alias).intValue()
                     * Math.floor(mooCheapest.noATUPlan * bot.timeUnit / cluster.Ti);
         }
         deltaN = jobsLeft - aini;
         if ((long) Math.ceil(Bmin + (double) (x * Bmin / 100)) > BmakespanMin) {
            System.out.println("Can't find cheap schedule!");
            success = false;
            break;
         }
      }
      System.out.println("deltaN=" + deltaN);
      double BminN = Math.ceil(Bmin + (double) (x * Bmin / 100));
      schedules.add(new Schedule((long) BminN, mooCheapest.costPlan, mooCheapest.noATUPlan, cheapestSol));
      System.out.println("2.\t" + BminN + "\t" + mooCheapest.costPlan + "\t" + mooCheapest.noATUPlan);

      if (x < 10) {
         System.out.println("---------------------");
         // selectedSchedule = 1;
         long BminPlus10 = (long) (Bmin * 1.1);
         Knapsack mooCheapestPlus10 = new Knapsack(items.toArray(new Item[0]), BminPlus10, jobsLeft,
bot.minCostATU, bot.maxCostATU,(int)bot.timeUnit); HashMap<String, Integer> cheapestPlus10Sol = mooCheapestPlus10.findSol(); schedules.add(new Schedule(BminPlus10, mooCheapestPlus10.costPlan, mooCheapestPlus10.noATUPlan, cheapestPlus10Sol)); System.out.println("3.\t" + BminPlus10 + "\t" + mooCheapestPlus10.costPlan + "\t" + mooCheapestPlus10.noATUPlan); } if (x < 20) { System.out.println("---------------------"); // selectedSchedule=2 long BminPlus20 = (long)(Bmin*1.2); Knapsack mooCheapestPlus20 = new Knapsack(items.toArray(new Item[0]), BminPlus20 , jobsLeft, bot.minCostATU, bot.maxCostATU,(int)bot.timeUnit); HashMap<String, Integer> cheapestPlus20Sol = mooCheapestPlus20.findSol(); schedules.add(new Schedule(BminPlus20, mooCheapestPlus20.costPlan, mooCheapestPlus20.noATUPlan, cheapestPlus20Sol)); System.out.println("4.\t" + BminPlus20 + "\t" + mooCheapestPlus20.costPlan + "\t" + mooCheapestPlus20.noATUPlan); } System.out.println("---------------------"); // selectedSchedule=3 System.out.println("Initial BmakespanMin=" + BmakespanMin + "; initial makespanMin=" + makespanMin); Knapsack mooFastest = new Knapsack(items.toArray(new Item[0]), (long)BmakespanMin, jobsLeft, bot.minCostATU, bot.maxCostATU,(int)bot.timeUnit); HashMap<String, Integer> fastestSol = mooFastest.findSol(); aini = 0; for (Cluster cluster : bot.Clusters.values()) { if(fastestSol.get(cluster.alias) != null) aini += fastestSol.get(cluster.alias).intValue()* Math.floor(mooFastest.noATUPlan*bot.timeUnit/cluster.Ti); } deltaN = jobsLeft - aini; x = 0; long BdeltaN = 0; if(deltaN > 0) { System.out.println("deltaN=" + deltaN); ArrayList<Cluster> orderedByJobsPerATU = new ArrayList<Cluster>(bot.Clusters.values()); Collections.sort(orderedByJobsPerATU, new Comparator<Cluster>(){ public int compare(Cluster a, Cluster b) { return a.ni - b.ni; } }); int leftDeltaN = deltaN; for(int i=orderedByJobsPerATU.size()-1; i>=0; i--) { if(leftDeltaN > orderedByJobsPerATU.get(i).maxNodes) { BdeltaN += (long) orderedByJobsPerATU.get(i).maxNodes*orderedByJobsPerATU.get(i).costUnit; leftDeltaN -= orderedByJobsPerATU.get(i).maxNodes; } else { BdeltaN += (long) leftDeltaN*orderedByJobsPerATU.get(i).costUnit; leftDeltaN=0; break; } } } schedules.add(new Schedule((long)BmakespanMin, (long) BdeltaN, deltaN, mooFastest.costPlan, mooFastest.noATUPlan, fastestSol)); System.out.println("5.\t" + BmakespanMin + "\t" + mooFastest.costPlan + "\t" + mooFastest.noATUPlan); System.out.println("---------------------"); //selectedSchedule=4 long BmakespanMinMinus10 = (long) (BmakespanMin*0.9); Knapsack mooFastestMinus10 = new Knapsack(items.toArray(new Item[0]), BmakespanMinMinus10, jobsLeft, bot.minCostATU, bot.maxCostATU,(int)bot.timeUnit); HashMap<String, Integer> fastestMinus10Sol = mooFastestMinus10.findSol(); schedules.add(new Schedule((long)BmakespanMinMinus10, mooFastestMinus10.costPlan, mooFastestMinus10.noATUPlan, fastestMinus10Sol)); System.out.println("6.\t" + BmakespanMinMinus10 + "\t" + mooFastestMinus10.costPlan + "\t" + mooFastestMinus10.noATUPlan); System.out.println("---------------------"); //selectedSchedule=5 double BmakespanMinMinus20 = BmakespanMin*0.8; Knapsack mooFastestMinus20 = new Knapsack(items.toArray(new Item[0]), (long)BmakespanMinMinus20, jobsLeft, bot.minCostATU, bot.maxCostATU,(int)bot.timeUnit); HashMap<String, Integer> fastestMinus20Sol = mooFastestMinus20.findSol(); aini = 0; for (Cluster cluster : bot.Clusters.values()) { if(fastestMinus20Sol.get(cluster.alias) != null) aini += fastestMinus20Sol.get(cluster.alias).intValue()* 
Math.floor(mooFastestMinus20.noATUPlan*bot.timeUnit/cluster.Ti);
}
deltaN = jobsLeft - aini;
/*
long BdeltaNBmakespanMinMinus20 = 0;
if(deltaN > 0) {
    System.out.println("deltaN=" + deltaN);
    ArrayList<Cluster> orderedByJobsPerATU = new ArrayList<Cluster>(bot.Clusters.values());
    Collections.sort(orderedByJobsPerATU, new Comparator<Cluster>(){
        public int compare(Cluster a, Cluster b) {
            return a.ni - b.ni;
        }
    });
    int leftDeltaN = deltaN;
    for(int i=orderedByJobsPerATU.size()-1; i>=0; i--) {
        if(leftDeltaN > orderedByJobsPerATU.get(i).maxNodes) {
            BdeltaNBmakespanMinMinus20 += (long) orderedByJobsPerATU.get(i).maxNodes*orderedByJobsPerATU.get(i).costUnit;
            leftDeltaN -= orderedByJobsPerATU.get(i).maxNodes;
        } else {
            BdeltaNBmakespanMinMinus20 += (long) leftDeltaN*orderedByJobsPerATU.get(i).costUnit;
            leftDeltaN=0;
            break;
        }
    }
}
*/
long Bthreshold = (long) (BmakespanMinMinus10);
success = true;
x = 0;
while(deltaN > 0) {
    System.out.println("deltaN=" + deltaN);
    deltaN = 0;
    x ++;
    System.out.println("adding 1 percent extra, x=" + x);
    mooFastestMinus20 = new Knapsack(items.toArray(new Item[0]),
        ((long) Math.ceil(BmakespanMin*(0.8+(double)x/100))), jobsLeft,
        bot.minCostATU, bot.maxCostATU, (int)bot.timeUnit);
    fastestMinus20Sol = mooFastestMinus20.findSol();
    aini = 0;
    for (Cluster cluster : bot.Clusters.values()) {
        if(fastestMinus20Sol.get(cluster.alias) != null)
            aini += fastestMinus20Sol.get(cluster.alias).intValue()*
                Math.floor(mooFastestMinus20.noATUPlan*bot.timeUnit/cluster.Ti);
    }
    deltaN = jobsLeft - aini;
    if (((long) Math.ceil(BmakespanMin*(0.8+(double)x/100))) > Bthreshold) {
        System.out.println("Can't find 20% off fastest schedule by incrementing!");
        System.out.println("Schedule risky!");
        success = false;
        break;
    }
}
if(success){
    BmakespanMinMinus20 = Math.ceil(BmakespanMin*(0.8+(double)x/100));
} else {
    mooFastestMinus20 = new Knapsack(items.toArray(new Item[0]), (long)BmakespanMinMinus20, jobsLeft,
        bot.minCostATU, bot.maxCostATU, (int)bot.timeUnit);
    fastestMinus20Sol = mooFastestMinus20.findSol();
}
schedules.add(new Schedule((long)BmakespanMinMinus20, mooFastestMinus20.costPlan,
    mooFastestMinus20.noATUPlan, fastestMinus20Sol));
System.out.println("7.\t" + BmakespanMinMinus20 + "\t" + mooFastestMinus20.costPlan + "\t" + mooFastestMinus20.noATUPlan);
timer.cancel();
shutdownIbis();
updateFEMaxATU();
System.out.println("For me:");
System.out.println("Finished tasks: " + bot.finishedTasks.size());
System.out.println("Replicated tasks: " + this.replicatedTasks.size());
System.out.println("Remaining tasks: " + bot.tasks.size());
bot.jobsRemainingAfterSampling = bot.tasks.size();
dumpSchedules();
System.out.println("Shutting down and killing workers...");
try {
    this.terminateAllWorkers();
} catch (Exception E) {
    E.printStackTrace(System.err);
}
// and now quit... is there something I should do?
// some shutdowns?!?!
/* This is not required anymore.
 * It's implemented in the Executor class.
* //Add user selection input int selectedSchedule = 3; Master master = null; int whichMaster = 2; try { if(whichMaster == 0) { master = new ExecutionPhaseRRMaster(bot, schedules.get(selectedSchedule)); } else if (whichMaster == 1) { master = new ExecutionPhaseMaster(bot, schedules.get(selectedSchedule)); } else if (whichMaster == 2) { master = new ExecutionTailPhaseMaster(bot, schedules.get(selectedSchedule)); } } catch (Exception e1) { // TODO Auto-generated catch block e1.printStackTrace(); } try { master.initMasterComm(); //start workers, assuming format for reservation time interval "dd:hh:mm:00" master.startInitWorkers(); master.run(); } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } */ } private void updateFEMaxATU() { long maxATU = 0; for (Cluster cluster : bot.Clusters.values()) { if(cluster.timeUnit > maxATU) maxATU = cluster.timeUnit; } BatsServiceApiImpl.longestATU = maxATU; } /* * Dumps the schedules in "dump.ser" file. * further comments: should be put in Master.java */ private void dumpSchedules() { try { String fileName; if(new File(bot.schedulesFile).isAbsolute()) { fileName = bot.schedulesFile; } else { fileName = BoTRunner.path + "/" + bot.schedulesFile; } File dirs = new File(fileName.substring(0, fileName.lastIndexOf('/'))); dirs.mkdirs(); File file = new File(fileName); file.createNewFile(); FileOutputStream fos = new FileOutputStream(file); ObjectOutputStream oos = new ObjectOutputStream(fos); // don't write all the tasks. just the ones completed. // they are already stored in bot.finishedTasks; // clear the bag full of remaining tasks bot.bag.cleanup(); oos.writeObject(bot); oos.writeObject(schedules); oos.writeDouble(BatsServiceApiImpl.serviceState.moneySpent); fos.close(); System.out.println("Schedules and BoT dumped to file: " + fileName); } catch (IOException ex) { shutdownIbis(); throw new RuntimeException("Failed to save to file the computed schedules.\n" + ex); } } private Cluster orderFastestPerATU() { // TODO Auto-generated method stub return null; } private Cluster selectMostProfitable() { // TODO Auto-generated method stub Cluster mostProfitable = null; Cluster cheapest = findCheapest(); double profitMax = Double.MIN_VALUE; for(Cluster cluster : bot.Clusters.values()) { cluster.computeProfitability(cheapest); if(cluster.profitability > profitMax) { profitMax = cluster.profitability; mostProfitable = cluster; } } return mostProfitable; } private void decide() { } private String findFailedJob(String clusterName, String node) { Cluster cluster = bot.Clusters.get(clusterName); String jobId = null; for(Job j : cluster.extraPoints.values()) { if ((!j.done) && (j.getNode().compareTo(node)==0)) { jobId = j.getJobID(); cluster.extraPoints.remove(j.getJobID()); bot.tasks.add(j); System.err.println("Node " + node + " in cluster " + clusterName + " failed to execute (extra point) job " + jobId); break; } } if(jobId == null) { for(Job j : cluster.samplingPoints.values()) { if ((!j.done) && (j.getNode().compareTo(node)==0)) { jobId = j.getJobID(); cluster.samplingPoints.remove(j.getJobID()); bot.tasks.add(j); System.err.println("Node " + node + " in cluster " + clusterName + " failed to execute (sampling point) job " + jobId); break; } } if(jobId == null) { for(Job j : cluster.regressionPoints.values()) { if ((!j.done) && (j.getNode().compareTo(node)==0)) { jobId = j.getJobID(); cluster.regressionPoints.remove(j.getJobID()); System.err.println("Node " + node + " in cluster " + clusterName + " failed to execute (regression point) job " 
+ jobId); break; } } } } return jobId; } @Override public void startInitWorkers() { Collection<Cluster> clusters = bot.Clusters.values(); for (Cluster c : clusters) { System.err .println("BoTRunner has found " + bot.tasks.size() + " jobs; will send " + bot.noReplicatedJobs + " to " + c.pendingNodes + " initial workers on cluster " + c.alias); Process p = c.startNodes(/* deadline2ResTime() */"4:45:00", c.pendingNodes, bot.electionName, bot.poolName, bot.serverAddress); // sshRunners.put(c.alias, p); } } public double getSampleCost() { return sampleCost; } public long getSampleMakespan() { return sampleMakespan; } public void shutdownIbis() { try { masterRP.close(); System.out.println("Hurray! I shut down masterRP!!!"); myIbis.end(); System.out.println("Hurray! I shut down ibis!!!"); } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } } }
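/*
 * A minimal, self-contained sketch of the budget-increment pattern used in the
 * scheduling code above: starting from a lower-bound budget Bmin, grow the budget
 * by 1% of Bmin per iteration until the knapsack plan covers all remaining jobs,
 * or give up once the budget exceeds the fast-schedule ceiling. The PlanSolver
 * interface and the jobsCovered method are hypothetical stand-ins for
 * Knapsack.findSol() plus the "aini" accumulation loop; they are not part of the
 * original code.
 */
class BudgetSearchSketch {
    interface PlanSolver {
        // Returns how many jobs the cheapest plan within 'budget' can finish.
        int jobsCovered(long budget);
    }

    /** Returns the first budget that covers jobsLeft, or -1 if none exists below the ceiling. */
    static long findFeasibleBudget(double bmin, double ceiling, int jobsLeft, PlanSolver solver) {
        for (int x = 0; ; x++) {
            long budget = (long) Math.ceil(bmin + x * bmin / 100.0); // Bmin plus x percent
            if (budget > (long) ceiling) {
                return -1; // no cheap schedule below the fastest-schedule budget
            }
            if (solver.jobsCovered(budget) >= jobsLeft) {
                return budget;
            }
        }
    }
}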
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.sysml.runtime.matrix; import java.util.HashSet; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.mapred.Counters.Group; import org.apache.hadoop.mapred.JobClient; import org.apache.hadoop.mapred.JobConf; import org.apache.hadoop.mapred.RunningJob; import org.apache.sysml.conf.ConfigurationManager; import org.apache.sysml.conf.DMLConfig; import org.apache.sysml.runtime.controlprogram.parfor.stat.InfrastructureAnalyzer; import org.apache.sysml.runtime.instructions.MRJobInstruction; import org.apache.sysml.runtime.matrix.data.InputInfo; import org.apache.sysml.runtime.matrix.data.MatrixBlock; import org.apache.sysml.runtime.matrix.data.MatrixIndexes; import org.apache.sysml.runtime.matrix.data.OutputInfo; import org.apache.sysml.runtime.matrix.data.TaggedAdaptivePartialBlock; import org.apache.sysml.runtime.matrix.mapred.MRConfigurationNames; import org.apache.sysml.runtime.matrix.mapred.MRJobConfiguration; import org.apache.sysml.runtime.matrix.mapred.MRJobConfiguration.MatrixChar_N_ReducerGroups; import org.apache.sysml.runtime.matrix.mapred.ReblockMapper; import org.apache.sysml.runtime.matrix.mapred.ReblockReducer; import org.apache.sysml.yarn.ropt.YarnClusterAnalyzer; /* * inputs: input matrices, the inputs are indexed by 0, 1, 2, .. based on the position in this string * inputInfos: the input format information for the input matrices * rlen: the number of rows for each matrix * clen: the number of columns for each matrix * brlen: the number of rows per block for each input matrix * bclen: the number of columns per block for each input matrix * instructionsInMapper: in Mapper, the set of unary operations that need to be performed on each input matrix * reblockInstructions: the reblock instructions * otherInstructionsInReducer: the mixed operations that need to be performed on matrices * numReducers: the number of reducers * replication: the replication factor for the output * resulltIndexes: the indexes of the result matrices that needs to be outputted. 
 * outputs: the names for the output directories, one for each result index
 * outputInfos: output format information for the output matrices
 */
public class ReblockMR
{
    private static final Log LOG = LogFactory.getLog(ReblockMR.class.getName());

    private ReblockMR() {
        //prevent instantiation via private constructor
    }

    public static JobReturn runJob(MRJobInstruction inst, String[] inputs, InputInfo[] inputInfos,
            long[] rlens, long[] clens, int[] brlens, int[] bclens, long[] nnz,
            String instructionsInMapper, String reblockInstructions, String otherInstructionsInReducer,
            int numReducers, int replication, boolean jvmReuse, byte[] resultIndexes,
            String[] outputs, OutputInfo[] outputInfos) throws Exception
    {
        JobConf job = new JobConf(ReblockMR.class);
        job.setJobName("Reblock-MR");

        byte[] realIndexes = new byte[inputs.length];
        for(byte b=0; b<realIndexes.length; b++)
            realIndexes[b]=b;

        //set up the input files and their format information
        //(internally used input converters: text2bc for text, identity for binary inputs)
        MRJobConfiguration.setUpMultipleInputsReblock(job, realIndexes, inputs, inputInfos, brlens, bclens);

        //set up the dimensions of input matrices
        MRJobConfiguration.setMatricesDimensions(job, realIndexes, rlens, clens, nnz);

        //set up the block size
        MRJobConfiguration.setBlocksSizes(job, realIndexes, brlens, bclens);

        //set up the unary instructions that will be performed in the mapper
        MRJobConfiguration.setInstructionsInMapper(job, instructionsInMapper);

        //set up the aggregate instructions that will happen in the combiner and reducer
        MRJobConfiguration.setReblockInstructions(job, reblockInstructions);

        //set up the instructions that will happen in the reducer, after the aggregation instructions
        MRJobConfiguration.setInstructionsInReducer(job, otherInstructionsInReducer);

        //set up the replication factor for the results
        job.setInt(MRConfigurationNames.DFS_REPLICATION, replication);

        //disable automatic task timeouts and speculative task execution
        job.setInt(MRConfigurationNames.MR_TASK_TIMEOUT, 0);
        job.setMapSpeculativeExecution(false);

        //set up preferred custom serialization framework for binary block format
        if( MRJobConfiguration.USE_BINARYBLOCK_SERIALIZATION )
            MRJobConfiguration.addBinaryBlockSerializationFramework( job );

        //set up custom map/reduce configurations
        DMLConfig config = ConfigurationManager.getDMLConfig();
        MRJobConfiguration.setupCustomMRConfigurations(job, config);

        //enable jvm reuse (based on SystemML configuration)
        if( jvmReuse )
            job.setNumTasksToExecutePerJvm(-1);

        //set up what matrices are needed to pass from the mapper to reducer
        HashSet<Byte> mapoutputIndexes = MRJobConfiguration.setUpOutputIndexesForMapper(job, realIndexes,
            instructionsInMapper, reblockInstructions, null, otherInstructionsInReducer, resultIndexes);

        MatrixChar_N_ReducerGroups ret = MRJobConfiguration.computeMatrixCharacteristics(job, realIndexes,
            instructionsInMapper, reblockInstructions, null, null, otherInstructionsInReducer,
            resultIndexes, mapoutputIndexes, false);
        MatrixCharacteristics[] stats = ret.stats;

        //set up the number of reducers (according to output size)
        int numRed = determineNumReducers(rlens, clens, nnz, config.getIntValue(DMLConfig.NUM_REDUCERS), ret.numReducerGroups);
        job.setNumReduceTasks(numRed);

        //setup in-memory reduce buffers budget (the reblock reducer doesn't need much memory)
        //job.set(MRConfigurationNames.MR_REDUCE_INPUT_BUFFER_PERCENT, "0.70");

        // Print the complete instruction
        if (LOG.isTraceEnabled())
            inst.printCompleteMRJobInstruction(stats);

        // Update resultDimsUnknown based on computed "stats"
byte[] resultDimsUnknown = new byte[resultIndexes.length]; for ( int i=0; i < resultIndexes.length; i++ ) { if ( stats[i].getRows() == -1 || stats[i].getCols() == -1 ) { resultDimsUnknown[i] = (byte) 1; } else { resultDimsUnknown[i] = (byte) 0; } } //set up the multiple output files, and their format information MRJobConfiguration.setUpMultipleOutputs(job, resultIndexes, resultDimsUnknown, outputs, outputInfos, true, true); // configure mapper and the mapper output key value pairs job.setMapperClass(ReblockMapper.class); job.setMapOutputKeyClass(MatrixIndexes.class); //represent key offsets for block job.setMapOutputValueClass(TaggedAdaptivePartialBlock.class); //binary cell/block //configure reducer job.setReducerClass(ReblockReducer.class); // By default, the job executes in "cluster" mode. // Determine if we can optimize and run it in "local" mode. // at this point, both reblock_binary and reblock_text are similar MatrixCharacteristics[] inputStats = new MatrixCharacteristics[inputs.length]; for ( int i=0; i < inputs.length; i++ ) { inputStats[i] = new MatrixCharacteristics(rlens[i], clens[i], brlens[i], bclens[i]); } //set unique working dir MRJobConfiguration.setUniqueWorkingDir(job); RunningJob runjob=JobClient.runJob(job); /* Process different counters */ Group group=runjob.getCounters().getGroup(MRJobConfiguration.NUM_NONZERO_CELLS); for(int i=0; i<resultIndexes.length; i++) { // number of non-zeros stats[i].setNonZeros( group.getCounter(Integer.toString(i)) ); // System.out.println("result #"+resultIndexes[i]+" ===>\n"+stats[i]); } return new JobReturn(stats, outputInfos, runjob.isSuccessful()); } private static int determineNumReducers( long[] rlen, long[] clen, long[] nnz, int defaultNumRed, long numRedGroups ) { //init return with default value int ret = defaultNumRed; //determine max output matrix size long maxNumRed = InfrastructureAnalyzer.getRemoteParallelReduceTasks(); long blockSize = InfrastructureAnalyzer.getHDFSBlockSize()/(1024*1024); long maxSize = -1; //in MB for( int i=0; i<rlen.length; i++ ) { long lnnz = (nnz[i] > 0) ? nnz[i] : rlen[i] * clen[i]; long tmp = MatrixBlock.estimateSizeOnDisk(rlen[i], clen[i], lnnz) / (1024*1024); maxSize = Math.max(maxSize, tmp); } //correction max number of reducers on yarn clusters if( InfrastructureAnalyzer.isYarnEnabled() ) maxNumRed = Math.max( maxNumRed, YarnClusterAnalyzer.getNumCores()/2 ); //increase num reducers wrt input size / hdfs blocksize (up to max reducers) ret = (int)Math.max(ret, Math.min(maxSize/blockSize, maxNumRed)); //reduce num reducers for few result blocks ret = (int)Math.min(ret, numRedGroups); //ensure there is at least one reducer ret = Math.max(ret, 1); return ret; } }
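/*
 * A standalone sketch of the reducer-sizing heuristic in determineNumReducers above:
 * grow the configured default with (estimated output size / HDFS block size), cap it
 * at the cluster's reduce-task capacity, clamp to the number of reducer groups, and
 * keep at least one reducer. All inputs here are plain parameters; the original reads
 * them from InfrastructureAnalyzer and DMLConfig. Assumes blockSizeMB > 0.
 */
class ReducerSizingSketch {
    static int numReducers(int defaultNumRed, long maxSizeMB, long blockSizeMB,
                           long maxNumRed, long numRedGroups) {
        int ret = defaultNumRed;
        // grow with output size relative to the HDFS block size, up to cluster capacity
        ret = (int) Math.max(ret, Math.min(maxSizeMB / blockSizeMB, maxNumRed));
        // no more reducers than distinct result groups
        ret = (int) Math.min(ret, numRedGroups);
        // always at least one reducer
        return Math.max(ret, 1);
    }
}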
// ======================================================================== // $Id: SecurityConstraint.java,v 1.44 2005/08/13 00:01:24 gregwilkins Exp $ // Copyright 200-2004 Mort Bay Consulting Pty. Ltd. // ------------------------------------------------------------------------ // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // http://www.apache.org/licenses/LICENSE-2.0 // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // ======================================================================== package org.browsermob.proxy.jetty.http; import org.apache.commons.logging.Log; import org.browsermob.proxy.jetty.jetty.servlet.FormAuthenticator; import org.browsermob.proxy.jetty.log.LogFactory; import org.browsermob.proxy.jetty.util.LazyList; import java.io.IOException; import java.io.Serializable; import java.security.Principal; import java.util.Collections; import java.util.List; /* ------------------------------------------------------------ */ /** Describe an auth and/or data constraint. * * @version $Revision: 1.44 $ * @author Greg Wilkins (gregw) */ public class SecurityConstraint implements Cloneable, Serializable { private static Log log= LogFactory.getLog(SecurityConstraint.class); /* ------------------------------------------------------------ */ public final static String __BASIC_AUTH= "BASIC"; public final static String __FORM_AUTH= "FORM"; public final static String __DIGEST_AUTH= "DIGEST"; public final static String __CERT_AUTH= "CLIENT_CERT"; public final static String __CERT_AUTH2= "CLIENT-CERT"; /* ------------------------------------------------------------ */ public final static int DC_UNSET= -1, DC_NONE= 0, DC_INTEGRAL= 1, DC_CONFIDENTIAL= 2; /* ------------------------------------------------------------ */ public final static String NONE= "NONE"; public final static String ANY_ROLE= "*"; /* ------------------------------------------------------------ */ /* ------------------------------------------------------------ */ /* ------------------------------------------------------------ */ /** Nobody user. * The Nobody UserPrincipal is used to indicate a partial state of * authentication. A request with a Nobody UserPrincipal will be allowed * past all authentication constraints - but will not be considered an * authenticated request. It can be used by Authenticators such as * FormAuthenticator to allow access to logon and error pages within an * authenticated URI tree. */ public static class Nobody implements Principal { public String getName() { return "Nobody"; } } public final static Nobody __NOBODY= new Nobody(); /* ------------------------------------------------------------ */ private String _name; private Object _methods; private Object _roles; private int _dataConstraint= DC_UNSET; private boolean _anyRole= false; private boolean _authenticate= false; private transient List _umMethods; private transient List _umRoles; /* ------------------------------------------------------------ */ /** Constructor. */ public SecurityConstraint() {} /* ------------------------------------------------------------ */ /** Conveniance Constructor. 
 * @param name
 * @param role
 */
public SecurityConstraint(String name, String role)
{
    setName(name);
    addRole(role);
}

/* ------------------------------------------------------------ */
/**
 * @param name
 */
public void setName(String name)
{
    _name= name;
}

/* ------------------------------------------------------------ */
/**
 * @param method
 */
public synchronized void addMethod(String method)
{
    _methods= LazyList.add(_methods, method);
}

/* ------------------------------------------------------------ */
public List getMethods()
{
    if (_umMethods == null && _methods != null)
        _umMethods= Collections.unmodifiableList(LazyList.getList(_methods));
    return _umMethods;
}

/* ------------------------------------------------------------ */
/**
 * @param method Method name.
 * @return True if this constraint applies to the method. If no
 * method has been set, then the constraint applies to all methods.
 */
public boolean forMethod(String method)
{
    if (_methods == null)
        return true;
    for (int i= 0; i < LazyList.size(_methods); i++)
        if (LazyList.get(_methods, i).equals(method))
            return true;
    return false;
}

/* ------------------------------------------------------------ */
/**
 * @param role The rolename. If the rolename is '*', all other
 * roles are removed, anyRole is set true, and subsequent
 * addRole calls are ignored.
 * Authenticate is forced true by this call.
 */
public synchronized void addRole(String role)
{
    _authenticate= true;
    if (ANY_ROLE.equals(role))
    {
        _roles= null;
        _umRoles= null;
        _anyRole= true;
    }
    else if (!_anyRole)
        _roles= LazyList.add(_roles, role);
}

/* ------------------------------------------------------------ */
/**
 * @return True if any user role is permitted.
 */
public boolean isAnyRole()
{
    return _anyRole;
}

/* ------------------------------------------------------------ */
/**
 * @return List of roles for this constraint.
 */
public List getRoles()
{
    if (_umRoles == null && _roles != null)
        _umRoles= Collections.unmodifiableList(LazyList.getList(_roles));
    return _umRoles;
}

/* ------------------------------------------------------------ */
/**
 * @param role
 * @return True if the constraint contains the role.
 */
public boolean hasRole(String role)
{
    return LazyList.contains(_roles, role);
}

/* ------------------------------------------------------------ */
/**
 * @param authenticate True if users must be authenticated
 */
public void setAuthenticate(boolean authenticate)
{
    _authenticate= authenticate;
}

/* ------------------------------------------------------------ */
/**
 * @return True if the constraint requires request authentication
 */
public boolean getAuthenticate()
{
    return _authenticate;
}

/* ------------------------------------------------------------ */
/**
 * @return True if authentication is required but no roles are set
 */
public boolean isForbidden()
{
    return _authenticate && !_anyRole && LazyList.size(_roles) == 0;
}

/* ------------------------------------------------------------ */
/**
 * @param c Data constraint indicator: DC_NONE, DC_INTEGRAL or DC_CONFIDENTIAL
 */
public void setDataConstraint(int c)
{
    if (c < 0 || c > DC_CONFIDENTIAL)
        throw new IllegalArgumentException("Constraint out of range");
    _dataConstraint= c;
}

/* ------------------------------------------------------------ */
/**
 * @return Data constraint indicator: 0=DC_NONE, 1=DC_INTEGRAL, 2=DC_CONFIDENTIAL
 */
public int getDataConstraint()
{
    return _dataConstraint;
}

/* ------------------------------------------------------------ */
/**
 * @return True if a data constraint has been set.
*/ public boolean hasDataConstraint() { return _dataConstraint >= DC_NONE; } /* ------------------------------------------------------------ */ public Object clone() throws CloneNotSupportedException { SecurityConstraint sc = (SecurityConstraint) super.clone(); sc._umMethods=null; sc._umRoles=null; return sc; } /* ------------------------------------------------------------ */ public String toString() { return "SC{" + _name + "," + _methods + "," + (_anyRole ? "*" : (_roles == null ? "-" : _roles.toString())) + "," + (_dataConstraint == DC_NONE ? "NONE}" : (_dataConstraint == DC_INTEGRAL ? "INTEGRAL}" : "CONFIDENTIAL}")); } /* ------------------------------------------------------------ */ /** Check security contraints * @param constraints * @param authenticator * @param realm * @param pathInContext * @param request * @param response * @return false if the request has failed a security constraint or the authenticator has already sent a response. * @exception HttpException * @exception IOException */ public static boolean check( List constraints, Authenticator authenticator, UserRealm realm, String pathInContext, HttpRequest request, HttpResponse response) throws HttpException, IOException { // Combine data and auth constraints int dataConstraint= DC_NONE; Object roles= null; boolean unauthenticated= false; boolean forbidden= false; for (int c= 0; c < constraints.size(); c++) { SecurityConstraint sc= (SecurityConstraint)constraints.get(c); // Check the method applies if (!sc.forMethod(request.getMethod())) continue; // Combine data constraints. if (dataConstraint > DC_UNSET && sc.hasDataConstraint()) { if (sc.getDataConstraint() > dataConstraint) dataConstraint= sc.getDataConstraint(); } else dataConstraint= DC_UNSET; // ignore all other data constraints // Combine auth constraints. if (!unauthenticated && !forbidden) { if (sc.getAuthenticate()) { if (sc.isAnyRole()) { roles= ANY_ROLE; } else { List scr= sc.getRoles(); if (scr == null || scr.size() == 0) { forbidden= true; break; } else { if (roles != ANY_ROLE) { roles= LazyList.addCollection(roles, scr); } } } } else unauthenticated= true; } } // Does this forbid everything? if (forbidden && (!(authenticator instanceof FormAuthenticator) || !((FormAuthenticator)authenticator).isLoginOrErrorPage(pathInContext))) { HttpContext.sendContextError(response,HttpResponse.__403_Forbidden,null); return false; } // Handle data constraint if (dataConstraint > DC_NONE) { HttpConnection connection= request.getHttpConnection(); HttpListener listener= connection.getListener(); switch (dataConstraint) { case SecurityConstraint.DC_INTEGRAL : if (listener.isIntegral(connection)) break; if (listener.getIntegralPort() > 0) { String url= listener.getIntegralScheme() + "://" + request.getHost() + ":" + listener.getIntegralPort() + request.getPath(); if (request.getQuery() != null) url += "?" + request.getQuery(); response.setContentLength(0); response.sendRedirect(url); } else HttpContext.sendContextError(response,HttpResponse.__403_Forbidden,null); return false; case SecurityConstraint.DC_CONFIDENTIAL : if (listener.isConfidential(connection)) break; if (listener.getConfidentialPort() > 0) { String url= listener.getConfidentialScheme() + "://" + request.getHost() + ":" + listener.getConfidentialPort() + request.getPath(); if (request.getQuery() != null) url += "?" 
+ request.getQuery(); response.setContentLength(0); response.sendRedirect(url); } else HttpContext.sendContextError(response,HttpResponse.__403_Forbidden,null); return false; default : HttpContext.sendContextError(response,HttpResponse.__403_Forbidden,null); return false; } } // Does it fail a role check? if (!unauthenticated && roles != null) { if (realm == null) { HttpContext.sendContextError(response,HttpResponse.__500_Internal_Server_Error,"Configuration error"); return false; } Principal user= null; // Handle pre-authenticated request if (request.getAuthType() != null && request.getAuthUser() != null) { // TODO - is this still needed??? user= request.getUserPrincipal(); if (user == null) user= realm.authenticate(request.getAuthUser(), null, request); if (user == null && authenticator != null) user= authenticator.authenticate(realm, pathInContext, request, response); } else if (authenticator != null) { // User authenticator. user= authenticator.authenticate(realm, pathInContext, request, response); } else { // don't know how authenticate log.warn("Mis-configured Authenticator for " + request.getPath()); HttpContext.sendContextError(response,HttpResponse.__500_Internal_Server_Error,"Configuration error"); } // If we still did not get a user if (user == null) return false; // Auth challenge or redirection already sent else if (user == __NOBODY) return true; // The Nobody user indicates authentication in transit. if (roles != ANY_ROLE) { boolean inRole= false; for (int r= LazyList.size(roles); r-- > 0;) { if (realm.isUserInRole(user, (String)LazyList.get(roles, r))) { inRole= true; break; } } if (!inRole) { log.warn("AUTH FAILURE: role for " + user.getName()); if ("BASIC".equalsIgnoreCase(authenticator.getAuthMethod())) ((BasicAuthenticator)authenticator).sendChallenge(realm, response); else HttpContext.sendContextError(response,HttpResponse.__403_Forbidden,"User not in required role"); return false; // role failed. } } } else { request.setUserPrincipal(HttpRequest.__NOT_CHECKED); } return true; } }
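/* ------------------------------------------------------------ */
/* Usage sketch: building a constraint that requires an authenticated "admin"
 * role over HTTPS for POST requests. It uses only the setters defined above;
 * how the constraint is registered against a path depends on the surrounding
 * HttpContext API and is omitted here. The class and method names are
 * illustrative, not part of the original file.
 */
class SecurityConstraintExample
{
    static SecurityConstraint adminPostConstraint()
    {
        SecurityConstraint sc = new SecurityConstraint();
        sc.setName("admin-post");
        sc.addMethod("POST");  // applies to POST only; with no methods set, all methods match
        sc.addRole("admin");   // also forces authenticate=true
        sc.setDataConstraint(SecurityConstraint.DC_CONFIDENTIAL); // require a confidential (SSL) transport
        return sc;
    }
}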
/**
 * Licensed to Apereo under one or more contributor license
 * agreements. See the NOTICE file distributed with this work
 * for additional information regarding copyright ownership.
 * Apereo licenses this file to you under the Apache License,
 * Version 2.0 (the "License"); you may not use this file
 * except in compliance with the License. You may obtain a
 * copy of the License at the following location:
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apereo.portal.portlet.container;

import java.io.IOException;
import java.io.OutputStream;
import java.io.PrintWriter;
import java.util.Locale;
import javax.portlet.CacheControl;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.lang.Validate;
import org.apache.pluto.container.PortletContainer;
import org.apache.pluto.container.PortletMimeResponseContext;
import org.apache.pluto.container.PortletURLProvider;
import org.apache.pluto.container.PortletURLProvider.TYPE;
import org.apereo.portal.portlet.container.properties.IRequestPropertiesManager;
import org.apereo.portal.portlet.container.services.IPortletCookieService;
import org.apereo.portal.portlet.om.IPortletWindow;
import org.apereo.portal.portlet.om.IPortletWindowId;
import org.apereo.portal.portlet.rendering.IPortletRenderer;
import org.apereo.portal.portlet.rendering.PortletOutputHandler;
import org.apereo.portal.portlet.url.PortletURLProviderImpl;
import org.apereo.portal.url.IPortalUrlBuilder;
import org.apereo.portal.url.IPortalUrlProvider;
import org.apereo.portal.url.IPortletUrlBuilder;
import org.apereo.portal.url.UrlType;

/**
 * @author Eric Dalquist
 * @version $Revision$
 */
public abstract class PortletMimeResponseContextImpl extends PortletResponseContextImpl implements
        PortletMimeResponseContext {

    private final IPortalUrlProvider portalUrlProvider;
    private final PortletOutputHandler portletOutputHandler;
    private final CacheControl cacheControl;

    public PortletMimeResponseContextImpl(PortletContainer portletContainer, IPortletWindow portletWindow,
            HttpServletRequest containerRequest, HttpServletResponse containerResponse,
            IRequestPropertiesManager requestPropertiesManager, IPortalUrlProvider portalUrlProvider,
            IPortletCookieService portletCookieService) {

        super(portletContainer, portletWindow, containerRequest, containerResponse,
                requestPropertiesManager, portletCookieService);

        Validate.notNull(portalUrlProvider, "portalUrlProvider can not be null");
        this.portalUrlProvider = portalUrlProvider;

        this.portletOutputHandler = (PortletOutputHandler)containerRequest.getAttribute(IPortletRenderer.ATTRIBUTE__PORTLET_OUTPUT_HANDLER);
        Validate.notNull(portletOutputHandler, "No " + IPortletRenderer.ATTRIBUTE__PORTLET_OUTPUT_HANDLER + " attribute found in request");

        this.cacheControl = (CacheControl)containerRequest.getAttribute(IPortletRenderer.ATTRIBUTE__PORTLET_CACHE_CONTROL);
        Validate.notNull(cacheControl, "No " + IPortletRenderer.ATTRIBUTE__PORTLET_CACHE_CONTROL + " attribute found in request");
    }

    protected final PortletOutputHandler getPortletOutputHandler() {
        return portletOutputHandler;
    }

    /* (non-Javadoc)
     * @see org.apache.pluto.container.PortletMimeResponseContext#getCacheControl()
     */
@Override public CacheControl getCacheControl() { this.checkContextStatus(); return this.cacheControl; } /* (non-Javadoc) * @see org.apache.pluto.container.PortletMimeResponseContext#flushBuffer() */ @Override public void flushBuffer() throws IOException { this.checkContextStatus(); this.portletOutputHandler.flushBuffer(); } /* (non-Javadoc) * @see org.apache.pluto.container.PortletMimeResponseContext#getBufferSize() */ @Override public int getBufferSize() { return this.portletOutputHandler.getBufferSize(); } /* (non-Javadoc) * @see org.apache.pluto.container.PortletMimeResponseContext#getCharacterEncoding() */ @Override public String getCharacterEncoding() { this.checkContextStatus(); return this.servletResponse.getCharacterEncoding(); } /* (non-Javadoc) * @see org.apache.pluto.container.PortletMimeResponseContext#getContentType() */ @Override public String getContentType() { this.checkContextStatus(); return this.servletResponse.getContentType(); } /* (non-Javadoc) * @see org.apache.pluto.container.PortletMimeResponseContext#getLocale() */ @Override public Locale getLocale() { this.checkContextStatus(); return this.servletResponse.getLocale(); } /* (non-Javadoc) * @see org.apache.pluto.container.PortletMimeResponseContext#getOutputStream() */ @Override public OutputStream getOutputStream() throws IOException, IllegalStateException { this.checkContextStatus(); return this.portletOutputHandler.getOutputStream(); } /* (non-Javadoc) * @see org.apache.pluto.container.PortletMimeResponseContext#getPortletURLProvider(org.apache.pluto.container.PortletURLProvider.TYPE) */ @Override public PortletURLProvider getPortletURLProvider(TYPE type) { final IPortletWindowId portletWindowId = this.portletWindow.getPortletWindowId(); final UrlType urlType = UrlType.fromPortletUrlType(type); final IPortalUrlBuilder portalUrlBuilder = this.portalUrlProvider.getPortalUrlBuilderByPortletWindow(containerRequest, portletWindowId, urlType); final IPortletUrlBuilder portletUrlBuilder = portalUrlBuilder.getPortletUrlBuilder(portletWindowId); return new PortletURLProviderImpl(portletUrlBuilder); } /* (non-Javadoc) * @see org.apache.pluto.container.PortletMimeResponseContext#getWriter() */ @Override public PrintWriter getWriter() throws IOException, IllegalStateException { this.checkContextStatus(); return this.portletOutputHandler.getPrintWriter(); } /* (non-Javadoc) * @see org.apache.pluto.container.PortletMimeResponseContext#isCommitted() */ @Override public boolean isCommitted() { return this.portletOutputHandler.isCommitted(); } /* (non-Javadoc) * @see org.apache.pluto.container.PortletMimeResponseContext#reset() */ @Override public void reset() { this.checkContextStatus(); this.portletOutputHandler.reset(); } /* (non-Javadoc) * @see org.apache.pluto.container.PortletMimeResponseContext#resetBuffer() */ @Override public void resetBuffer() { this.checkContextStatus(); this.portletOutputHandler.resetBuffer(); } /* (non-Javadoc) * @see org.apache.pluto.container.PortletMimeResponseContext#setBufferSize(int) */ @Override public void setBufferSize(int size) { this.checkContextStatus(); this.portletOutputHandler.setBufferSize(size); } /* (non-Javadoc) * @see org.apache.pluto.container.PortletMimeResponseContext#setContentType(java.lang.String) */ @Override public void setContentType(String contentType) { this.checkContextStatus(); this.portletOutputHandler.setContentType(contentType); } }
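/*
 * The class above follows a guard-then-delegate pattern: every accessor and
 * mutator first calls checkContextStatus() (inherited from
 * PortletResponseContextImpl) and then forwards to the PortletOutputHandler.
 * Below is a minimal illustration of the same pattern with stand-in types
 * (a StringBuilder sink in place of PortletOutputHandler); it is not part of
 * the original class.
 */
class GuardedDelegateSketch {
    private boolean released = false;
    private final StringBuilder sink = new StringBuilder(); // stands in for PortletOutputHandler

    private void checkStatus() {
        // guard: fail fast once the context has been released
        if (released) {
            throw new IllegalStateException("response context has been released");
        }
    }

    void write(String s) {
        checkStatus();  // guard first
        sink.append(s); // then delegate to the underlying handler
    }

    void release() {
        released = true;
    }
}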
package abra.cadabra; import java.io.BufferedReader; import java.io.FileReader; import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import abra.CompareToReference2; import abra.Feature; import abra.Logger; import abra.Pair; import abra.SAMRecordUtils; import htsjdk.samtools.Cigar; import htsjdk.samtools.CigarElement; import htsjdk.samtools.CigarOperator; import htsjdk.samtools.SAMFileHeader; import htsjdk.samtools.SAMRecord; import htsjdk.samtools.SAMSequenceRecord; import htsjdk.samtools.SamReader; import htsjdk.samtools.TextCigarCodec; public class SimpleAlleleCounter { private static final int MIN_MAPQ = 20; private static final int MIN_BASEQ = 20; private static final int MAX_READS = 500000; private String bam; private ReadLocusReader sample; private CompareToReference2 c2r; private String inputVcf; private List<InputVariant> inputVariants; List<SampleCall> sampleRecords = new ArrayList<SampleCall>(); SimpleAlleleCounter(CompareToReference2 c2r, String bam, String vcf) { this.c2r = c2r; this.bam = bam; this.inputVcf = vcf; } void run() throws IOException { loadInputVariants(); outputHeader(); for (InputVariant variant : inputVariants) { process(variant); } } void process(InputVariant variant) throws IOException { Feature region = new Feature(variant.getChrom(), variant.getPos(), variant.getPos()); sample = new ReadLocusReader(bam, region, MAX_READS); processSimple(variant); sample.close(); } private void loadInputVariants() throws IOException { inputVariants = new ArrayList<InputVariant>(); BufferedReader reader = new BufferedReader(new FileReader(inputVcf)); String line = reader.readLine(); while (line != null) { if (!line.startsWith("#") && !line.trim().equals("")) { inputVariants.add(InputVariant.create(line)); } line = reader.readLine(); } reader.close(); } private void processSimple(InputVariant variant) { Iterator<ReadsAtLocus> sampleIter = sample.iterator(); ReadsAtLocus sampleReads = null; SampleCall call = null; while (sampleIter.hasNext() && call == null) { sampleReads = sampleIter.next(); call = processLocus(sampleReads, variant); } if (call == null) { // Create empty call Allele ref = Allele.getAllele(variant.getRef().charAt(0)); Allele alt = variant.getAllele(); call = SampleCall.emptyCall(variant.getChrom(), variant.getPos(), ref, alt, variant.getRef(), variant.getAlt()); } System.out.println(call); } // private boolean sampleCallExceedsThresholds(SampleCall call) { // return call.alt != null && call.alt != Allele.UNK && call.alleleCounts.get(call.alt).getCount() >= MIN_SUPPORTING_READS && // call.getVaf() >= options.getMinVaf() && call.qual >= options.getMinQual(); // } private int getRepeatLength(int period, String unit, Allele.Type alleleType) { if (alleleType == Allele.Type.DEL) { // Subtract 1 from deletions as we are looking for reference context period = Math.max(period-1, 0); } else if (alleleType != Allele.Type.INS) { period = 0; } return period * unit.length(); } // Returns Pair of <base, quality(phred)> private Pair<Character,Character> getBaseAtPosition(SAMRecord read, int refPos) { int readPos = 0; int refPosInRead = read.getAlignmentStart(); int cigarElementIdx = 0; while (refPosInRead <= refPos && cigarElementIdx < read.getCigar().numCigarElements() && readPos < read.getReadLength()) { CigarElement elem = read.getCigar().getCigarElement(cigarElementIdx++); switch(elem.getOperator()) { case H: //NOOP break; case S: case I: readPos += elem.getLength(); break; 
case D: case N: refPosInRead += elem.getLength(); break; case M: if (refPos < (refPosInRead + elem.getLength())) { readPos += refPos - refPosInRead; if (readPos < read.getReadLength()) { // Found the base. Return it return new Pair<Character, Character>(read.getReadString().charAt(readPos), read.getBaseQualityString().charAt(readPos)); } } else { readPos += elem.getLength(); refPosInRead += elem.getLength(); } break; default: throw new IllegalArgumentException("Invalid Cigar Operator: " + elem.getOperator() + " for read: " + read.getSAMString()); } } return null; } private char getRefBase(String chr, int pos) { return c2r.getSequence(chr, pos, 1).charAt(0); } private SampleCall processLocus(ReadsAtLocus reads, InputVariant variant) { // Always false here boolean isSomatic = false; SampleCall call = null; String chromosome = reads.getChromosome(); int position = reads.getPosition(); // Only process positions if (!variant.getChrom().equals(chromosome) || variant.getPos() != position) { return null; } int tumorMapq0 = 0; int mismatchExceededReads = 0; int totalDepth = 0; Map<Allele, AlleleCounts> alleleCounts = new HashMap<Allele, AlleleCounts>(); // Always include ref allele char refBase = getRefBase(chromosome, position); Allele refAllele = Allele.getAllele(refBase); alleleCounts.put(refAllele, new AlleleCounts()); // Add input variant allele alleleCounts.put(variant.getAllele(), new AlleleCounts()); for (SAMRecord read : reads.getReads()) { if (!read.getDuplicateReadFlag() && !read.getReadUnmappedFlag() && (read.getFlags() & 0x900) == 0) { totalDepth += 1; if (read.getMappingQuality() < MIN_MAPQ) { tumorMapq0 += 1; continue; } // This causes SNPs in HLA regions to drop out, so only run for Indels. if ((variant.getAllele().getType() == Allele.Type.DEL || variant.getAllele().getType() == Allele.Type.INS) && read.getStringAttribute("YA") == null) { // Cap # mismatches in read that can be counted as reference // This is done because realigner caps # of mismatches for remapped indel reads. // This is needed to remove ref bias int editDist = SAMRecordUtils.getEditDistance(read, null, false); int indelBases = SAMRecordUtils.getNumIndelBases(read); int numMismatches = editDist - indelBases; float mismatchRate = (float) .05; if (numMismatches > SAMRecordUtils.getMappedLength(read) * mismatchRate) { // Skip this read mismatchExceededReads += 1; continue; } } IndelInfo readElement = checkForIndelAtLocus(read, position); Allele allele = Allele.UNK; if (readElement != null) { if (readElement.getCigarElement().getOperator() == CigarOperator.D) { allele = new Allele(Allele.Type.DEL, readElement.getCigarElement().getLength()); } else if (readElement.getCigarElement().getOperator() == CigarOperator.I) { allele = new Allele(Allele.Type.INS, readElement.getCigarElement().getLength()); } } else { // Pair in format <base, quality> Pair<Character, Character> base = getBaseAtPosition(read, position); if (variant.getAllele().getType() == Allele.Type.DEL || variant.getAllele().getType() == Allele.Type.INS) { // Indel Pair<Character, Character> nextBase = getBaseAtPosition(read, position+1); IndelInfo readIndel = checkForIndelAtLocus(read.getAlignmentStart(), read.getCigar(), position); if (readIndel == null && base != null && nextBase != null && base.getSecond()-'!' >= MIN_BASEQ) { allele = Allele.getAllele(base.getFirst()); } } else if (base != null && variant.getAllele().getType() == Allele.Type.MNP) { // MNP if (base.getFirst() == variant.getAlt().charAt(0) && base.getSecond()-'!' 
>= MIN_BASEQ) { // Look ahead to remaining bases for comparison StringBuffer bases = new StringBuffer(); bases.append(base.getFirst()); int i = 1; while (i < variant.getAlt().length()) { Pair<Character, Character> nextBase = getBaseAtPosition(read, position+i); if (nextBase != null && nextBase.getSecond()-'!' >= MIN_BASEQ) { bases.append(nextBase.getFirst()); } else { break; } i += 1; } if (bases.toString().equals(variant.getAlt())) { allele = variant.getAllele(); } else { allele = Allele.getAllele(base.getFirst()); } } else { allele = Allele.getAllele(base.getFirst()); } } else { // SNP if (base != null && base.getSecond()-'!' >= MIN_BASEQ) { allele = Allele.getAllele(base.getFirst()); } } } if (allele != Allele.UNK) { if (!alleleCounts.containsKey(allele)) { alleleCounts.put(allele, new AlleleCounts()); } AlleleCounts ac = alleleCounts.get(allele); ac.incrementCount(read); if (readElement != null) { ac.updateReadIdx(readElement.getReadIndex()); } if (allele.getType() == Allele.Type.INS) { ac.updateInsertBases(readElement.getInsertBases()); } } } } // Allow readId sets to be garbage collected. for (AlleleCounts counts : alleleCounts.values()) { counts.clearReadIds(); } // Allele alt = getAltIndelAllele(Allele.getAllele(refBase), alleleCounts); Allele alt = variant.getAllele(); String refSeq = null; if (!isSomatic) { int chromosomeLength = c2r.getChromosomeLength(chromosome); refSeq = "N"; if (position > 10 && position < chromosomeLength-10) { refSeq = c2r.getSequence(chromosome, position-9, 20); } } // if (alt != null && (alt.getType() == Allele.Type.DEL || alt.getType() == Allele.Type.INS) && refAllele != Allele.UNK) { if (alt != null && refAllele != Allele.UNK) { AlleleCounts altCounts = alleleCounts.get(alt); AlleleCounts refCounts = alleleCounts.get(refAllele); Pair<Integer, String> repeat = getRepeatPeriod(chromosome, position, alt, variant.getAlt()); double qual = 0; int usableDepth = 0; int repeatLength = getRepeatLength(repeat.getFirst(), repeat.getSecond(), alt.getType()); AlleleCounts.setSpanEnd(position+repeatLength, alleleCounts); usableDepth = AlleleCounts.sum(alleleCounts.values()); qual = calcPhredScaledQuality(refCounts.getCount(), altCounts.getCount(), usableDepth); String refField = variant.getRef(); String altField = variant.getAlt(); // TODO: Check preferred insert bases against input variant!!! 
String altInsert = null; if (variant.getAllele().getType() == Allele.Type.INS) { altInsert = refField + getPreferredInsertBases(alt, altCounts); } // if (alt.getType() == Allele.Type.DEL) { // refField = getDelRefField(chromosome, position, alt.getLength()); // altField = refField.substring(0, 1); // } else if (alt.getType() == Allele.Type.INS) { // refField = getInsRefField(chromosome, position); // altField = refField + getPreferredInsertBases(alt, altCounts); // } call = new SampleCall(chromosome, position, refAllele, alt, alleleCounts, totalDepth, usableDepth, qual, repeat.getFirst(), repeat.getSecond(), tumorMapq0, refField, altField, mismatchExceededReads, refSeq, altInsert); } else { String refField = getInsRefField(chromosome, position); String altField = "."; double qual = 0; int rp = 0; String ru = ""; call = new SampleCall(chromosome, position, refAllele, Allele.UNK, alleleCounts, totalDepth, 0, qual, rp, ru, tumorMapq0, refField, altField, mismatchExceededReads, refSeq, ""); } return call; } private String getPreferredInsertBases(Allele allele, AlleleCounts counts) { String bases = null; if (counts.getPreferredInsertBases().isEmpty()) { StringBuffer buf = new StringBuffer(); for (int i=0; i<allele.getLength(); i++) { buf.append('N'); } bases = buf.toString(); } else { bases = counts.getPreferredInsertBases(); } return bases; } public static class SampleCall { public static final String FORMAT = "GT:DP:DP2:AD:AD2:ROR:LMQ:ISPAN:VAF:MER:FROR"; String chromosome; int position; Allele ref; Allele alt; Map<Allele, AlleleCounts> alleleCounts; int totalReads; int usableDepth; double qual; int repeatPeriod; String repeatUnit; int mapq0; String refField; String altField; int mismatchExceededReads; HomopolymerRun hrun; String context; int ispan; double fs; String altInsert; static SampleCall emptyCall(String chromosome, int position, Allele ref, Allele alt, String refField, String altField) { SampleCall call = new SampleCall(); call.chromosome = chromosome; call.position = position; call.ref = ref; call.alt = alt; call.alleleCounts = new HashMap<Allele, AlleleCounts>(); call.alleleCounts.put(ref, new AlleleCounts()); call.alleleCounts.put(alt, new AlleleCounts()); call.refField = refField; call.altField = altField; return call; } private SampleCall() { } SampleCall(String chromosome, int position, Allele ref, Allele alt, Map<Allele, AlleleCounts> alleleCounts, int totalReads, int usableDepth, double qual, int repeatPeriod, String repeatUnit, int mapq0, String refField, String altField, int mismatchExceededReads, String context, String altInsert) { this.chromosome = chromosome; this.position = position; this.ref = ref; this.alt = alt; this.alleleCounts = alleleCounts; this.totalReads = totalReads; this.usableDepth = usableDepth; this.qual = qual; this.repeatPeriod = repeatPeriod; this.repeatUnit = repeatUnit; this.mapq0 = mapq0; this.refField = refField; this.altField = altField; this.altInsert = altInsert; AlleleCounts altCounts = alleleCounts.get(alt); this.mismatchExceededReads = mismatchExceededReads; if (context != null) { this.hrun = HomopolymerRun.find(context); this.context = context; } ispan = altCounts == null ? 
0 : altCounts.getMaxReadIdx()-altCounts.getMinReadIdx();
        }

        public float getVaf() {
            float vaf = 0;
            AlleleCounts altCounts = alleleCounts.get(alt);
            if (altCounts != null && usableDepth > 0) {
                vaf = (float) altCounts.getCount() / (float) usableDepth;
            }
            return vaf;
        }

        public String getSampleInfo(Allele ref, Allele alt) {
            AlleleCounts refCounts = alleleCounts.get(ref);
            AlleleCounts altCounts = alleleCounts.get(alt);
            if (refCounts == null) {
                refCounts = AlleleCounts.EMPTY_COUNTS;
            }
            if (altCounts == null) {
                altCounts = AlleleCounts.EMPTY_COUNTS;
            }
            float vaf = getVaf();
            // Calculate phred scaled probability of read orientations occurring by chance
            int refFwd = refCounts.getFwd();
            int refRev = refCounts.getRev();
            int altFwd = altCounts.getFwd();
            int altRev = altCounts.getRev();
            FishersExactTest test = new FishersExactTest();
            double fsP = test.twoTailedTest(refFwd, refRev, altFwd, altRev);
            // Use abs to get rid of -0
            this.fs = Math.abs(-10 * Math.log10(fsP));
            String sampleInfo = String.format("0/1:%d:%d:%d,%d:%d,%d:%d,%d,%d,%d:%d:%d:%.2f:%d:%.2f",
                    usableDepth, totalReads, refCounts.getCount(), altCounts.getCount(),
                    refCounts.getTotalCount(), altCounts.getTotalCount(),
                    refCounts.getFwd(), refCounts.getRev(), altCounts.getFwd(), altCounts.getRev(),
                    mapq0, ispan, vaf, mismatchExceededReads, fs);
            return sampleInfo;
        }

        public String toString() {
            String pos = String.valueOf(position);
            String qualStr = String.format("%.2f", qual);
            int hrunLen = hrun != null ? hrun.getLength() : 0;
            char hrunBase = hrun != null ? hrun.getBase() : 'N';
            int hrunPos = hrun != null ? hrun.getPos() : 0;
            String info;
            if (totalReads == 0) {
                // Skip empty call
                info = ".";
            } else if (altInsert != null && altField.length() > 1 && !altInsert.equals(altField)
                    && alleleCounts.get(alt).getCount() > 0) {
                // Record info plus alternative inserted sequence (same length, but base mismatches with input variant)
                info = String.format("RP=%d;RU=%s;HRUN=%d,%d;CTX=%s;ALT_INSERT=%s",
                        repeatPeriod, repeatUnit, hrunLen, hrunPos, context, altInsert.substring(1));
            } else {
                info = String.format("RP=%d;RU=%s;HRUN=%d,%d;CTX=%s",
                        repeatPeriod, repeatUnit, hrunLen, hrunPos, context);
            }
            String sampleInfo = getSampleInfo(ref, alt);
            return String.join("\t", chromosome, pos, ".", refField, altField, qualStr, ".",
                    info, SampleCall.FORMAT, sampleInfo);
        }
    }

    static double strandBias(int rf, int rr, int af, int ar) {
        FishersExactTest test = new FishersExactTest();
        double sb = test.twoTailedTest(rf, rr, af, ar);
        return sb;
    }

    static double calcPhredScaledQuality(int refObs, int altObs, int dp) {
        return -10 * Math.log10(BetaBinomial.betabinCDF(dp, altObs));
    }

    private Pair<Integer, String> getRepeatPeriod(String chromosome, int position, Allele indel, String altString) {
        int chromosomeEnd = c2r.getReferenceLength(chromosome);
        int length = Math.min(indel.getLength() * 100, chromosomeEnd-position-2);
        String sequence = c2r.getSequence(chromosome, position+1, length);
        String bases;
        if (indel.getType() == Allele.Type.DEL) {
            bases = sequence.substring(0, indel.getLength());
        } else {
            bases = altString.substring(1);
        }
        String repeatUnit = RepeatUtils.getRepeatUnit(bases);
        int period = RepeatUtils.getRepeatPeriod(repeatUnit, sequence);
        return new Pair<Integer, String>(period, repeatUnit);
    }

    private String getDelRefField(String chromosome, int position, int length) {
        return c2r.getSequence(chromosome, position, length+1);
    }

    private String getInsRefField(String chromosome, int position) {
        return c2r.getSequence(chromosome, position, 1);
    }

    private IndelInfo
checkForIndelAtLocus(SAMRecord read, int refPos) { IndelInfo elem = null; // if (refPos == 105243047 && read.getReadName().equals("D7T4KXP1:400:C5F94ACXX:5:2302:20513:30410")) { // System.out.println("bar"); // } String contigInfo = read.getStringAttribute("YA"); if (contigInfo != null) { // Get assembled contig info. String[] fields = contigInfo.split(":"); int contigPos = Integer.parseInt(fields[1]); Cigar contigCigar = TextCigarCodec.decode(fields[2]); // Check to see if contig contains indel at current locus elem = checkForIndelAtLocus(contigPos, contigCigar, refPos); if (elem != null) { // Now check to see if this read supports the indel IndelInfo readElem = checkForIndelAtLocus(read.getAlignmentStart(), read.getCigar(), refPos); // Allow partially overlapping indels to support contig // (Should only matter for inserts) if (readElem == null || readElem.getCigarElement().getOperator() != elem.getCigarElement().getOperator()) { // Read element doesn't match contig indel elem = null; } else { elem.setReadIndex(readElem.getReadIndex()); // If this read overlaps the entire insert, capture the bases. if (elem.getCigarElement().getOperator() == CigarOperator.I) { if (elem.getCigarElement().getLength() == readElem.getCigarElement().getLength()) { String insertBases = read.getReadString().substring(readElem.getReadIndex(), readElem.getReadIndex()+readElem.getCigarElement().getLength()); elem.setInsertBases(insertBases); } else if (readElem.getCigarElement().getLength() < elem.getCigarElement().getLength()) { int lengthDiff = elem.getCigarElement().getLength() - readElem.getCigarElement().getLength(); if (readElem.getReadIndex() == 0) { elem.setReadIndex(readElem.getReadIndex() - lengthDiff); } else if (readElem.getReadIndex() == read.getReadLength()-1) { elem.setReadIndex(readElem.getReadIndex() + lengthDiff); } } } } } } return elem; } private IndelInfo checkForIndelAtLocus(int alignmentStart, Cigar cigar, int refPos) { IndelInfo ret = null; int readIdx = 0; int currRefPos = alignmentStart; for (CigarElement element : cigar.getCigarElements()) { if (element.getOperator() == CigarOperator.M) { readIdx += element.getLength(); currRefPos += element.getLength(); } else if (element.getOperator() == CigarOperator.I) { if (currRefPos == refPos+1) { ret = new IndelInfo(element, readIdx); break; } readIdx += element.getLength(); } else if (element.getOperator() == CigarOperator.D) { if (currRefPos == refPos+1) { ret = new IndelInfo(element, readIdx); break; } currRefPos += element.getLength(); } else if (element.getOperator() == CigarOperator.S) { readIdx += element.getLength(); } else if (element.getOperator() == CigarOperator.N) { currRefPos += element.getLength(); } if (currRefPos > refPos+1) { break; } } return ret; } private void outputHeader() throws IOException { SAMFileHeader header; String vcfColumns; SamReader reader = SAMRecordUtils.getSamReader(bam); header = reader.getFileHeader(); reader.close(); vcfColumns = "#CHROM POS ID REF ALT QUAL FILTER INFO FORMAT SAMPLE"; System.out.println("##fileformat=VCFv4.2"); System.out.println("##reference=file://" + c2r.getRefFileName()); for (SAMSequenceRecord seq : header.getSequenceDictionary().getSequences()) { System.out.println(String.format("##contig=<ID=%s,length=%d>", seq.getSequenceName(), seq.getSequenceLength())); } System.out.println("##INFO=<ID=RP,Number=1,Type=Integer,Description=\"Number of times smallest repeating alternate sequence appears in the reference\">"); 
System.out.println("##INFO=<ID=RU,Number=1,Type=String,Description=\"Smallest repeat unit within alternate sequence. Appears RP times in reference\">"); System.out.println("##INFO=<ID=HRUN,Number=2,Type=Integer,Description=\"Length,position of homopolymer run found in CTX\">"); System.out.println("##INFO=<ID=CTX,Number=1,Type=String,Description=\"Reference context sequence\">"); System.out.println("##FORMAT=<ID=GT,Number=1,Type=String,Description=\"Genotype\">"); System.out.println("##FORMAT=<ID=DP,Number=1,Type=Integer,Description=\"Depth (fragment)\">"); System.out.println("##FORMAT=<ID=DP2,Number=1,Type=Integer,Description=\"Depth 2 (read)\">"); System.out.println("##FORMAT=<ID=AD,Number=2,Type=Integer,Description=\"Allele Depth (fragment)\">"); System.out.println("##FORMAT=<ID=AD2,Number=2,Type=Integer,Description=\"Allele Depth (read)\">"); System.out.println("##FORMAT=<ID=ROR,Number=4,Type=Integer,Description=\"Read Orientation (ref_fwd, ref_rev, alt_fwd, alt_rev)\">"); System.out.println("##FORMAT=<ID=LMQ,Number=1,Type=Integer,Description=\"Number of reads filtered due to low mapping quality\">"); System.out.println("##FORMAT=<ID=ISPAN,Number=1,Type=Integer,Description=\"Max variant read pos minus min variant read pos\">"); System.out.println("##FORMAT=<ID=VAF,Number=1,Type=Float,Description=\"Variant allele frequency\">"); System.out.println("##FORMAT=<ID=MER,Number=1,Type=Integer,Description=\"Number of ref reads with num mismatches greater than read length * .05\">"); System.out.println("##FORMAT=<ID=FROR,Number=1,Type=Float,Description=\"Phred scaled Fisher's Exact Test for read orientation\">"); System.out.println(vcfColumns); } static class InputVariant { private String chrom; private int pos; private String ref; private String alt; private Allele allele; static InputVariant create(String str) { String[] fields = str.split("\\s"); String chrom = fields[0]; int pos = Integer.parseInt(fields[1]); String ref = fields[3]; String alt = fields[4]; int length = 1; if (ref.length() != 1 && alt.length() != 1 && ref.length() != alt.length()) { // Only supporting simple indel representations for now. 
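            // e.g. REF=ACGT/ALT=A (deletion) and REF=A/ALT=ACGT (insertion) are supported,
            // as are same-length substitutions (SNPs/MNPs); complex records such as
            // REF=AC/ALT=GTT are not.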
            throw new UnsupportedOperationException("At least one of the REF and ALT fields must be of length 1 for indels");
        }

        Allele allele = Allele.UNK;

        if (ref.length() > alt.length()) {
            length = ref.length() - alt.length();
            allele = new Allele(Allele.Type.DEL, length);
        } else if (alt.length() > ref.length()) {
            length = alt.length() - ref.length();
            allele = new Allele(Allele.Type.INS, length);
        } else if (alt.length() > 1 && alt.length() == ref.length()) {
            length = alt.length();
            allele = Allele.getMnpAllele(alt);
        } else {
            allele = Allele.getAllele(alt.charAt(0));
        }

        return new InputVariant(chrom, pos, ref, alt, allele);
    }

    private InputVariant(String chrom, int pos, String ref, String alt, Allele allele) {
        this.chrom = chrom;
        this.pos = pos;
        this.ref = ref;
        this.alt = alt;
        this.allele = allele;
    }

    public String getChrom() {
        return chrom;
    }

    public int getPos() {
        return pos;
    }

    public String getRef() {
        return ref;
    }

    public String getAlt() {
        return alt;
    }

    public Allele getAllele() {
        return allele;
    }
}

public static void main(String[] args) throws Exception {
    String ref = args[0];
    String bam = args[1];
    String vcf = args[2];

    CompareToReference2 c2r = new CompareToReference2();
    c2r.init(ref);

    SimpleAlleleCounter sac = new SimpleAlleleCounter(c2r, bam, vcf);
    sac.run();
}
}
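/*
 * Illustrative sketch, not part of the original sources: the FS value emitted by
 * getSampleInfo above is a phred-scaled two-tailed Fisher's exact p-value for
 * read-orientation bias. The snippet below shows just the phred conversion; the
 * p-value is a made-up stand-in for the FishersExactTest result.
 */
public class PhredScaleSketch {
    public static void main(String[] args) {
        double fsP = 0.05; // hypothetical two-tailed Fisher's exact p-value
        // -10 * log10(p), with Math.abs stripping the -0.0 produced when p == 1.0
        double fs = Math.abs(-10 * Math.log10(fsP));
        System.out.printf("FS=%.2f%n", fs); // prints FS=13.01
    }
}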
/* * Copyright 2015-present Facebook, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. You may obtain * a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package com.facebook.buck.cxx; import com.facebook.buck.cxx.toolchain.CxxBuckConfig; import com.facebook.buck.cxx.toolchain.CxxPlatform; import com.facebook.buck.cxx.toolchain.linker.Linker; import com.facebook.buck.cxx.toolchain.nativelink.NativeLinkTarget; import com.facebook.buck.cxx.toolchain.nativelink.NativeLinkTargetMode; import com.facebook.buck.cxx.toolchain.nativelink.NativeLinkable; import com.facebook.buck.cxx.toolchain.nativelink.NativeLinkableInput; import com.facebook.buck.cxx.toolchain.nativelink.NativeLinkables; import com.facebook.buck.graph.AbstractBreadthFirstTraversal; import com.facebook.buck.graph.DirectedAcyclicGraph; import com.facebook.buck.graph.MutableDirectedGraph; import com.facebook.buck.graph.TopologicalSort; import com.facebook.buck.io.filesystem.ProjectFilesystem; import com.facebook.buck.model.BuildTarget; import com.facebook.buck.model.BuildTargets; import com.facebook.buck.model.Flavor; import com.facebook.buck.model.InternalFlavor; import com.facebook.buck.rules.BuildRuleParams; import com.facebook.buck.rules.BuildRuleResolver; import com.facebook.buck.rules.DefaultBuildTargetSourcePath; import com.facebook.buck.rules.SourcePath; import com.facebook.buck.rules.SourcePathRuleFinder; import com.facebook.buck.rules.args.Arg; import com.facebook.buck.rules.args.SourcePathArg; import com.facebook.buck.rules.args.StringArg; import com.facebook.buck.util.immutables.BuckStyleImmutable; import com.google.common.base.Functions; import com.google.common.base.Preconditions; import com.google.common.base.Predicates; import com.google.common.collect.FluentIterable; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import com.google.common.collect.Maps; import com.google.common.collect.Sets; import java.nio.file.Path; import java.util.ArrayList; import java.util.LinkedHashMap; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Set; import org.immutables.value.Value; public class Omnibus { private static final Flavor OMNIBUS_FLAVOR = InternalFlavor.of("omnibus"); private static final Flavor DUMMY_OMNIBUS_FLAVOR = InternalFlavor.of("dummy-omnibus"); private Omnibus() {} private static String getOmnibusSoname(CxxPlatform cxxPlatform) { return String.format("libomnibus.%s", cxxPlatform.getSharedLibraryExtension()); } private static BuildTarget getRootTarget(BuildTarget base, BuildTarget root) { return base.withAppendedFlavors( InternalFlavor.of(Flavor.replaceInvalidCharacters(root.toString()))); } private static BuildTarget getDummyRootTarget(BuildTarget root) { return root.withAppendedFlavors(InternalFlavor.of("dummy")); } private static boolean shouldCreateDummyRoot(NativeLinkTarget target, CxxPlatform cxxPlatform) { return target.getNativeLinkTargetMode(cxxPlatform).getType() == 
Linker.LinkType.EXECUTABLE;
}

private static Iterable<NativeLinkable> getDeps(
    NativeLinkable nativeLinkable, CxxPlatform cxxPlatform) {
  return Iterables.concat(
      nativeLinkable.getNativeLinkableDepsForPlatform(cxxPlatform),
      nativeLinkable.getNativeLinkableExportedDepsForPlatform(cxxPlatform));
}

// Returns the dependencies for the given node, which can either be a `NativeLinkable` or a
// `NativeLinkTarget`.
private static Iterable<? extends NativeLinkable> getDeps(
    BuildTarget target,
    Map<BuildTarget, ? extends NativeLinkTarget> nativeLinkTargets,
    Map<BuildTarget, ? extends NativeLinkable> nativeLinkables,
    CxxPlatform cxxPlatform) {
  if (nativeLinkables.containsKey(target)) {
    NativeLinkable nativeLinkable = Preconditions.checkNotNull(nativeLinkables.get(target));
    return getDeps(nativeLinkable, cxxPlatform);
  } else {
    NativeLinkTarget nativeLinkTarget = Preconditions.checkNotNull(nativeLinkTargets.get(target));
    return nativeLinkTarget.getNativeLinkTargetDeps(cxxPlatform);
  }
}

// Build the data structure containing the bookkeeping that describes the omnibus link for the
// given included and excluded roots.
protected static OmnibusSpec buildSpec(
    final CxxPlatform cxxPlatform,
    final Iterable<? extends NativeLinkTarget> includedRoots,
    final Iterable<? extends NativeLinkable> excludedRoots) {

  // A map of targets to native linkable objects. We maintain this so that we can index our
  // bookkeeping around `BuildTarget` and avoid having to guarantee that all other types are
  // hashable.
  final Map<BuildTarget, NativeLinkable> nativeLinkables = new LinkedHashMap<>();

  // The nodes which should *not* be included in the omnibus link.
  final Set<BuildTarget> excluded = new LinkedHashSet<>();

  // Process all the roots included in the omnibus link.
  final Map<BuildTarget, NativeLinkTarget> roots = new LinkedHashMap<>();
  Map<BuildTarget, NativeLinkable> rootDeps = new LinkedHashMap<>();
  for (NativeLinkTarget root : includedRoots) {
    roots.put(root.getBuildTarget(), root);
    for (NativeLinkable dep :
        NativeLinkables.getNativeLinkables(
                cxxPlatform,
                root.getNativeLinkTargetDeps(cxxPlatform),
                Linker.LinkableDepType.SHARED)
            .values()) {
      Linker.LinkableDepType linkStyle =
          NativeLinkables.getLinkStyle(
              dep.getPreferredLinkage(cxxPlatform), Linker.LinkableDepType.SHARED);
      Preconditions.checkState(linkStyle != Linker.LinkableDepType.STATIC);

      // We only consider deps which aren't *only* statically linked.
      if (linkStyle == Linker.LinkableDepType.SHARED) {
        rootDeps.put(dep.getBuildTarget(), dep);
        nativeLinkables.put(dep.getBuildTarget(), dep);
      }
    }
  }

  // Process all roots excluded from the omnibus link, and add them to our running list of
  // excluded nodes.
  for (NativeLinkable root : excludedRoots) {
    nativeLinkables.put(root.getBuildTarget(), root);
    excluded.add(root.getBuildTarget());
  }

  // Perform the first walk starting from the native linkable nodes immediately reachable via the
  // included roots. We'll accomplish two things here:
  // 1. Build up the map of node names to their native linkable objects.
  // 2. Perform an initial discovery of dependency nodes to exclude from the omnibus link.
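  // (Two more walks follow: one closes the excluded set transitively, and the last builds the
  // actual graph of root and body nodes that feed the omnibus link.)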
  new AbstractBreadthFirstTraversal<BuildTarget>(rootDeps.keySet()) {
    @Override
    public Iterable<BuildTarget> visit(BuildTarget target) {
      NativeLinkable nativeLinkable = Preconditions.checkNotNull(nativeLinkables.get(target));
      ImmutableMap<BuildTarget, NativeLinkable> deps =
          Maps.uniqueIndex(getDeps(nativeLinkable, cxxPlatform), NativeLinkable::getBuildTarget);
      nativeLinkables.putAll(deps);
      if (!nativeLinkable.supportsOmnibusLinking(cxxPlatform)) {
        excluded.add(target);
      }
      return deps.keySet();
    }
  }.start();

  // Do another walk to flesh out the transitively excluded nodes.
  new AbstractBreadthFirstTraversal<BuildTarget>(excluded) {
    @Override
    public Iterable<BuildTarget> visit(BuildTarget target) {
      NativeLinkable nativeLinkable = Preconditions.checkNotNull(nativeLinkables.get(target));
      ImmutableMap<BuildTarget, NativeLinkable> deps =
          Maps.uniqueIndex(getDeps(nativeLinkable, cxxPlatform), NativeLinkable::getBuildTarget);
      nativeLinkables.putAll(deps);
      excluded.add(target);
      return deps.keySet();
    }
  }.start();

  // And then we can do one last walk to create the actual graph which contains only root and body
  // nodes to include in the omnibus link.
  final MutableDirectedGraph<BuildTarget> graphBuilder = new MutableDirectedGraph<>();
  final Set<BuildTarget> deps = new LinkedHashSet<>();
  new AbstractBreadthFirstTraversal<BuildTarget>(Sets.difference(rootDeps.keySet(), excluded)) {
    @Override
    public Iterable<BuildTarget> visit(BuildTarget target) {
      graphBuilder.addNode(target);
      Set<BuildTarget> keep = new LinkedHashSet<>();
      for (BuildTarget dep :
          Iterables.transform(
              getDeps(target, roots, nativeLinkables, cxxPlatform),
              NativeLinkable::getBuildTarget)) {
        if (excluded.contains(dep)) {
          deps.add(dep);
        } else {
          keep.add(dep);
          graphBuilder.addEdge(target, dep);
        }
      }
      return keep;
    }
  }.start();
  DirectedAcyclicGraph<BuildTarget> graph = new DirectedAcyclicGraph<>(graphBuilder);

  // Since we add all undefined root symbols into the omnibus library, we also need to include
  // any excluded root deps as deps of omnibus, as they may fulfill these undefined symbols.
  // Also add any excluded nodes that are also root dependencies.
  deps.addAll(Sets.intersection(rootDeps.keySet(), excluded));

  return ImmutableOmnibusSpec.builder()
      .graph(graph)
      .roots(roots)
      .body(
          FluentIterable.from(graph.getNodes())
              .filter(Predicates.not(roots.keySet()::contains))
              .toMap(Functions.forMap(nativeLinkables)))
      .deps(Maps.asMap(deps, Functions.forMap(nativeLinkables)))
      .excluded(Maps.asMap(excluded, Functions.forMap(nativeLinkables)))
      .build();
}

// Build a dummy library with the omnibus SONAME. We'll need this to break any dep cycle between
// the omnibus roots and the merged omnibus body, by first linking the roots against this
// dummy lib (ignoring missing symbols), then linking the omnibus body with the roots.
private static SourcePath createDummyOmnibus(
    BuildTarget baseTarget,
    ProjectFilesystem projectFilesystem,
    BuildRuleResolver ruleResolver,
    SourcePathRuleFinder ruleFinder,
    CxxBuckConfig cxxBuckConfig,
    CxxPlatform cxxPlatform,
    ImmutableList<?
extends Arg> extraLdflags) {
  BuildTarget dummyOmnibusTarget = baseTarget.withAppendedFlavors(DUMMY_OMNIBUS_FLAVOR);
  String omnibusSoname = getOmnibusSoname(cxxPlatform);
  CxxLink rule =
      ruleResolver.addToIndex(
          CxxLinkableEnhancer.createCxxLinkableSharedBuildRule(
              cxxBuckConfig,
              cxxPlatform,
              projectFilesystem,
              ruleResolver,
              ruleFinder,
              dummyOmnibusTarget,
              BuildTargets.getGenPath(projectFilesystem, dummyOmnibusTarget, "%s")
                  .resolve(omnibusSoname),
              Optional.of(omnibusSoname),
              extraLdflags));
  return rule.getSourcePathToOutput();
}

// Create a build rule which links the given root node against the merged omnibus library
// described by the given spec.
protected static OmnibusRoot createRoot(
    BuildTarget target,
    ProjectFilesystem projectFilesystem,
    BuildRuleResolver ruleResolver,
    SourcePathRuleFinder ruleFinder,
    CxxBuckConfig cxxBuckConfig,
    CxxPlatform cxxPlatform,
    ImmutableList<? extends Arg> extraLdflags,
    OmnibusSpec spec,
    SourcePath omnibus,
    NativeLinkTarget root,
    BuildTarget rootTargetBase,
    Optional<Path> output) {

  ImmutableList.Builder<Arg> argsBuilder = ImmutableList.builder();

  // Add any extra flags to the link.
  argsBuilder.addAll(extraLdflags);

  // Since the dummy omnibus library doesn't actually contain any symbols, make sure the linker
  // won't drop its runtime reference to it.
  argsBuilder.addAll(
      StringArg.from(cxxPlatform.getLd().resolve(ruleResolver).getNoAsNeededSharedLibsFlags()));

  // Since we're linking against a dummy libomnibus, ignore undefined symbols.
  argsBuilder.addAll(
      StringArg.from(cxxPlatform.getLd().resolve(ruleResolver).getIgnoreUndefinedSymbolsFlags()));

  // Add the args for the root link target first.
  NativeLinkableInput input = root.getNativeLinkTargetInput(cxxPlatform);
  argsBuilder.addAll(input.getArgs());

  // Grab a topologically sorted mapping of all the root's deps.
  ImmutableMap<BuildTarget, NativeLinkable> deps =
      NativeLinkables.getNativeLinkables(
          cxxPlatform, root.getNativeLinkTargetDeps(cxxPlatform), Linker.LinkableDepType.SHARED);

  // Now process the dependencies in topological order, to assemble the link line.
  boolean alreadyAddedOmnibusToArgs = false;
  for (Map.Entry<BuildTarget, NativeLinkable> entry : deps.entrySet()) {
    BuildTarget linkableTarget = entry.getKey();
    NativeLinkable nativeLinkable = entry.getValue();
    Linker.LinkableDepType linkStyle =
        NativeLinkables.getLinkStyle(
            nativeLinkable.getPreferredLinkage(cxxPlatform), Linker.LinkableDepType.SHARED);

    // If this dep needs to be linked statically, then we always link it directly.
    if (linkStyle != Linker.LinkableDepType.SHARED) {
      Preconditions.checkState(linkStyle == Linker.LinkableDepType.STATIC_PIC);
      argsBuilder.addAll(nativeLinkable.getNativeLinkableInput(cxxPlatform, linkStyle).getArgs());
      continue;
    }

    // If this dep is another root node, substitute in the custom linked library we built for it.
    if (spec.getRoots().containsKey(linkableTarget)) {
      argsBuilder.add(
          SourcePathArg.of(
              DefaultBuildTargetSourcePath.of(getRootTarget(target, linkableTarget))));
      continue;
    }

    // If we're linking this dep from the body, then we need to link via the giant merged
    // libomnibus instead.
    if (spec.getBody().containsKey(linkableTarget)) {
      if (!alreadyAddedOmnibusToArgs) {
        argsBuilder.add(SourcePathArg.of(omnibus));
        alreadyAddedOmnibusToArgs = true;
      }
      continue;
    }

    // Otherwise, this is either an explicitly statically linked or excluded node, so link it
    // normally.
Preconditions.checkState(spec.getExcluded().containsKey(linkableTarget)); argsBuilder.addAll(nativeLinkable.getNativeLinkableInput(cxxPlatform, linkStyle).getArgs()); } // Create the root library rule using the arguments assembled above. BuildTarget rootTarget = getRootTarget(target, rootTargetBase); NativeLinkTargetMode rootTargetMode = root.getNativeLinkTargetMode(cxxPlatform); CxxLink rootLinkRule; switch (rootTargetMode.getType()) { // Link the root as a shared library. case SHARED: { Optional<String> rootSoname = rootTargetMode.getLibraryName(); rootLinkRule = CxxLinkableEnhancer.createCxxLinkableSharedBuildRule( cxxBuckConfig, cxxPlatform, projectFilesystem, ruleResolver, ruleFinder, rootTarget, output.orElse( BuildTargets.getGenPath(projectFilesystem, rootTarget, "%s") .resolve( rootSoname.orElse( String.format( "%s.%s", rootTarget.getShortName(), cxxPlatform.getSharedLibraryExtension())))), rootSoname, argsBuilder.build()); break; } // Link the root as an executable. case EXECUTABLE: { rootLinkRule = CxxLinkableEnhancer.createCxxLinkableBuildRule( cxxBuckConfig, cxxPlatform, projectFilesystem, ruleResolver, ruleFinder, rootTarget, output.orElse( BuildTargets.getGenPath(projectFilesystem, rootTarget, "%s") .resolve(rootTarget.getShortName())), argsBuilder.build(), Linker.LinkableDepType.SHARED, /* thinLto */ false, Optional.empty(), Optional.empty()); break; } // $CASES-OMITTED$ default: throw new IllegalStateException( String.format( "%s: unexpected omnibus root type: %s %s", target, root.getBuildTarget(), rootTargetMode.getType())); } CxxLink rootRule = ruleResolver.addToIndex(rootLinkRule); return OmnibusRoot.of(rootRule.getSourcePathToOutput()); } protected static OmnibusRoot createRoot( BuildTarget buildTarget, ProjectFilesystem projectFilesystem, BuildRuleResolver ruleResolver, SourcePathRuleFinder ruleFinder, CxxBuckConfig cxxBuckConfig, CxxPlatform cxxPlatform, ImmutableList<? extends Arg> extraLdflags, OmnibusSpec spec, SourcePath omnibus, NativeLinkTarget root) { return createRoot( buildTarget, projectFilesystem, ruleResolver, ruleFinder, cxxBuckConfig, cxxPlatform, extraLdflags, spec, omnibus, root, root.getBuildTarget(), root.getNativeLinkTargetOutputPath(cxxPlatform)); } protected static OmnibusRoot createDummyRoot( BuildTarget target, ProjectFilesystem projectFilesystem, BuildRuleResolver ruleResolver, SourcePathRuleFinder ruleFinder, CxxBuckConfig cxxBuckConfig, CxxPlatform cxxPlatform, ImmutableList<? extends Arg> extraLdflags, OmnibusSpec spec, SourcePath omnibus, NativeLinkTarget root) { return createRoot( target, projectFilesystem, ruleResolver, ruleFinder, cxxBuckConfig, cxxPlatform, extraLdflags, spec, omnibus, root, getDummyRootTarget(root.getBuildTarget()), Optional.empty()); } private static ImmutableList<Arg> createUndefinedSymbolsArgs( BuildTarget buildTarget, ProjectFilesystem projectFilesystem, BuildRuleParams params, BuildRuleResolver ruleResolver, SourcePathRuleFinder ruleFinder, CxxPlatform cxxPlatform, Iterable<? 
extends SourcePath> linkerInputs) {
  SourcePath undefinedSymbolsFile =
      cxxPlatform
          .getSymbolNameTool()
          .createUndefinedSymbolsFile(
              projectFilesystem,
              params,
              ruleResolver,
              ruleFinder,
              buildTarget.withAppendedFlavors(
                  InternalFlavor.of("omnibus-undefined-symbols-file")),
              linkerInputs);
  return cxxPlatform
      .getLd()
      .resolve(ruleResolver)
      .createUndefinedSymbolsLinkerArgs(
          projectFilesystem,
          params,
          ruleResolver,
          ruleFinder,
          buildTarget.withAppendedFlavors(InternalFlavor.of("omnibus-undefined-symbols-args")),
          ImmutableList.of(undefinedSymbolsFile));
}

// Create a build rule to link the giant merged omnibus library described by the given spec.
protected static OmnibusLibrary createOmnibus(
    BuildTarget buildTarget,
    ProjectFilesystem projectFilesystem,
    BuildRuleParams params,
    BuildRuleResolver ruleResolver,
    SourcePathRuleFinder ruleFinder,
    CxxBuckConfig cxxBuckConfig,
    CxxPlatform cxxPlatform,
    ImmutableList<? extends Arg> extraLdflags,
    OmnibusSpec spec) {

  ImmutableList.Builder<Arg> argsBuilder = ImmutableList.builder();

  // Add extra ldflags to the beginning of the link.
  argsBuilder.addAll(extraLdflags);

  // For roots that aren't dependencies of nodes in the body, we extract their undefined symbols
  // to add to the link so that required symbols get pulled into the merged library.
  List<SourcePath> undefinedSymbolsOnlyRoots = new ArrayList<>();
  for (BuildTarget target :
      Sets.difference(spec.getRoots().keySet(), spec.getGraph().getNodes())) {
    NativeLinkTarget linkTarget = Preconditions.checkNotNull(spec.getRoots().get(target));
    undefinedSymbolsOnlyRoots.add(
        ruleResolver
            .requireRule(
                getRootTarget(
                    buildTarget,
                    shouldCreateDummyRoot(linkTarget, cxxPlatform)
                        ? getDummyRootTarget(target)
                        : target))
            .getSourcePathToOutput());
  }
  argsBuilder.addAll(
      createUndefinedSymbolsArgs(
          buildTarget,
          projectFilesystem,
          params,
          ruleResolver,
          ruleFinder,
          cxxPlatform,
          undefinedSymbolsOnlyRoots));

  // Walk the graph in topological order, appending each node's contributions to the link.
  ImmutableList<BuildTarget> targets = TopologicalSort.sort(spec.getGraph()).reverse();
  for (BuildTarget target : targets) {

    // If this is a root, just place the shared library we've linked above onto the link line.
    // We need this so that the linker can grab any undefined symbols from it, and therefore
    // know which symbols to pull in from the body nodes.
    NativeLinkTarget root = spec.getRoots().get(target);
    if (root != null) {
      argsBuilder.add(
          SourcePathArg.of(
              ((CxxLink) ruleResolver.requireRule(getRootTarget(buildTarget, root.getBuildTarget())))
                  .getSourcePathToOutput()));
      continue;
    }

    // Otherwise, this is a body node, and we need to add its static library to the link line,
    // so that the linker can discard unused object files from it.
    NativeLinkable nativeLinkable = Preconditions.checkNotNull(spec.getBody().get(target));
    NativeLinkableInput input =
        NativeLinkables.getNativeLinkableInput(
            cxxPlatform, Linker.LinkableDepType.STATIC_PIC, nativeLinkable);
    argsBuilder.addAll(input.getArgs());
  }

  // We process all excluded omnibus deps last, and just add their components as if this were a
  // normal shared link.
  ImmutableMap<BuildTarget, NativeLinkable> deps =
      NativeLinkables.getNativeLinkables(
          cxxPlatform, spec.getDeps().values(), Linker.LinkableDepType.SHARED);
  for (NativeLinkable nativeLinkable : deps.values()) {
    NativeLinkableInput input =
        NativeLinkables.getNativeLinkableInput(
            cxxPlatform, Linker.LinkableDepType.SHARED, nativeLinkable);
    argsBuilder.addAll(input.getArgs());
  }

  // Create the merged omnibus library using the arguments assembled above.
  BuildTarget omnibusTarget = buildTarget.withAppendedFlavors(OMNIBUS_FLAVOR);
  String omnibusSoname = getOmnibusSoname(cxxPlatform);
  CxxLink omnibusRule =
      ruleResolver.addToIndex(
          CxxLinkableEnhancer.createCxxLinkableSharedBuildRule(
              cxxBuckConfig,
              cxxPlatform,
              projectFilesystem,
              ruleResolver,
              ruleFinder,
              omnibusTarget,
              BuildTargets.getGenPath(projectFilesystem, omnibusTarget, "%s")
                  .resolve(omnibusSoname),
              Optional.of(omnibusSoname),
              argsBuilder.build()));

  return OmnibusLibrary.of(omnibusSoname, omnibusRule.getSourcePathToOutput());
}

/**
 * An alternate link strategy for languages which need to package native deps up as shared
 * libraries. It links only those native nodes which have an explicit edge from non-native code
 * as separate shared libraries, and statically links all other native nodes into a single giant
 * shared library. This reduces the number of shared libraries considerably and also allows the
 * linker to throw away a lot of unused object files.
 *
 * @param nativeLinkTargetRoots root nodes which will be included in the omnibus link.
 * @param nativeLinkableRoots root nodes which are to be excluded from the omnibus link.
 * @return a map of shared library names to their containing {@link SourcePath}s.
 */
public static OmnibusLibraries getSharedLibraries(
    BuildTarget buildTarget,
    ProjectFilesystem projectFilesystem,
    BuildRuleParams params,
    BuildRuleResolver ruleResolver,
    SourcePathRuleFinder ruleFinder,
    CxxBuckConfig cxxBuckConfig,
    CxxPlatform cxxPlatform,
    ImmutableList<? extends Arg> extraLdflags,
    Iterable<? extends NativeLinkTarget> nativeLinkTargetRoots,
    Iterable<? extends NativeLinkable> nativeLinkableRoots) {

  OmnibusLibraries.Builder libs = OmnibusLibraries.builder();

  OmnibusSpec spec = buildSpec(cxxPlatform, nativeLinkTargetRoots, nativeLinkableRoots);

  // Create an empty dummy omnibus library, to give the roots something to link against before
  // we have the actual omnibus library available. Note that this requires that the linker
  // supports linking shared libraries with undefined references.
  SourcePath dummyOmnibus =
      createDummyOmnibus(
          buildTarget,
          projectFilesystem,
          ruleResolver,
          ruleFinder,
          cxxBuckConfig,
          cxxPlatform,
          extraLdflags);

  // Create rule for each of the root nodes, linking against the dummy omnibus library above.
  for (NativeLinkTarget target : spec.getRoots().values()) {

    // For executable roots, some platforms can't properly build them when there are any
    // unresolved symbols, so we initially link a dummy root just to provide a way to grab the
    // undefined symbol list we need to build the real omnibus library.
    if (shouldCreateDummyRoot(target, cxxPlatform)) {
      createDummyRoot(
          buildTarget,
          projectFilesystem,
          ruleResolver,
          ruleFinder,
          cxxBuckConfig,
          cxxPlatform,
          extraLdflags,
          spec,
          dummyOmnibus,
          target);
    } else {
      OmnibusRoot root =
          createRoot(
              buildTarget,
              projectFilesystem,
              ruleResolver,
              ruleFinder,
              cxxBuckConfig,
              cxxPlatform,
              extraLdflags,
              spec,
              dummyOmnibus,
              target);
      libs.putRoots(target.getBuildTarget(), root);
    }
  }

  // If there are any body nodes, generate the giant merged omnibus library.
  Optional<SourcePath> realOmnibus = Optional.empty();
  if (!spec.getBody().isEmpty()) {
    OmnibusLibrary omnibus =
        createOmnibus(
            buildTarget,
            projectFilesystem,
            params,
            ruleResolver,
            ruleFinder,
            cxxBuckConfig,
            cxxPlatform,
            extraLdflags,
            spec);
    libs.addLibraries(omnibus);
    realOmnibus = Optional.of(omnibus.getPath());
  }

  // Do another pass over executable roots, building the real DSO which links to the real omnibus.
  // See the comment above in the first pass for more details.
  for (NativeLinkTarget target : spec.getRoots().values()) {
    if (shouldCreateDummyRoot(target, cxxPlatform)) {
      OmnibusRoot root =
          createRoot(
              buildTarget,
              projectFilesystem,
              ruleResolver,
              ruleFinder,
              cxxBuckConfig,
              cxxPlatform,
              extraLdflags,
              spec,
              realOmnibus.orElse(dummyOmnibus),
              target);
      libs.putRoots(target.getBuildTarget(), root);
    }
  }

  // Lastly, add in any shared libraries from excluded nodes the normal way.
  for (NativeLinkable nativeLinkable : spec.getExcluded().values()) {
    if (nativeLinkable.getPreferredLinkage(cxxPlatform) != NativeLinkable.Linkage.STATIC) {
      for (Map.Entry<String, SourcePath> ent :
          nativeLinkable.getSharedLibraries(cxxPlatform).entrySet()) {
        libs.addLibraries(OmnibusLibrary.of(ent.getKey(), ent.getValue()));
      }
    }
  }

  return libs.build();
}

@Value.Immutable
abstract static class OmnibusSpec {

  // The graph containing all root and body nodes that are to be included in the omnibus link.
  public abstract DirectedAcyclicGraph<BuildTarget> getGraph();

  // All native roots included in the omnibus. These will get linked into separate shared
  // libraries which depend on the giant statically linked omnibus body.
  public abstract ImmutableMap<BuildTarget, NativeLinkTarget> getRoots();

  // All native nodes which are to be statically linked into the giant combined shared library.
  public abstract ImmutableMap<BuildTarget, NativeLinkable> getBody();

  // All native nodes which are not included in the omnibus link, as either a root or a body node.
  public abstract ImmutableMap<BuildTarget, NativeLinkable> getExcluded();

  // The subset of excluded nodes which are first-order deps of any root or body nodes.
  public abstract ImmutableMap<BuildTarget, NativeLinkable> getDeps();

  @Value.Check
  public void verify() {

    // Verify that the graph is composed entirely of root and body nodes.
    Preconditions.checkState(
        ImmutableSet.<BuildTarget>builder()
            .addAll(getRoots().keySet())
            .addAll(getBody().keySet())
            .build()
            .containsAll(getGraph().getNodes()));

    // Verify that the root, body, and excluded nodes are distinct and that deps are a subset
    // of the excluded nodes.
    Preconditions.checkState(
        Sets.intersection(getRoots().keySet(), getBody().keySet()).isEmpty());
    Preconditions.checkState(
        Sets.intersection(getRoots().keySet(), getExcluded().keySet()).isEmpty());
    Preconditions.checkState(
        Sets.intersection(getBody().keySet(), getExcluded().keySet()).isEmpty());
    Preconditions.checkState(getExcluded().keySet().containsAll(getDeps().keySet()));
  }
}

@Value.Immutable
@BuckStyleImmutable
interface AbstractOmnibusRoot {

  @Value.Parameter
  SourcePath getPath();
}

@Value.Immutable
@BuckStyleImmutable
interface AbstractOmnibusLibrary {

  @Value.Parameter
  String getSoname();

  @Value.Parameter
  SourcePath getPath();
}

@Value.Immutable
@BuckStyleImmutable
abstract static class AbstractOmnibusLibraries {

  @Value.Parameter
  public abstract ImmutableMap<BuildTarget, OmnibusRoot> getRoots();

  @Value.Parameter
  public abstract ImmutableList<OmnibusLibrary> getLibraries();
}
}
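/*
 * Simplified, self-contained sketch (not Buck code) of the partition that buildSpec computes:
 * nodes reachable from the included roots form the omnibus "body" unless they are transitively
 * excluded. The string graph and all names below are hypothetical; the real implementation works
 * on NativeLinkable/NativeLinkTarget objects keyed by BuildTarget.
 */
import java.util.*;

public class OmnibusPartitionSketch {
  public static void main(String[] args) {
    Map<String, List<String>> deps = new LinkedHashMap<>();
    deps.put("rootA", Arrays.asList("libX", "libY"));
    deps.put("libX", Arrays.asList("libZ"));
    deps.put("libY", Arrays.asList("excluded1"));
    deps.put("libZ", Collections.emptyList());
    deps.put("excluded1", Arrays.asList("libW"));
    deps.put("libW", Collections.emptyList());
    Set<String> excluded = new LinkedHashSet<>(Arrays.asList("excluded1"));

    // Second walk from buildSpec: close the excluded set transitively.
    Deque<String> queue = new ArrayDeque<>(excluded);
    while (!queue.isEmpty()) {
      for (String d : deps.get(queue.pop())) {
        if (excluded.add(d)) queue.push(d);
      }
    }

    // Final walk: everything reachable from the roots that isn't excluded becomes body.
    Set<String> body = new LinkedHashSet<>();
    Set<String> seen = new HashSet<>(Arrays.asList("rootA"));
    queue.add("rootA");
    while (!queue.isEmpty()) {
      for (String d : deps.get(queue.pop())) {
        // Excluded deps are recorded but not traversed, mirroring the "keep" set above.
        if (seen.add(d) && !excluded.contains(d)) {
          body.add(d);
          queue.push(d);
        }
      }
    }
    System.out.println("body=" + body + ", excluded=" + excluded);
    // body=[libX, libY, libZ], excluded=[excluded1, libW]
  }
}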
/* Copyright 2013-2016 Jason Leyba Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.github.jsdossier; import static com.github.jsdossier.testing.CompilerUtil.createSourceFile; import static com.google.common.truth.Truth.assertThat; import static com.google.common.truth.Truth.assertWithMessage; import com.github.jsdossier.annotations.Input; import com.github.jsdossier.jscomp.NominalType; import com.github.jsdossier.jscomp.TypeRegistry; import com.github.jsdossier.testing.CompilerUtil; import com.github.jsdossier.testing.GuiceRule; import java.nio.file.FileSystem; import javax.inject.Inject; import org.junit.Rule; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; /** Tests for {@link TypeContext}. */ @RunWith(JUnit4.class) public class TypeContextTest { @Rule public GuiceRule guice = GuiceRule.builder(this) .setOutputDir("out") .setSourcePrefix("src") .setModulePrefix("src/modules") .build(); @Inject @Input private FileSystem fs; @Inject private CompilerUtil util; @Inject private TypeRegistry typeRegistry; @Inject private TypeContext context; @Test public void canResolveTypes() { util.compile( createSourceFile( fs.getPath("foo.js"), "goog.provide('foo');", "class A {}", "class B {}", "class C {}", "goog.scope(function() {", " let ns = foo;", " let A = B;", " let D = C;", " ns.X = class X {}", "});"), createSourceFile( fs.getPath("bar.js"), "goog.provide('bar');", "goog.scope(function() {", " let ns = bar;", " let A = C;", " let B = foo.X;", " ns.X = class Y {}", "});")); assertThat(context.resolveType("A")).isSameAs(typeRegistry.getType("A")); assertThat(context.resolveType("B")).isSameAs(typeRegistry.getType("B")); assertThat(context.resolveType("C")).isSameAs(typeRegistry.getType("C")); assertThat(context.resolveType("D")).isNull(); assertThat(context.resolveType("ns")).isNull(); assertThat(context.resolveType("ns.X")).isNull(); context = context.changeContext(typeRegistry.getType("foo.X")); assertThat(context.resolveType("A")).isSameAs(typeRegistry.getType("B")); assertThat(context.resolveType("B")).isSameAs(typeRegistry.getType("B")); assertThat(context.resolveType("C")).isSameAs(typeRegistry.getType("C")); assertThat(context.resolveType("D")).isSameAs(typeRegistry.getType("C")); assertThat(context.resolveType("ns")).isSameAs(typeRegistry.getType("foo")); assertThat(context.resolveType("ns.X")).isSameAs(typeRegistry.getType("foo.X")); context = context.changeContext(typeRegistry.getType("bar.X")); assertThat(context.resolveType("A")).isSameAs(typeRegistry.getType("C")); assertThat(context.resolveType("B")).isSameAs(typeRegistry.getType("foo.X")); assertThat(context.resolveType("C")).isSameAs(typeRegistry.getType("C")); assertThat(context.resolveType("D")).isNull(); assertThat(context.resolveType("ns")).isSameAs(typeRegistry.getType("bar")); assertThat(context.resolveType("ns.X")).isSameAs(typeRegistry.getType("bar.X")); } @Test public void canResolveTypesWithModuleContexts() { util.compile( createSourceFile(fs.getPath("globals.js"), "class A {}"), 
createSourceFile(fs.getPath("src/modules/foo.js"), "class A {}", "export {A as B}"), createSourceFile(fs.getPath("src/modules/dir/foo.js"), "class A {}", "export {A as C}")); NominalType typeA = typeRegistry.getType("A"); NominalType typeB = typeRegistry.getType("module$src$modules$foo.B"); NominalType typeC = typeRegistry.getType("module$src$modules$dir$foo.C"); NominalType moduleFoo = typeRegistry.getType("module$src$modules$foo"); NominalType moduleDirFoo = typeRegistry.getType("module$src$modules$dir$foo"); NominalType moduleFooTypeB = typeRegistry.getType("module$src$modules$foo.B"); NominalType moduleDirFooTypeC = typeRegistry.getType("module$src$modules$dir$foo.C"); assertThat(context.resolveType("A")).isSameAs(typeA); assertThat(context.resolveType("foo")).isSameAs(moduleFoo); assertThat(context.resolveType("foo.B")).isSameAs(moduleFooTypeB); assertThat(context.resolveType("dir/foo")).isSameAs(moduleDirFoo); assertThat(context.resolveType("dir/foo.C")).isSameAs(moduleDirFooTypeC); assertThat(context.resolveType("./foo")).isSameAs(moduleFoo); assertThat(context.resolveType("./foo.B")).isSameAs(moduleFooTypeB); assertThat(context.resolveType("./dir/foo")).isSameAs(moduleDirFoo); assertThat(context.resolveType("./dir/foo.C")).isSameAs(moduleDirFooTypeC); assertThat(context.resolveType("../foo")).isNull(); context = context.changeContext(moduleFoo); assertThat(context.resolveType("A")).isSameAs(typeB); assertThat(context.resolveType("foo")).isSameAs(moduleFoo); assertThat(context.resolveType("dir/foo")).isSameAs(moduleDirFoo); assertThat(context.resolveType("./foo")).isSameAs(moduleFoo); assertThat(context.resolveType("./dir/foo")).isSameAs(moduleDirFoo); assertThat(context.resolveType("../foo")).isNull(); context = context.changeContext(moduleDirFoo); assertThat(context.resolveType("A")).isSameAs(typeC); assertThat(context.resolveType("foo")).isSameAs(moduleFoo); assertThat(context.resolveType("dir/foo")).isSameAs(moduleDirFoo); assertThat(context.resolveType("./foo")).isSameAs(moduleDirFoo); assertThat(context.resolveType("./dir/foo")).isNull(); assertThat(context.resolveType("../foo")).isSameAs(moduleFoo); } @Test public void canResolveImportedTypeNames() { util.compile( createSourceFile(fs.getPath("globals.js"), "class A {}"), createSourceFile( fs.getPath("src/modules/foo.js"), "class A {}", "class B {}", "export {A as B, B as default}"), createSourceFile( fs.getPath("src/modules/bar.js"), "import Foo from './foo.js';", "import {default as Bar} from './foo.js';", "import Baz, {B, B as C} from './foo.js';", "", "export default class {}")); NominalType fooB = typeRegistry.getType("module$src$modules$foo.B"); NominalType fooDefault = typeRegistry.getType("module$src$modules$foo.default"); NominalType moduleBar = typeRegistry.getType("module$src$modules$bar"); context = context.changeContext(moduleBar); assertThat(context.resolveType("Foo")).isSameAs(fooDefault); assertThat(context.resolveType("Bar")).isSameAs(fooDefault); assertThat(context.resolveType("Baz")).isSameAs(fooDefault); assertThat(context.resolveType("B")).isSameAs(fooB); assertThat(context.resolveType("C")).isSameAs(fooB); } @Test public void doesNotResolveAsTypeWhenDefaultExportIsNotANominalType() { util.compile( createSourceFile(fs.getPath("src/modules/foo.js"), "export default 123;"), createSourceFile(fs.getPath("src/modules/bar.js"), "export default function() {}"), createSourceFile( fs.getPath("src/modules/baz.js"), "import foo from './foo.js';", "import bar from './bar.js';", "export default class {}")); NominalType 
module = typeRegistry.getType("module$src$modules$baz"); context = context.changeContext(module); assertThat(context.resolveType("foo")).isNull(); assertThat(context.resolveType("bar")).isNull(); } @Test public void canResolveReexportedTypes() { util.compile( createSourceFile( fs.getPath("src/modules/foo.js"), "export default class {}", "export class A {}"), createSourceFile( fs.getPath("src/modules/bar.js"), "export {default as B, A as C} from './foo.js';"), createSourceFile( fs.getPath("src/modules/baz.js"), "import {B, C} from './bar.js';", "export default class {}")); NominalType fooA = typeRegistry.getType("module$src$modules$foo.A"); NominalType fooDefault = typeRegistry.getType("module$src$modules$foo.default"); NominalType barB = typeRegistry.getType("module$src$modules$bar.B"); NominalType barC = typeRegistry.getType("module$src$modules$bar.C"); NominalType moduleBar = typeRegistry.getType("module$src$modules$bar"); NominalType moduleBaz = typeRegistry.getType("module$src$modules$baz"); context = context.changeContext(moduleBar); assertWithMessage("default is not a valid identifier") .that(context.resolveType("default")) .isNull(); assertWithMessage("A is not a valid reference name in module/bar") .that(context.resolveType("A")) .isNull(); assertWithMessage("in module/bar, B is a reference to foo.default") .that(context.resolveType("B")) .isSameAs(fooDefault); assertWithMessage("in module/bar, C is a reference to foo.A") .that(context.resolveType("C")) .isSameAs(fooA); context = context.changeContext(moduleBaz); assertThat(context.resolveType("B")).isSameAs(barB); assertThat(context.resolveType("C")).isSameAs(barC); assertThat(typeRegistry.getTypes(barB.getType())).containsExactly(fooDefault, barB).inOrder(); assertThat(typeRegistry.getTypes(barC.getType())).containsExactly(fooA, barC).inOrder(); } }
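/*
 * Sketch of the path arithmetic behind the relative-import assertions above, using only
 * java.nio: from module src/modules/dir/foo.js, "./foo" names the module next to it and
 * "../foo" names src/modules/foo.js. This mirrors the expected test results; the actual
 * resolution logic lives in TypeContext/TypeRegistry, not in this helper.
 */
import java.nio.file.Path;
import java.nio.file.Paths;

public class ModulePathSketch {
  public static void main(String[] args) {
    Path context = Paths.get("src/modules/dir/foo.js");
    System.out.println(resolve(context, "./foo"));  // src/modules/dir/foo.js
    System.out.println(resolve(context, "../foo")); // src/modules/foo.js
  }

  // Resolve a relative module reference against the directory of the current module.
  static Path resolve(Path contextModule, String ref) {
    return contextModule.getParent().resolve(ref + ".js").normalize();
  }
}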
/*
 * Copyright @ 2015 - Present, 8x8 Inc
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jitsi.videobridge.cc.allocation;

import edu.umd.cs.findbugs.annotations.*;
import kotlin.*;
import org.jetbrains.annotations.*;
import org.jitsi.nlj.*;
import org.jitsi.utils.event.*;
import org.jitsi.utils.logging.*;
import org.jitsi.utils.logging2.Logger;
import org.jitsi.videobridge.cc.config.*;
import org.jitsi.videobridge.util.*;
import org.json.simple.*;

import java.lang.*;
import java.lang.SuppressWarnings;
import java.time.*;
import java.util.*;
import java.util.function.*;
import java.util.stream.*;

import static org.jitsi.videobridge.cc.allocation.PrioritizeKt.prioritize;
import static org.jitsi.videobridge.cc.allocation.VideoConstraintsKt.prettyPrint;

/**
 * @author George Politis
 */
public class BandwidthAllocator<T extends MediaSourceContainer>
{
    /**
     * Returns a boolean that indicates whether or not the current bandwidth estimation (in bps) has changed above the
     * configured threshold with respect to the previous bandwidth estimation.
     *
     * @param previousBwe the previous bandwidth estimation (in bps).
     * @param currentBwe the current bandwidth estimation (in bps).
     * @return true if the bandwidth has changed above the configured threshold, false otherwise.
     */
    private static boolean bweChangeIsLargerThanThreshold(long previousBwe, long currentBwe)
    {
        if (previousBwe == -1 || currentBwe == -1)
        {
            return true;
        }

        long deltaBwe = currentBwe - previousBwe;

        // If the bwe has increased, we should act upon it, otherwise we may end up in this broken situation: Suppose
        // that the target bitrate is 2.5Mbps, and that the last bandwidth allocation was performed with a 2.4Mbps
        // bandwidth estimate. The bridge keeps probing and, suppose that, eventually the bandwidth estimate reaches
        // 2.6Mbps, which is plenty to accommodate the target bitrate; but the minimum bandwidth estimate that would
        // trigger a new bandwidth allocation is 2.4Mbps + 2.4Mbps * 15% = 2.76Mbps.
        if (deltaBwe > 0)
        {
            return true;
        }

        // If, on the other hand, the bwe has decreased, we require at least a 15% drop in order to update the bitrate
        // allocation. This is an ugly hack to prevent too many resolution/UI changes in case the bridge produces too
        // low a bandwidth estimate, at the risk of clogging the receiver's pipe.
        // TODO: do we still need this? Do we ever see the BWE drop by <15%?
        return deltaBwe < -1 * previousBwe * BitrateControllerConfig.bweChangeThreshold();
    }

    private final Logger logger;

    /**
     * The estimated available bandwidth in bits per second.
     */
    private long bweBps = -1;

    /**
     * Provide the current list of endpoints (in no particular order).
     * TODO: Simplify to avoid the weird (and slow) flow involving `endpointsSupplier` and `sortedEndpointIds`.
     */
    private final Supplier<List<T>> endpointsSupplier;

    /**
     * The "effective" constraints for an endpoint indicate the maximum resolution/fps that this
     * {@link BandwidthAllocator} would allocate for this endpoint given enough bandwidth.
     *
     * They are the constraints signaled by the receiver, further reduced to 0 when the endpoint is "outside lastN".
     *
     * Effective constraints are used to signal to video senders to reduce their resolution to the minimum that
     * satisfies all receivers.
     */
    private Map<String, VideoConstraints> effectiveConstraints = Collections.emptyMap();

    private final Clock clock;

    private final EventEmitter<EventHandler> eventEmitter = new SyncEventEmitter<>();

    /**
     * Whether bandwidth allocation should be constrained to the available bandwidth (when {@code true}), or assume
     * infinite bandwidth (when {@code false}).
     */
    private final Supplier<Boolean> trustBwe;

    /**
     * The allocation settings signalled by the receiver.
     */
    private AllocationSettings allocationSettings = new AllocationSettings();

    /**
     * The last time {@link BandwidthAllocator#update()} was called.
     */
    private Instant lastUpdateTime = Instant.MIN;

    /**
     * The result of the bitrate control algorithm, the last time it ran.
     */
    @NotNull
    private BandwidthAllocation allocation = new BandwidthAllocation(Collections.emptySet());

    private final DiagnosticContext diagnosticContext;

    BandwidthAllocator(
            EventHandler eventHandler,
            Supplier<List<T>> endpointsSupplier,
            Supplier<Boolean> trustBwe,
            Logger parentLogger,
            DiagnosticContext diagnosticContext,
            Clock clock)
    {
        this.logger = parentLogger.createChildLogger(BandwidthAllocator.class.getName());
        this.clock = clock;
        this.trustBwe = trustBwe;
        this.diagnosticContext = diagnosticContext;

        this.endpointsSupplier = endpointsSupplier;
        eventEmitter.addHandler(eventHandler);
    }

    /**
     * Gets a JSON representation of the parts of this object's state that are deemed useful for debugging.
     */
    @SuppressWarnings("unchecked")
    @SuppressFBWarnings(
            value = "IS2_INCONSISTENT_SYNC",
            justification = "We intentionally avoid synchronizing while reading fields only used in debug output.")
    JSONObject getDebugState()
    {
        JSONObject debugState = new JSONObject();
        debugState.put("trustBwe", BitrateControllerConfig.trustBwe());
        debugState.put("bweBps", bweBps);
        debugState.put("allocation", allocation.getDebugState());
        debugState.put("allocationSettings", allocationSettings.toJson());
        debugState.put("effectiveConstraints", effectiveConstraints);
        return debugState;
    }

    @NotNull
    BandwidthAllocation getAllocation()
    {
        return allocation;
    }

    /**
     * Get the available bandwidth, taking into account the `trustBwe` option.
     */
    private long getAvailableBandwidth()
    {
        return trustBwe.get() ? bweBps : Long.MAX_VALUE;
    }

    /**
     * Notify the {@link BandwidthAllocator} that the estimated available bandwidth has changed.
     * @param newBandwidthBps the newly estimated bandwidth in bps
     */
    void bandwidthChanged(long newBandwidthBps)
    {
        if (!bweChangeIsLargerThanThreshold(bweBps, newBandwidthBps))
        {
            logger.debug(() -> "New bandwidth (" + newBandwidthBps
                    + ") is not significantly "
                    + "changed from previous estimate (" + bweBps + "), ignoring");
            // If this is a "negligible" change in the bandwidth estimation
            // wrt the last bandwidth estimation that we reacted to, then
            // do not update the bandwidth allocation. The goal is to limit
            // the resolution changes due to bandwidth estimation changes,
            // as often resolution changes can negatively impact user
            // experience, at the risk of clogging the receiver pipe.
        }
        else
        {
            logger.debug(() -> "new bandwidth is " + newBandwidthBps + ", updating");

            bweBps = newBandwidthBps;
            update();
        }
    }

    /**
     * Updates the allocation settings and calculates a new bitrate {@link BandwidthAllocation}.
     * @param allocationSettings the new allocation settings.
*/ void update(AllocationSettings allocationSettings) { this.allocationSettings = allocationSettings; update(); } /** * Runs the bandwidth allocation algorithm, and fires events if the result is different from the previous result. */ synchronized void update() { lastUpdateTime = clock.instant(); // Order the endpoints by selection, followed by speech activity. List<T> sortedEndpoints = prioritize(endpointsSupplier.get(), getSelectedEndpoints()); // Extract and update the effective constraints. Map<String, VideoConstraints> oldEffectiveConstraints = effectiveConstraints; effectiveConstraints = PrioritizeKt.getEffectiveConstraints(sortedEndpoints, allocationSettings); logger.trace(() -> "Allocating: sortedEndpoints=" + sortedEndpoints.stream().map(T::getId).collect(Collectors.joining(",")) + " effectiveConstraints=" + prettyPrint(effectiveConstraints)); // Compute the bandwidth allocation. BandwidthAllocation newAllocation = allocate(sortedEndpoints); boolean allocationChanged = !allocation.isTheSameAs(newAllocation); if (allocationChanged) { eventEmitter.fireEvent(handler -> { handler.allocationChanged(newAllocation); return Unit.INSTANCE; }); } allocation = newAllocation; boolean effectiveConstraintsChanged = !effectiveConstraints.equals(oldEffectiveConstraints); logger.trace(() -> "Finished allocation: allocationChanged=" + allocationChanged + " effectiveConstraintsChanged=" + effectiveConstraintsChanged); if (effectiveConstraintsChanged) { eventEmitter.fireEvent(handler -> { handler.effectiveVideoConstraintsChanged(oldEffectiveConstraints, effectiveConstraints); return Unit.INSTANCE; }); } } private List<String> getSelectedEndpoints() { // On-stage participants are considered selected (with higher prio). List<String> selectedEndpoints = new ArrayList<>(allocationSettings.getOnStageEndpoints()); allocationSettings.getSelectedEndpoints().forEach(selectedEndpoint -> { if (!selectedEndpoints.contains(selectedEndpoint)) { selectedEndpoints.add(selectedEndpoint); } }); return selectedEndpoints; } /** * Implements the bandwidth allocation algorithm for the given ordered list of endpoints. * * @param conferenceEndpoints the list of endpoints in order of priority to allocate for. * @return the new {@link BandwidthAllocation}. */ private synchronized @NotNull BandwidthAllocation allocate(List<T> conferenceEndpoints) { List<SingleSourceAllocation> sourceBitrateAllocations = createAllocations(conferenceEndpoints); if (sourceBitrateAllocations.isEmpty()) { return new BandwidthAllocation(Collections.emptySet()); } long maxBandwidth = getAvailableBandwidth(); long oldMaxBandwidth = -1; int[] oldTargetIndices = new int[sourceBitrateAllocations.size()]; int[] newTargetIndices = new int[sourceBitrateAllocations.size()]; Arrays.fill(newTargetIndices, -1); // The number of allocations with a selected layer. int numAllocationsWithVideo = 0; boolean oversending = false; while (oldMaxBandwidth != maxBandwidth) { oldMaxBandwidth = maxBandwidth; System.arraycopy(newTargetIndices, 0, oldTargetIndices, 0, oldTargetIndices.length); int newNumAllocationsWithVideo = 0; for (int i = 0; i < sourceBitrateAllocations.size(); i++) { SingleSourceAllocation sourceBitrateAllocation = sourceBitrateAllocations.get(i); if (sourceBitrateAllocation.getConstraints().getMaxHeight() <= 0) { continue; } maxBandwidth += sourceBitrateAllocation.getTargetBitrate(); // In stage view improve greedily until preferred, in tile view go step-by-step. 
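                // The source's current target bitrate was just refunded to maxBandwidth and will
                // be charged again after improve() returns, so on each pass a source competes for
                // the remaining budget plus whatever it already holds.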
                sourceBitrateAllocation.improve(maxBandwidth, i == 0);
                maxBandwidth -= sourceBitrateAllocation.getTargetBitrate();
                if (maxBandwidth < 0)
                {
                    oversending = true;
                }

                newTargetIndices[i] = sourceBitrateAllocation.getTargetIdx();
                if (sourceBitrateAllocation.getTargetIdx() != -1)
                {
                    newNumAllocationsWithVideo++;
                }

                // In stage view, do not allocate bandwidth for thumbnails until the on-stage reaches "preferred".
                // This prevents enabling thumbnails only to disable them when the bwe slightly increases, allowing
                // on-stage to take more.
                if (sourceBitrateAllocation.isOnStage() && !sourceBitrateAllocation.hasReachedPreferred())
                {
                    break;
                }
            }

            if (numAllocationsWithVideo > newNumAllocationsWithVideo)
            {
                // Roll back state to prevent jumps in the number of forwarded participants.
                for (int i = 0; i < sourceBitrateAllocations.size(); i++)
                {
                    sourceBitrateAllocations.get(i).setTargetIdx(oldTargetIndices[i]);
                }

                break;
            }

            numAllocationsWithVideo = newNumAllocationsWithVideo;
        }

        // The endpoints which are in lastN, and are sending video, but were suspended due to bwe.
        List<String> suspendedIds = sourceBitrateAllocations.stream()
                .filter(SingleSourceAllocation::isSuspended)
                .map(ssa -> ssa.getEndpoint().getId())
                .collect(Collectors.toList());
        if (!suspendedIds.isEmpty())
        {
            logger.info("Endpoints were suspended due to insufficient bandwidth (bwe="
                    + getAvailableBandwidth() + " bps): " + String.join(",", suspendedIds));
        }

        Set<SingleAllocation> allocations = new HashSet<>();

        long targetBps = 0, idealBps = 0;
        for (SingleSourceAllocation sourceBitrateAllocation : sourceBitrateAllocations)
        {
            allocations.add(sourceBitrateAllocation.getResult());
            targetBps += sourceBitrateAllocation.getTargetBitrate();
            idealBps += sourceBitrateAllocation.getIdealBitrate();
        }

        return new BandwidthAllocation(allocations, oversending, idealBps, targetBps);
    }

    /**
     * Query whether this allocator is forwarding a source from a given endpoint, as of its
     * most recent allocation decision.
     */
    public boolean isForwarding(String endpointId)
    {
        return allocation.isForwarding(endpointId);
    }

    /**
     * Query whether the allocator has non-zero effective constraints for the given endpoint.
     */
    public boolean hasNonZeroEffectiveConstraints(String endpointId)
    {
        VideoConstraints constraints = effectiveConstraints.get(endpointId);
        if (constraints == null)
        {
            return false;
        }

        return constraints.getMaxHeight() > 0;
    }

    private synchronized @NotNull List<SingleSourceAllocation> createAllocations(List<T> conferenceEndpoints)
    {
        // Init.
        List<SingleSourceAllocation> sourceBitrateAllocations = new ArrayList<>(conferenceEndpoints.size());
        for (MediaSourceContainer endpoint : conferenceEndpoints)
        {
            MediaSourceDesc source = endpoint.getMediaSource();
            if (source != null)
            {
                sourceBitrateAllocations.add(
                        new SingleSourceAllocation(
                                endpoint,
                                // Note that we use the effective constraints and not the receiver's constraints
                                // directly. This means we never even try to allocate bitrate to endpoints "outside
                                // lastN". For example, if LastN=1 and the first endpoint sends a non-scalable
                                // stream with bitrate higher than the available bandwidth, we will forward no
                                // video at all instead of going to the second endpoint in the list.
                                // I think this is not desired behavior. However, it is required for the "effective
                                // constraints" to work as designed.
effectiveConstraints.get(endpoint.getId()), allocationSettings.getOnStageEndpoints().contains(endpoint.getId()), diagnosticContext, clock)); } } return sourceBitrateAllocations; } /** * Submits a call to `update` in a CPU thread if bandwidth allocation has not been performed recently. */ void maybeUpdate() { if (Duration.between(lastUpdateTime, clock.instant()) .compareTo(BitrateControllerConfig.maxTimeBetweenCalculations()) > 0) { logger.debug("Forcing an update"); TaskPools.CPU_POOL.submit((Runnable) this::update); } } public interface EventHandler { default void allocationChanged(@NotNull BandwidthAllocation allocation) {} default void effectiveVideoConstraintsChanged( @NotNull Map<String, VideoConstraints> oldEffectiveConstraints, @NotNull Map<String, VideoConstraints> newEffectiveConstraints) {} } }
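/*
 * Standalone sketch of the bweChangeIsLargerThanThreshold rule, using the numbers from the
 * comment in that method. The 0.15 threshold is a stand-in for
 * BitrateControllerConfig.bweChangeThreshold(), whose actual value is configuration-dependent.
 */
public class BweThresholdSketch {
    static boolean isSignificant(long previousBps, long currentBps, double threshold) {
        if (previousBps == -1 || currentBps == -1) return true;
        long delta = currentBps - previousBps;
        if (delta > 0) return true;                  // always react to increases
        return delta < -1 * previousBps * threshold; // otherwise require a large enough drop
    }

    public static void main(String[] args) {
        long prev = 2_400_000;
        System.out.println(isSignificant(prev, 2_600_000, 0.15)); // true: any increase counts
        System.out.println(isSignificant(prev, 2_200_000, 0.15)); // false: only a ~8% drop
        System.out.println(isSignificant(prev, 2_000_000, 0.15)); // true: a ~17% drop
    }
}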
package com.sigmobile.dawebmail.fragments;

import android.app.ProgressDialog;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.os.Bundle;
import android.os.Handler;
import android.support.design.widget.FloatingActionButton;
import android.support.design.widget.Snackbar;
import android.support.v4.app.Fragment;
import android.support.v4.content.LocalBroadcastManager;
import android.support.v4.widget.SwipeRefreshLayout;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.DefaultItemAnimator;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.view.animation.AnimationUtils;
import android.widget.LinearLayout;

import com.sigmobile.dawebmail.MainActivity;
import com.sigmobile.dawebmail.R;
import com.sigmobile.dawebmail.adapters.MailAdapter;
import com.sigmobile.dawebmail.asyncTasks.MultiMailAction;
import com.sigmobile.dawebmail.asyncTasks.MultiMailActionListener;
import com.sigmobile.dawebmail.asyncTasks.RefreshInbox;
import com.sigmobile.dawebmail.asyncTasks.RefreshInboxListener;
import com.sigmobile.dawebmail.database.CurrentUser;
import com.sigmobile.dawebmail.database.EmailMessage;
import com.sigmobile.dawebmail.database.User;
import com.sigmobile.dawebmail.utils.Constants;

import java.util.ArrayList;
import java.util.Arrays;

import butterknife.Bind;
import butterknife.ButterKnife;

/**
 * Created by rish on 6/10/15.
 */
public class FolderFragment extends Fragment implements RefreshInboxListener, MultiMailActionListener, MailAdapter.MultiMailActionSelectedListener {

    @Bind(R.id.folder_empty_view)
    LinearLayout emptyLayout;

    @Bind(R.id.folder_recycleView)
    RecyclerView recyclerView;

    @Bind(R.id.swipeContainer)
    SwipeRefreshLayout swipeRefreshLayout;

    @Bind(R.id.folder_delete_fab)
    FloatingActionButton fabDelete;

    private MailAdapter mailAdapter;
    private ProgressDialog progressDialog, progressDialog2;
    private ArrayList<EmailMessage> allEmails;
    private User currentUser;
    private String folder;
    private MenuItem selectAll;
    private boolean[] markedMails;

    public FolderFragment() {
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        View rootView = inflater.inflate(R.layout.fragment_folder, container, false);
        ButterKnife.bind(FolderFragment.this, rootView);

        Bundle args = getArguments();
        folder = args.getString(Constants.FOLDER, Constants.SENT);

        if (folder.equals(Constants.SENT))
            ((AppCompatActivity) getActivity()).getSupportActionBar().setTitle(getString(R.string.sent));
        else if (folder.equals(Constants.TRASH))
            ((AppCompatActivity) getActivity()).getSupportActionBar().setTitle(getString(R.string.trash));

        currentUser = CurrentUser.getCurrentUser(getActivity());
        progressDialog = new ProgressDialog(getActivity());

        registerInternalBroadcastReceivers();
        setupMailAdapter();
        setupSwipeRefreshLayout();
        setupSearchBar();

        new RefreshInbox(currentUser, getActivity(), FolderFragment.this, folder, Constants.REFRESH_TYPE_REFRESH).execute();
        swipeRefreshLayout.setVisibility(View.GONE);

        return rootView;
    }

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        if (savedInstanceState != null)
            markedMails =
savedInstanceState.getBooleanArray("markedEmails"); else markedMails = null; setHasOptionsMenu(true); } @Override public void onSaveInstanceState(Bundle outState) { super.onSaveInstanceState(outState); outState.putBooleanArray("markedEmails", mailAdapter.getMarkedMails()); } @Override public void onResume() { super.onResume(); /** * This is done for maintaining the fragment lifecycle. Read onPostRefresh comment. **/ Bundle bundle = getArguments(); if (bundle != null && bundle.getInt(Constants.BUNDLE_ON_POST_REFRESH_EMAILS_SIZE, -1) != -1) { onPostRefresh(bundle.getInt(Constants.BUNDLE_ON_POST_REFRESH_EMAILS_SIZE)); } } private void registerInternalBroadcastReceivers() { LocalBroadcastManager.getInstance(getActivity()).registerReceiver(new BroadcastReceiver() { @Override public void onReceive(Context context, Intent intent) { refreshAdapter(); } }, new IntentFilter(Constants.BROADCAST_REFRESH_ADAPTERS)); } private void setupMailAdapter() { allEmails = new ArrayList<>(); mailAdapter = new MailAdapter(allEmails, getActivity(), this, folder); RecyclerView.LayoutManager mLayoutManager = new LinearLayoutManager(getActivity().getApplicationContext()); recyclerView.setLayoutManager(mLayoutManager); recyclerView.setItemAnimator(new DefaultItemAnimator()); recyclerView.setAdapter(mailAdapter); } private void setupSearchBar() { } private void setupSwipeRefreshLayout() { swipeRefreshLayout.setOnRefreshListener(new SwipeRefreshLayout.OnRefreshListener() { @Override public void onRefresh() { new RefreshInbox(currentUser, getActivity(), FolderFragment.this, folder, Constants.REFRESH_TYPE_REFRESH).execute(); } }); swipeRefreshLayout.setColorSchemeResources(android.R.color.holo_blue_dark, android.R.color.holo_blue_light, android.R.color.darker_gray, android.R.color.holo_blue_dark); } @Override public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) { inflater.inflate(R.menu.fragment_folder_menu, menu); selectAll = menu.getItem(0); } @Override public boolean onOptionsItemSelected(MenuItem item) { int id = item.getItemId(); if (id == R.id.action_logout) { logout(); }else if(id == R.id.action_selectall){ if(item.isChecked()){ item.setChecked(false); item.setIcon(R.drawable.ic_action_selectall_unchecked); selectAllMails(false); setupSelectAll(false); } else{ item.setChecked(true); item.setIcon(R.drawable.ic_action_selectall_checked); selectAllMails(true); } } return super.onOptionsItemSelected(item); } @Override public void onPreRefresh() { progressDialog2 = ProgressDialog.show(getActivity(), "", getString(R.string.dialog_msg_loading), true); progressDialog2.setCancelable(false); progressDialog2.show(); } @Override public void onPostRefresh(boolean success, final ArrayList<EmailMessage> refreshedEmails, User user) { allEmails = new ArrayList<>(refreshedEmails); /** * This is done for maintaining the fragment lifecycle. * Check if the fragment is attached to the activity * if it isn't, then set bundle stating that a refresh is required. 
*/ if (getFragmentManager() != null) { FolderFragment thisFragment = (FolderFragment) getFragmentManager().findFragmentByTag(Constants.FRAGMENT_TAG_FOLDER+folder); if (thisFragment != null) { if (!thisFragment.isAdded()) { Bundle bundle = new Bundle(); bundle.putInt(Constants.BUNDLE_ON_POST_REFRESH_EMAILS_SIZE, refreshedEmails.size()); thisFragment.setArguments(bundle); } else { onPostRefresh(refreshedEmails.size()); } } else { refreshAdapter(); progressDialog2.dismiss(); swipeRefreshLayout.setRefreshing(false); } } else { refreshAdapter(); progressDialog2.dismiss(); swipeRefreshLayout.setRefreshing(false); } } private void onPostRefresh(final int refreshedEmailsSize) { new Handler().post(new Runnable() { @Override public void run() { refreshAdapter(); if (refreshedEmailsSize == 0) Snackbar.make(swipeRefreshLayout, getString(R.string.snackbar_new_webmail_zero), Snackbar.LENGTH_LONG).show(); else if (refreshedEmailsSize == 1) Snackbar.make(swipeRefreshLayout, getString(R.string.snackbar_new_webmail_one), Snackbar.LENGTH_LONG).show(); else Snackbar.make(swipeRefreshLayout, refreshedEmailsSize + getString(R.string.snackbar_new_webmail_many), Snackbar.LENGTH_LONG).show(); progressDialog2.dismiss(); if (!allEmails.isEmpty()) { emptyLayout.setVisibility(View.GONE); swipeRefreshLayout.setVisibility(View.VISIBLE); } else { emptyLayout.setVisibility(View.VISIBLE); swipeRefreshLayout.setVisibility(View.VISIBLE); } } }); swipeRefreshLayout.setRefreshing(false); } @Override public void onPreMultiMailAction() { progressDialog = ProgressDialog.show(getActivity(), "", getString(R.string.dialog_msg_attempting_action)); progressDialog.show(); } @Override public void onPostMultiMailAction(boolean success, String mailAction, ArrayList<EmailMessage> emailsForMultiAction) { if (!success) Snackbar.make(swipeRefreshLayout, getString(R.string.snackbar_delete_unsuccessful), Snackbar.LENGTH_LONG).show(); else Snackbar.make(swipeRefreshLayout, getString(R.string.snackbar_delete_successful), Snackbar.LENGTH_LONG).show(); progressDialog.dismiss(); refreshAdapter(); fabDelete.setVisibility(View.GONE); new RefreshInbox(currentUser, getActivity(), FolderFragment.this, folder, Constants.REFRESH_TYPE_REFRESH).execute(); } public void refreshAdapter() { mailAdapter = new MailAdapter(allEmails, getActivity(), this, folder); RecyclerView.LayoutManager mLayoutManager = new LinearLayoutManager(getContext()); recyclerView.setLayoutManager(mLayoutManager); recyclerView.setItemAnimator(new DefaultItemAnimator()); restoreMarkedMails(); recyclerView.setAdapter(mailAdapter); } public void logout() { ((MainActivity) getActivity()).showLogoutDialog(currentUser); } @Override public void onItemClickedForDelete(final ArrayList<EmailMessage> emailsMarkedForAction) { fabDelete.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { Snackbar.make(swipeRefreshLayout, getString(R.string.snackbar_deleting), Snackbar.LENGTH_LONG).show(); new MultiMailAction(currentUser, getActivity(), FolderFragment.this, emailsMarkedForAction, getString(R.string.msg_action_delete)).execute(); } }); if (!emailsMarkedForAction.isEmpty()) { if (fabDelete.getVisibility() != View.VISIBLE) { fabDelete.setVisibility(View.VISIBLE); fabDelete.startAnimation(AnimationUtils.loadAnimation(getActivity(), R.anim.abc_slide_in_bottom)); setupSelectAll(true); } } else { if (fabDelete.getVisibility() != View.GONE) { fabDelete.startAnimation(AnimationUtils.loadAnimation(getActivity(), R.anim.abc_slide_out_bottom));
fabDelete.setVisibility(View.GONE); setupSelectAll(false); } } } private void setupSelectAll(boolean set){ if(selectAll != null){ selectAll.setVisible(set); selectAll.setEnabled(set); } } private void selectAllMails(boolean select){ markedMails = new boolean[allEmails.size()]; Arrays.fill(markedMails, select); refreshAdapter(); } private void restoreMarkedMails(){ if(markedMails != null) { mailAdapter.restoreMarkedMails(markedMails); markedMails = null; } } }
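The fragment above survives configuration changes by writing the adapter's boolean[] of marked mails into the saved-instance Bundle and re-applying it exactly once after the adapter has been rebuilt (restoreMarkedMails() nulls the field so later refreshes start clean). A minimal plain-Java sketch of that one-shot handoff; SelectionState and its method names are hypothetical:

// Sketch of the markedMails save/restore contract used by FolderFragment.
final class SelectionState {
    private boolean[] pending; // marks saved across a restart, or null

    void save(boolean[] marks) {
        this.pending = (marks == null) ? null : marks.clone();
    }

    // Builds the adapter-sized array and applies saved marks at most once,
    // mirroring restoreMarkedMails(): the stash is cleared after use.
    boolean[] restoreInto(int adapterSize) {
        boolean[] marks = new boolean[adapterSize];
        if (pending != null) {
            System.arraycopy(pending, 0, marks, 0, Math.min(pending.length, adapterSize));
            pending = null;
        }
        return marks;
    }
}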
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.beam.sdk.runners; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; import static org.junit.Assert.assertThat; import com.google.common.base.Function; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import java.io.Serializable; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import org.apache.beam.sdk.Pipeline.PipelineVisitor; import org.apache.beam.sdk.Pipeline.PipelineVisitor.Defaults; import org.apache.beam.sdk.io.CountingInput; import org.apache.beam.sdk.io.CountingInput.UnboundedCountingInput; import org.apache.beam.sdk.io.CountingSource; import org.apache.beam.sdk.io.Read; import org.apache.beam.sdk.runners.PTransformOverrideFactory.ReplacementOutput; import org.apache.beam.sdk.runners.TransformHierarchy.Node; import org.apache.beam.sdk.testing.TestPipeline; import org.apache.beam.sdk.transforms.Create; import org.apache.beam.sdk.transforms.DoFn; import org.apache.beam.sdk.transforms.PTransform; import org.apache.beam.sdk.transforms.ParDo; import org.apache.beam.sdk.transforms.ParDo.MultiOutput; import org.apache.beam.sdk.transforms.ParDo.SingleOutput; import org.apache.beam.sdk.util.WindowingStrategy; import org.apache.beam.sdk.values.PBegin; import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.PCollection.IsBounded; import org.apache.beam.sdk.values.PCollectionList; import org.apache.beam.sdk.values.PCollectionTuple; import org.apache.beam.sdk.values.PDone; import org.apache.beam.sdk.values.PInput; import org.apache.beam.sdk.values.POutput; import org.apache.beam.sdk.values.PValue; import org.apache.beam.sdk.values.TaggedPValue; import org.apache.beam.sdk.values.TupleTag; import org.apache.beam.sdk.values.TupleTagList; import org.hamcrest.Matchers; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; /** * Tests for {@link TransformHierarchy}. 
*/ @RunWith(JUnit4.class) public class TransformHierarchyTest implements Serializable { @Rule public final transient TestPipeline pipeline = TestPipeline.create().enableAbandonedNodeEnforcement(false); @Rule public transient ExpectedException thrown = ExpectedException.none(); private transient TransformHierarchy hierarchy; @Before public void setup() { hierarchy = new TransformHierarchy(pipeline); } @Test public void getCurrentNoPushReturnsRoot() { assertThat(hierarchy.getCurrent().isRootNode(), is(true)); } @Test public void pushWithoutPushFails() { thrown.expect(IllegalStateException.class); hierarchy.popNode(); } @Test public void pushThenPopSucceeds() { TransformHierarchy.Node root = hierarchy.getCurrent(); TransformHierarchy.Node node = hierarchy.pushNode("Create", PBegin.in(pipeline), Create.of(1)); assertThat(hierarchy.getCurrent(), equalTo(node)); hierarchy.popNode(); assertThat(node.finishedSpecifying, is(true)); assertThat(hierarchy.getCurrent(), equalTo(root)); } @Test public void emptyCompositeSucceeds() { PCollection<Long> created = PCollection.createPrimitiveOutputInternal( pipeline, WindowingStrategy.globalDefault(), IsBounded.BOUNDED); TransformHierarchy.Node node = hierarchy.pushNode("Create", PBegin.in(pipeline), Create.of(1)); hierarchy.setOutput(created); hierarchy.popNode(); PCollectionList<Long> pcList = PCollectionList.of(created); TransformHierarchy.Node emptyTransform = hierarchy.pushNode( "Extract", pcList, new PTransform<PCollectionList<Long>, PCollection<Long>>() { @Override public PCollection<Long> expand(PCollectionList<Long> input) { return input.get(0); } }); hierarchy.setOutput(created); hierarchy.popNode(); assertThat(hierarchy.getProducer(created), equalTo(node)); assertThat( "A Transform that produces non-primitive output should be composite", emptyTransform.isCompositeNode(), is(true)); } @Test public void producingOwnAndOthersOutputsFails() { PCollection<Long> created = PCollection.createPrimitiveOutputInternal( pipeline, WindowingStrategy.globalDefault(), IsBounded.BOUNDED); hierarchy.pushNode("Create", PBegin.in(pipeline), Create.of(1)); hierarchy.setOutput(created); hierarchy.popNode(); PCollectionList<Long> pcList = PCollectionList.of(created); final PCollectionList<Long> appended = pcList.and( PCollection.<Long>createPrimitiveOutputInternal( pipeline, WindowingStrategy.globalDefault(), IsBounded.BOUNDED)); hierarchy.pushNode( "AddPc", pcList, new PTransform<PCollectionList<Long>, PCollectionList<Long>>() { @Override public PCollectionList<Long> expand(PCollectionList<Long> input) { return appended; } }); thrown.expect(IllegalArgumentException.class); thrown.expectMessage("contains a primitive POutput produced by it"); thrown.expectMessage("AddPc"); thrown.expectMessage("Create"); thrown.expectMessage(appended.expand().toString()); hierarchy.setOutput(appended); } @Test public void producingOwnOutputWithCompositeFails() { final PCollection<Long> comp = PCollection.createPrimitiveOutputInternal( pipeline, WindowingStrategy.globalDefault(), IsBounded.BOUNDED); PTransform<PBegin, PCollection<Long>> root = new PTransform<PBegin, PCollection<Long>>() { @Override public PCollection<Long> expand(PBegin input) { return comp; } }; hierarchy.pushNode("Composite", PBegin.in(pipeline), root); Create.Values<Integer> create = Create.of(1); hierarchy.pushNode("Create", PBegin.in(pipeline), create); hierarchy.setOutput(pipeline.apply(create)); hierarchy.popNode(); thrown.expect(IllegalArgumentException.class); thrown.expectMessage("contains a primitive POutput 
produced by it"); thrown.expectMessage("primitive transforms are permitted to produce"); thrown.expectMessage("Composite"); hierarchy.setOutput(comp); } @Test public void replaceSucceeds() { PTransform<?, ?> enclosingPT = new PTransform<PInput, POutput>() { @Override public POutput expand(PInput input) { return PDone.in(input.getPipeline()); } }; TransformHierarchy.Node enclosing = hierarchy.pushNode("Enclosing", PBegin.in(pipeline), enclosingPT); Create.Values<Long> originalTransform = Create.of(1L); TransformHierarchy.Node original = hierarchy.pushNode("Create", PBegin.in(pipeline), originalTransform); assertThat(hierarchy.getCurrent(), equalTo(original)); PCollection<Long> originalOutput = pipeline.apply(originalTransform); hierarchy.setOutput(originalOutput); hierarchy.popNode(); assertThat(original.finishedSpecifying, is(true)); hierarchy.setOutput(PDone.in(pipeline)); hierarchy.popNode(); assertThat(hierarchy.getCurrent(), not(equalTo(enclosing))); Read.Bounded<Long> replacementTransform = Read.from(CountingSource.upTo(1L)); PCollection<Long> replacementOutput = pipeline.apply(replacementTransform); Node replacement = hierarchy.replaceNode(original, PBegin.in(pipeline), replacementTransform); assertThat(hierarchy.getCurrent(), equalTo(replacement)); hierarchy.setOutput(replacementOutput); TaggedPValue taggedOriginal = Iterables.getOnlyElement(originalOutput.expand()); TaggedPValue taggedReplacement = Iterables.getOnlyElement(replacementOutput.expand()); Map<PValue, ReplacementOutput> replacementOutputs = Collections.<PValue, ReplacementOutput>singletonMap( replacementOutput, ReplacementOutput.of(taggedOriginal, taggedReplacement)); hierarchy.replaceOutputs(replacementOutputs); assertThat(replacement.getInputs(), equalTo(original.getInputs())); assertThat(replacement.getEnclosingNode(), equalTo(original.getEnclosingNode())); assertThat(replacement.getEnclosingNode(), equalTo(enclosing)); assertThat( replacement.getTransform(), Matchers.<PTransform<?, ?>>equalTo(replacementTransform)); // THe tags of the replacement transform are matched to the appropriate PValues of the original assertThat( replacement.getOutputs(), contains(TaggedPValue.of(taggedReplacement.getTag(), taggedOriginal.getValue()))); hierarchy.popNode(); } @Test public void replaceWithCompositeSucceeds() { final SingleOutput<Long, Long> originalParDo = ParDo.of( new DoFn<Long, Long>() { @ProcessElement public void processElement(ProcessContext ctxt) { ctxt.output(ctxt.element() + 1L); } }); UnboundedCountingInput genUpstream = CountingInput.unbounded(); PCollection<Long> upstream = pipeline.apply(genUpstream); PCollection<Long> output = upstream.apply("Original", originalParDo); hierarchy.pushNode("Upstream", pipeline.begin(), genUpstream); hierarchy.finishSpecifyingInput(); hierarchy.setOutput(upstream); hierarchy.popNode(); TransformHierarchy.Node original = hierarchy.pushNode("Original", upstream, originalParDo); hierarchy.finishSpecifyingInput(); hierarchy.setOutput(output); hierarchy.popNode(); final TupleTag<Long> longs = new TupleTag<>(); final MultiOutput<Long, Long> replacementParDo = ParDo.of( new DoFn<Long, Long>() { @ProcessElement public void processElement(ProcessContext ctxt) { ctxt.output(ctxt.element() + 1L); } }) .withOutputTags(longs, TupleTagList.empty()); PTransform<PCollection<Long>, PCollection<Long>> replacementComposite = new PTransform<PCollection<Long>, PCollection<Long>>() { @Override public PCollection<Long> expand(PCollection<Long> input) { return input.apply("Contained", 
replacementParDo).get(longs); } }; PCollectionTuple replacementOutput = upstream.apply("Contained", replacementParDo); Node compositeNode = hierarchy.replaceNode(original, upstream, replacementComposite); Node replacementParNode = hierarchy.pushNode("Original/Contained", upstream, replacementParDo); hierarchy.finishSpecifyingInput(); hierarchy.setOutput(replacementOutput); hierarchy.popNode(); hierarchy.setOutput(replacementOutput.get(longs)); TaggedPValue originalLongs = Iterables.getOnlyElement(output.expand()); TaggedPValue replacementLongs = Iterables.getOnlyElement(replacementOutput.expand()); hierarchy.replaceOutputs( Collections.<PValue, ReplacementOutput>singletonMap( replacementOutput.get(longs), ReplacementOutput.of(originalLongs, replacementLongs))); assertThat( replacementParNode.getOutputs(), contains(TaggedPValue.of(replacementLongs.getTag(), originalLongs.getValue()))); assertThat( compositeNode.getOutputs(), contains( TaggedPValue.of( Iterables.getOnlyElement(replacementOutput.get(longs).expand()).getTag(), originalLongs.getValue()))); hierarchy.popNode(); } @Test public void visitVisitsAllPushed() { TransformHierarchy.Node root = hierarchy.getCurrent(); PBegin begin = PBegin.in(pipeline); Create.Values<Long> create = Create.of(1L); Read.Bounded<Long> read = Read.from(CountingSource.upTo(1L)); PCollection<Long> created = PCollection.createPrimitiveOutputInternal( pipeline, WindowingStrategy.globalDefault(), IsBounded.BOUNDED); SingleOutput<Long, Long> pardo = ParDo.of( new DoFn<Long, Long>() { @ProcessElement public void processElement(ProcessContext ctxt) { ctxt.output(ctxt.element()); } }); PCollection<Long> mapped = PCollection.createPrimitiveOutputInternal( pipeline, WindowingStrategy.globalDefault(), IsBounded.BOUNDED); TransformHierarchy.Node compositeNode = hierarchy.pushNode("Create", begin, create); hierarchy.finishSpecifyingInput(); assertThat(hierarchy.getCurrent(), equalTo(compositeNode)); assertThat(compositeNode.getInputs(), Matchers.emptyIterable()); assertThat(compositeNode.getTransform(), Matchers.<PTransform<?, ?>>equalTo(create)); // Not yet set assertThat(compositeNode.getOutputs(), Matchers.emptyIterable()); assertThat(compositeNode.getEnclosingNode().isRootNode(), is(true)); TransformHierarchy.Node primitiveNode = hierarchy.pushNode("Create/Read", begin, read); assertThat(hierarchy.getCurrent(), equalTo(primitiveNode)); hierarchy.finishSpecifyingInput(); hierarchy.setOutput(created); hierarchy.popNode(); assertThat( fromTaggedValues(primitiveNode.getOutputs()), Matchers.<PValue>containsInAnyOrder(created)); assertThat(primitiveNode.getInputs(), Matchers.<TaggedPValue>emptyIterable()); assertThat(primitiveNode.getTransform(), Matchers.<PTransform<?, ?>>equalTo(read)); assertThat(primitiveNode.getEnclosingNode(), equalTo(compositeNode)); hierarchy.setOutput(created); // The composite is listed as outputting a PValue created by the contained primitive assertThat( fromTaggedValues(compositeNode.getOutputs()), Matchers.<PValue>containsInAnyOrder(created)); // The producer of that PValue is still the primitive in which it is first output assertThat(hierarchy.getProducer(created), equalTo(primitiveNode)); hierarchy.popNode(); TransformHierarchy.Node otherPrimitive = hierarchy.pushNode("ParDo", created, pardo); hierarchy.finishSpecifyingInput(); hierarchy.setOutput(mapped); hierarchy.popNode(); final Set<TransformHierarchy.Node> visitedCompositeNodes = new HashSet<>(); final Set<TransformHierarchy.Node> visitedPrimitiveNodes = new HashSet<>(); final 
Set<PValue> visitedValuesInVisitor = new HashSet<>(); Set<PValue> visitedValues = hierarchy.visit( new PipelineVisitor.Defaults() { @Override public CompositeBehavior enterCompositeTransform(TransformHierarchy.Node node) { visitedCompositeNodes.add(node); return CompositeBehavior.ENTER_TRANSFORM; } @Override public void visitPrimitiveTransform(TransformHierarchy.Node node) { visitedPrimitiveNodes.add(node); } @Override public void visitValue(PValue value, TransformHierarchy.Node producer) { visitedValuesInVisitor.add(value); } }); assertThat(visitedCompositeNodes, containsInAnyOrder(root, compositeNode)); assertThat(visitedPrimitiveNodes, containsInAnyOrder(primitiveNode, otherPrimitive)); assertThat(visitedValuesInVisitor, Matchers.<PValue>containsInAnyOrder(created, mapped)); assertThat(visitedValuesInVisitor, equalTo(visitedValues)); } /** * Tests that visiting the {@link TransformHierarchy} after replacing nodes does not visit any * of the original nodes or inaccessible values but does visit all of the replacement nodes, * new inaccessible replacement values, and the original output values. */ @Test public void visitAfterReplace() { Node root = hierarchy.getCurrent(); final SingleOutput<Long, Long> originalParDo = ParDo.of( new DoFn<Long, Long>() { @ProcessElement public void processElement(ProcessContext ctxt) { ctxt.output(ctxt.element() + 1L); } }); UnboundedCountingInput genUpstream = CountingInput.unbounded(); PCollection<Long> upstream = pipeline.apply(genUpstream); PCollection<Long> output = upstream.apply("Original", originalParDo); Node upstreamNode = hierarchy.pushNode("Upstream", pipeline.begin(), genUpstream); hierarchy.finishSpecifyingInput(); hierarchy.setOutput(upstream); hierarchy.popNode(); Node original = hierarchy.pushNode("Original", upstream, originalParDo); hierarchy.finishSpecifyingInput(); hierarchy.setOutput(output); hierarchy.popNode(); final TupleTag<Long> longs = new TupleTag<>(); final MultiOutput<Long, Long> replacementParDo = ParDo.of( new DoFn<Long, Long>() { @ProcessElement public void processElement(ProcessContext ctxt) { ctxt.output(ctxt.element() + 1L); } }) .withOutputTags(longs, TupleTagList.empty()); PTransform<PCollection<Long>, PCollection<Long>> replacementComposite = new PTransform<PCollection<Long>, PCollection<Long>>() { @Override public PCollection<Long> expand(PCollection<Long> input) { return input.apply("Contained", replacementParDo).get(longs); } }; PCollectionTuple replacementOutput = upstream.apply("Contained", replacementParDo); Node compositeNode = hierarchy.replaceNode(original, upstream, replacementComposite); Node replacementParNode = hierarchy.pushNode("Original/Contained", upstream, replacementParDo); hierarchy.finishSpecifyingInput(); hierarchy.setOutput(replacementOutput); hierarchy.popNode(); hierarchy.setOutput(replacementOutput.get(longs)); TaggedPValue originalLongs = Iterables.getOnlyElement(output.expand()); TaggedPValue replacementLongs = Iterables.getOnlyElement(replacementOutput.expand()); hierarchy.replaceOutputs( Collections.<PValue, ReplacementOutput>singletonMap( replacementOutput.get(longs), ReplacementOutput.of(originalLongs, replacementLongs))); hierarchy.popNode(); final Set<Node> visitedCompositeNodes = new HashSet<>(); final Set<Node> visitedPrimitiveNodes = new HashSet<>(); Set<PValue> visitedValues = hierarchy.visit( new Defaults() { @Override public CompositeBehavior enterCompositeTransform(Node node) { visitedCompositeNodes.add(node); return CompositeBehavior.ENTER_TRANSFORM; } @Override public void 
visitPrimitiveTransform(Node node) { visitedPrimitiveNodes.add(node); } }); /* Final Graph: Upstream -> Upstream.out -> Composite -> (ReplacementParDo -> OriginalParDo.out) */ assertThat(visitedCompositeNodes, containsInAnyOrder(root, compositeNode)); assertThat(visitedPrimitiveNodes, containsInAnyOrder(upstreamNode, replacementParNode)); assertThat(visitedValues, Matchers.<PValue>containsInAnyOrder(upstream, output)); } private static List<PValue> fromTaggedValues(List<TaggedPValue> taggedValues) { return Lists.transform( taggedValues, new Function<TaggedPValue, PValue>() { @Override public PValue apply(TaggedPValue input) { return input.getValue(); } }); } }
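Both visit tests depend on the visitor returning CompositeBehavior.ENTER_TRANSFORM so that traversal descends into composite nodes. A compact sketch of the same traversal that simply counts composites and primitives, using only the hooks exercised above (assumed to run inside a test with the same hierarchy field):

// Sketch only: tallies nodes with the visitor callbacks used in this file.
final int[] composites = {0};
final int[] primitives = {0};
hierarchy.visit(
    new PipelineVisitor.Defaults() {
      @Override
      public CompositeBehavior enterCompositeTransform(TransformHierarchy.Node node) {
        if (!node.isRootNode()) {
          composites[0]++;
        }
        return CompositeBehavior.ENTER_TRANSFORM; // descend into children
      }

      @Override
      public void visitPrimitiveTransform(TransformHierarchy.Node node) {
        primitives[0]++;
      }
    });

Returning DO_NOT_ENTER_TRANSFORM from enterCompositeTransform would skip a composite's children, which is why the assertions above can rely on every pushed primitive being visited.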
package de.felixbruns.jotify.gateway; import java.io.IOException; import java.nio.charset.Charset; import java.util.*; import java.util.concurrent.*; import com.sun.net.httpserver.HttpExchange; import de.felixbruns.jotify.cache.*; import de.felixbruns.jotify.crypto.RSA; import de.felixbruns.jotify.exceptions.*; import de.felixbruns.jotify.gateway.stream.ChannelStreamer; import de.felixbruns.jotify.media.*; import de.felixbruns.jotify.media.parser.*; import de.felixbruns.jotify.player.*; import de.felixbruns.jotify.protocol.*; import de.felixbruns.jotify.protocol.channel.*; public class GatewayConnection implements Runnable, CommandListener, Player { private Session session; private Protocol protocol; private User user; private Semaphore wait; private Cache cache; private long timeout; private TimeUnit unit; private GatewayPlayer player; /** * Enum for browsing media. */ public enum BrowseType { ARTIST(1), ALBUM(2), TRACK(3); private int value; private BrowseType(int value){ this.value = value; } public int getValue(){ return this.value; } public static BrowseType valueOf(int value){ for(BrowseType type : BrowseType.values()){ if(type.value == value){ return type; } } return null; } } /** * Create a new GatewayConnection using the default {@link Cache} * implementation and timeout value (10 seconds). */ public GatewayConnection(){ this(new FileCache(), 10, TimeUnit.SECONDS); } /** * Create a new GatewayConnection using a specified {@link Cache} * implementation and timeout. Note: A {@link TimeoutException} * may also be caused by geographical restrictions. * * @param cache Cache implementation to use. * @param timeout Timeout value to use. * @param unit TimeUnit to use for timeout. * * @see MemoryCache * @see FileCache */ public GatewayConnection(Cache cache, long timeout, TimeUnit unit){ this.session = new Session(); this.protocol = null; this.user = null; this.wait = new Semaphore(2); this.cache = cache; this.timeout = timeout; this.unit = unit; this.player = null; /* Acquire permits (country, prodinfo). */ this.wait.acquireUninterruptibly(2); } /** * Set timeout for requests. * * @param timeout Timeout value to use. * @param unit TimeUnit to use for timeout. */ public void setTimeout(long timeout, TimeUnit unit){ this.timeout = timeout; this.unit = unit; } /** * Login to Spotify using the specified username and password. * * @param username Username to use. * @param password Corresponding password. * * @throws ConnectionException * @throws AuthenticationException */ public void login(String username, String password) throws ConnectionException, AuthenticationException { /* Check if we're already logged in. */ if(this.protocol != null){ throw new IllegalStateException("Already logged in!"); } /* Authenticate session and get protocol. */ this.protocol = this.session.authenticate(username, password); /* Create user object. */ this.user = new User(username); /* Create player. */ this.player = new GatewayPlayer(this.protocol); /* Add command handler. */ this.protocol.addListener(this); } /** * Closes the connection to a Spotify server. * * @throws ConnectionException */ public void close() throws ConnectionException { /* This will make receivePacket return immediately. */ if(this.protocol != null){ this.protocol.disconnect(); } /* Reset protocol to 'null'. */ this.protocol = null; } /** * Continuously receives packets in order to handle them. * Use a {@link Thread} to run this. */ public void run(){ /* Check if we're logged in. 
*/ if(this.protocol == null){ throw new IllegalStateException("You need to login first!"); } /* Continuously receive packets until connection is closed. */ try{ while(true){ this.protocol.receivePacket(); } } catch(ProtocolException e){ /* Connection was closed. */ } } /** * Get user info. * * @return An XML string. */ public String user(){ /* Wait for data to become available (country, prodinfo). */ try{ if(!this.wait.tryAcquire(2, this.timeout, this.unit)){ throw new TimeoutException("Timeout while waiting for user data."); } } catch(InterruptedException e){ throw new RuntimeException(e); } catch(TimeoutException e){ throw new RuntimeException(e); } /* Release so this can be called again. */ this.wait.release(2); /* Build XML string. */ String xml = "<user>" + "<name>" + this.user.getName() + "</name>" + "<country>" + this.user.getCountry() + "</country>" + "<type>" + this.user.getProperty("type") + "</type>" + "</user>"; return xml; } /** * Fetch a toplist. * * @param type A toplist type. e.g. "artist", "album" or "track". * @param region A region code or null. e.g. "SE" or "DE". * @param username A username or null. * * @return An XML string. */ public String toplist(String type, String region, String username) throws TimeoutException { /* Create channel callback and parameter map. */ ChannelCallback callback = new ChannelCallback(); Map<String, String> params = new HashMap<String, String>(); /* Add parameters. */ params.put("type", type); params.put("region", region); params.put("username", username); /* Send toplist request. */ try{ this.protocol.sendToplistRequest(callback, params); } catch(ProtocolException e){ return null; } /* Get data. */ byte[] data = callback.get(this.timeout, this.unit); /* Return XML string. */ return new String(data, Charset.forName("UTF-8")); } /** * Search for an artist, album or track. * * @param query Your search query. * * @return An XML string. */ public String search(String query) throws TimeoutException { /* Create channel callback */ ChannelCallback callback = new ChannelCallback(); /* Send search query. */ try{ this.protocol.sendSearchQuery(callback, query); } catch(ProtocolException e){ return null; } /* Get data. */ byte[] data = callback.get(this.timeout, this.unit); /* Return XML string. */ return new String(data, Charset.forName("UTF-8")); } /** * Get an image (e.g. artist portrait or cover) by requesting * it from the server. * * @param id Id of the image to get. * * @return An array of bytes. */ public byte[] image(String id) throws TimeoutException { /* Data buffer. */ byte[] data; /* Check cache. */ if(this.cache != null && this.cache.contains("image", id)){ data = this.cache.load("image", id); } else{ /* Create channel callback */ ChannelCallback callback = new ChannelCallback(); /* Send image request. */ try{ this.protocol.sendImageRequest(callback, id); } catch(ProtocolException e){ return null; } /* Get data. */ data = callback.get(this.timeout, this.unit); /* Save to cache. */ if(this.cache != null){ this.cache.store("image", id, data); } } /* Return image data. */ return data; } /** * Browse artist, album or track info. * * @param type Type of media to browse for. * @param id Id of media to browse. * * @return An XML string. * * @see BrowseType */ public String browse(BrowseType type, String id) throws TimeoutException { /* Create channel callback */ ChannelCallback callback = new ChannelCallback(); /* Send browse request.
*/ try{ this.protocol.sendBrowseRequest(callback, type.getValue(), id); } catch(ProtocolException e){ return null; } /* Get data. */ byte[] data = callback.get(this.timeout, this.unit); /* Load XML. */ return new String(data, Charset.forName("UTF-8")); } /** * Browse info for multiple tracks. * * @param ids Ids of tracks to browse. * * @return An XML string. */ public String browse(Collection<String> ids) throws TimeoutException { /* Create channel callback */ ChannelCallback callback = new ChannelCallback(); /* Send browse request. */ try{ this.protocol.sendBrowseRequest(callback, BrowseType.TRACK.getValue(), ids); } catch(ProtocolException e){ return null; } /* Get data. */ byte[] data = callback.get(this.timeout, this.unit); /* Load XML. */ return new String(data, Charset.forName("UTF-8")); } /** * Get a list of stored playlists. * * @return An XML string. */ public String playlistContainer() throws TimeoutException { /* Create channel callback. */ ChannelCallback callback = new ChannelCallback(); /* Send stored playlists request. */ try{ this.protocol.sendPlaylistRequest(callback, null); } catch(ProtocolException e){ return null; } /* Get data and inflate it. */ byte[] data = callback.get(this.timeout, this.unit); /* Return string. */ return "<?xml version=\"1.0\" encoding=\"utf-8\" ?><playlist>" + new String(data, Charset.forName("UTF-8")) + "</playlist>"; } /** * Get a playlist. * * @param id Id of the playlist to load. * * @return An XML string. */ public String playlist(String id) throws TimeoutException { /* Create channel callback */ ChannelCallback callback = new ChannelCallback(); /* Send playlist request. */ try{ this.protocol.sendPlaylistRequest(callback, id); } catch(ProtocolException e){ return null; } /* Get data and inflate it. */ byte[] data = callback.get(this.timeout, this.unit); /* Return string. */ return "<?xml version=\"1.0\" encoding=\"utf-8\" ?><playlist>" + new String(data, Charset.forName("UTF-8")) + "</playlist>"; } /** * Stream a track to an output stream. * * @throws IOException */ public void stream(String id, String fileId, HttpExchange exchange) throws IOException, TimeoutException { /* Browse track. */ Track track = new Track(id); track.addFile(new File(fileId, "")); /* Create channel callbacks. */ ChannelCallback callback = new ChannelCallback(); /* Send play request (token notify + AES key). */ try{ this.protocol.sendAesKeyRequest(callback, track); } catch(ProtocolException e){ exchange.sendResponseHeaders(404, -1); return; } /* Get AES key. */ byte[] key = callback.get(this.timeout, this.unit); /* Stream channel. */ new ChannelStreamer(this.protocol, track, key, exchange); } /** * Handles incoming commands from the server. * * @param command A command. * @param payload Payload of packet. */ public void commandReceived(int command, byte[] payload){ switch(command){ case Command.COMMAND_SECRETBLK: { /* Check length. */ if(payload.length != 336){ System.err.format("Got command 0x02 with len %d, expected 336!\n", payload.length); } /* Check RSA public key. */ byte[] rsaPublicKey = RSA.keyToBytes(this.session.getRSAPublicKey()); for(int i = 0; i < 128; i++){ if(payload[16 + i] != rsaPublicKey[i]){ System.err.format("RSA public key doesn't match! %d\n", i); break; } } /* Send cache hash. */ try{ this.protocol.sendCacheHash(); } catch(ProtocolException e){ /* Just don't care. */ } break; } case Command.COMMAND_PING: { /* Ignore the timestamp but respond to the request.
*/ /* int timestamp = IntegerUtilities.bytesToInteger(payload); */ try{ this.protocol.sendPong(); } catch(ProtocolException e){ /* Just don't care. */ } break; } case Command.COMMAND_CHANNELDATA: { Channel.process(payload); break; } case Command.COMMAND_CHANNELERR: { Channel.error(payload); break; } case Command.COMMAND_AESKEY: { /* Channel id is at offset 2. AES Key is at offset 4. */ Channel.process(Arrays.copyOfRange(payload, 2, payload.length)); break; } case Command.COMMAND_COUNTRYCODE: { this.user.setCountry(new String(payload, Charset.forName("UTF-8"))); /* Release 'country' permit. */ this.wait.release(); break; } case Command.COMMAND_NOTIFY: { /* HTML-notification, shown in a yellow bar in the official client. */ /* Skip 11 byte header... */ this.user.setNotification(new String( Arrays.copyOfRange(payload, 11, payload.length), Charset.forName("UTF-8") )); break; } case Command.COMMAND_PRODINFO: { this.user = XMLUserParser.parseUser(payload, "UTF-8", this.user); /* Release 'prodinfo' permit. */ this.wait.release(); /* Payload is uncompressed XML. */ if(!this.user.isPremium()){ System.err.println( "Sorry, you need a premium account to use jotify (this is a restriction by Spotify)." ); } break; } default: { break; } } } public int length(){ return this.player.length(); } public void pause() { this.player.pause(); } public void play(Track track, PlaybackListener listener){ this.player.play(track, listener); } public void play(){ this.player.play(); } public int position(){ return this.player.position(); } public void stop(){ this.player.stop(); } public float volume(){ return this.player.volume(); } public void volume(float volume){ this.player.volume(volume); } }
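GatewayConnection pre-acquires both semaphore permits in its constructor, and the COMMAND_COUNTRYCODE and COMMAND_PRODINFO handlers each release one, so user() can block until both asynchronous fields have arrived and then immediately re-release them. A self-contained sketch of that readiness-gate pattern; ReadinessGate and its method names are hypothetical:

import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

// Sketch of the two-permit gate: readers wait for both callbacks, then
// hand the permits back so the gate stays open for subsequent callers.
final class ReadinessGate {
    private final Semaphore gate = new Semaphore(2);

    ReadinessGate() {
        gate.acquireUninterruptibly(2); // released by the two protocol callbacks
    }

    void countryArrived() { gate.release(); }

    void prodinfoArrived() { gate.release(); }

    void awaitReady(long timeout, TimeUnit unit) throws InterruptedException, TimeoutException {
        if (!gate.tryAcquire(2, timeout, unit)) {
            throw new TimeoutException("Timeout while waiting for user data.");
        }
        gate.release(2); // leave both permits available for the next caller
    }
}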
package com.communitysurvivalgames.thesurvivalgames.managers; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.List; import java.util.Map; import org.bukkit.Bukkit; import org.bukkit.ChatColor; import org.bukkit.Color; import org.bukkit.FireworkEffect; import org.bukkit.FireworkEffect.Type; import org.bukkit.Location; import org.bukkit.Material; import org.bukkit.block.Block; import org.bukkit.block.Chest; import org.bukkit.enchantments.Enchantment; import org.bukkit.entity.Player; import org.bukkit.inventory.ItemStack; import org.bukkit.inventory.meta.ItemMeta; import org.bukkit.potion.Potion; import org.bukkit.potion.PotionType; import com.communitysurvivalgames.thesurvivalgames.exception.ArenaNotFoundException; import com.communitysurvivalgames.thesurvivalgames.objects.SGArena; import com.communitysurvivalgames.thesurvivalgames.rollback.ChangedBlock; import com.communitysurvivalgames.thesurvivalgames.util.EconUtil; import com.communitysurvivalgames.thesurvivalgames.util.FireworkEffectPlayer; import com.communitysurvivalgames.thesurvivalgames.util.FireworkUtil; import com.communitysurvivalgames.thesurvivalgames.util.IconMenu; import com.communitysurvivalgames.thesurvivalgames.util.IconMenu.OptionClickEvent; public class SponsorManager { private final IconMenu players; private final IconMenu sponsor; private Map<String, String> inMenu = new HashMap<String, String>(); SGArena a; public SponsorManager(final SGArena a) { this.a = a; sponsor = new IconMenu("Select an item to sponsor", 54, false, new IconMenu.OptionClickEventHandler() { @Override public void onOptionClick(final OptionClickEvent event) { int price = Integer.parseInt(event.getItem().getItemMeta().getLore().get(1).replaceAll("[^0-9]", "")); if (!EconUtil.removePoints(event.getPlayer(), price)) { event.getPlayer().sendMessage(ChatColor.RED + "You cannot afford to buy that!"); event.setWillClose(true); return; } Player sponsored = Bukkit.getPlayer(inMenu.get(event.getPlayer().getName())); inMenu.remove(event.getPlayer().getName()); final Location loc = sponsored.getLocation(); final Location nloc = sponsored.getLocation(); a.broadcast(ChatColor.RED + "" + ChatColor.BOLD + "Look up, " + ChatColor.GRAY + ChatColor.BOLD + sponsored.getDisplayName() + ChatColor.RED + "" + ChatColor.BOLD + ", you have been sponsored!"); FireworkEffect fEffect = FireworkEffect.builder().withColor(Color.SILVER).withFade(Color.WHITE).trail(true).flicker(false).with(Type.BALL).build(); FireworkUtil.getCircleUtil().playFireworkLine(nloc.add(0, 50, 0), loc, fEffect, 50); Bukkit.getScheduler().scheduleSyncDelayedTask(SGApi.getPlugin(), new Runnable() { @Override public void run() { Block block = loc.getWorld().getBlockAt(loc); Material m = block.getType(); block.setType(Material.CHEST); Chest chest = (Chest) block.getState(); chest.getInventory().setItem(13, event.getItem()); FireworkEffect fChestEffect = FireworkEffect.builder().withTrail().flicker(false).with(Type.STAR).withColor(Color.RED).withColor(Color.BLUE).withColor(Color.YELLOW).withColor(Color.GREEN).build(); try { FireworkEffectPlayer.getFireworkEffectPlayer().playFirework(loc.getWorld(), new Location(loc.getWorld(), loc.getBlockX() + 0.5, loc.getBlockY(), loc.getBlockZ() + 0.5), fChestEffect); } catch (Exception e) { //If the firework dosen't work.... 
Too bad } try { SGArena a = SGApi.getArenaManager().getArena(event.getPlayer()); a.looted.add(chest); a.changedBlocks.add(new ChangedBlock(event.getPlayer().getWorld().getName(), m, (byte) 0, Material.CHEST, chest.getBlock().getData(), chest.getBlock().getX(), chest.getBlock().getY(), chest.getBlock().getZ())); } catch (ArenaNotFoundException ignored) {} } }, 155L); event.setWillClose(true); } }, SGApi.getPlugin()); ItemStack tmp; ItemMeta meta; ///////////////// // Axes // ///////////////// sponsor.setOption(0, new ItemStack(Material.WOOD_AXE), ChatColor.translateAlternateColorCodes('&', "Wooden Axe"), new String[] { ChatColor.translateAlternateColorCodes('&', "Basic, starter weapon"), EconUtil.isHooked() ? ChatColor.translateAlternateColorCodes('&', "&e&l$2") : ChatColor.translateAlternateColorCodes('&', "&e&l2 Points") }); sponsor.setOption(1, new ItemStack(Material.GOLD_AXE), ChatColor.translateAlternateColorCodes('&', "Golden Axe"), new String[] { ChatColor.translateAlternateColorCodes('&', "&eSuch Shiney"), EconUtil.isHooked() ? ChatColor.translateAlternateColorCodes('&', "&e&l$2") : ChatColor.translateAlternateColorCodes('&', "&e&l2 Points") }); sponsor.setOption(2, new ItemStack(Material.STONE_AXE), ChatColor.translateAlternateColorCodes('&', "Stone Axe"), new String[] { ChatColor.translateAlternateColorCodes('&', "&bA good solid bit of damage there!"), EconUtil.isHooked() ? ChatColor.translateAlternateColorCodes('&', "&e&l$7") : ChatColor.translateAlternateColorCodes('&', "&e&l7 Points") }); tmp = new ItemStack(Material.WOOD_AXE); tmp.addEnchantment(Enchantment.DAMAGE_ALL, 1); sponsor.setOption(3, tmp, ChatColor.translateAlternateColorCodes('&', "Wooden Axe - Sharp 1"), new String[] { ChatColor.translateAlternateColorCodes('&', "&dSo glowey!!!"), EconUtil.isHooked() ? ChatColor.translateAlternateColorCodes('&', "&e&l$10") : ChatColor.translateAlternateColorCodes('&', "&e&l10 Points") }); sponsor.setOption(4, new ItemStack(Material.IRON_AXE), ChatColor.translateAlternateColorCodes('&', "Iron Axe"), new String[] { ChatColor.translateAlternateColorCodes('&', "&aSmelted in the forges of Quantum64"), EconUtil.isHooked() ? ChatColor.translateAlternateColorCodes('&', "&e&l$16") : ChatColor.translateAlternateColorCodes('&', "&e&l16 Points") }); tmp = new ItemStack(Material.STONE_AXE); tmp.addEnchantment(Enchantment.DAMAGE_ALL, 1); sponsor.setOption(5, tmp, ChatColor.translateAlternateColorCodes('&', "Stone Axe - Sharp 1"), new String[] { ChatColor.translateAlternateColorCodes('&', "&2More damage than an iron axe!"), EconUtil.isHooked() ? ChatColor.translateAlternateColorCodes('&', "&e&l$22") : ChatColor.translateAlternateColorCodes('&', "&e&l22 Points") }); tmp = new ItemStack(Material.STONE_AXE); tmp.addEnchantment(Enchantment.DAMAGE_ALL, 2); sponsor.setOption(6, tmp, ChatColor.translateAlternateColorCodes('&', "Stone Axe - Sharp 2"), new String[] { ChatColor.translateAlternateColorCodes('&', "&3More damage than a diamond axe!"), EconUtil.isHooked() ? ChatColor.translateAlternateColorCodes('&', "&e&l$35") : ChatColor.translateAlternateColorCodes('&', "&e&l35 Points") }); tmp = new ItemStack(Material.IRON_AXE); tmp.addEnchantment(Enchantment.DAMAGE_ALL, 1); sponsor.setOption(7, tmp, ChatColor.translateAlternateColorCodes('&', "&6Rapture"), new String[] { ChatColor.translateAlternateColorCodes('&', "&5*Infused with magic*"), EconUtil.isHooked() ?
ChatColor.translateAlternateColorCodes('&', "&e&l$40") : ChatColor.translateAlternateColorCodes('&', "&e&l40 Points") }); tmp = new ItemStack(Material.DIAMOND_AXE); meta = tmp.getItemMeta(); meta.addEnchant(Enchantment.FIRE_ASPECT, 1, true); tmp.setItemMeta(meta); sponsor.setOption(8, tmp, ChatColor.translateAlternateColorCodes('&', "&6Firestorm"), new String[] { ChatColor.translateAlternateColorCodes('&', "&2Plazma Hardened"), EconUtil.isHooked() ? ChatColor.translateAlternateColorCodes('&', "&e&l$70") : ChatColor.translateAlternateColorCodes('&', "&e&l70 Points") }); ///////////////// // Swords // ///////////////// sponsor.setOption(9, new ItemStack(Material.WOOD_SWORD), ChatColor.translateAlternateColorCodes('&', "Wooden Sword"), new String[] { ChatColor.translateAlternateColorCodes('&', "Rough-cut wooden weapon"), EconUtil.isHooked() ? ChatColor.translateAlternateColorCodes('&', "&e&l$4") : ChatColor.translateAlternateColorCodes('&', "&e&l4 Points") }); sponsor.setOption(10, new ItemStack(Material.STONE_SWORD), ChatColor.translateAlternateColorCodes('&', "Stone Sword"), new String[] { ChatColor.translateAlternateColorCodes('&', "&bHand cut stone!"), EconUtil.isHooked() ? ChatColor.translateAlternateColorCodes('&', "&e&l$15") : ChatColor.translateAlternateColorCodes('&', "&e&l15 Points") }); tmp = new ItemStack(Material.WOOD_SWORD); tmp.addEnchantment(Enchantment.DAMAGE_ALL, 1); sponsor.setOption(11, tmp, ChatColor.translateAlternateColorCodes('&', "Wooden Sword - Sharp 1"), new String[] { ChatColor.translateAlternateColorCodes('&', "&dMagical Damage"), EconUtil.isHooked() ? ChatColor.translateAlternateColorCodes('&', "&e&l$25") : ChatColor.translateAlternateColorCodes('&', "&e&l25 Points") }); sponsor.setOption(12, new ItemStack(Material.IRON_SWORD), ChatColor.translateAlternateColorCodes('&', "Iron Sword"), new String[] { ChatColor.translateAlternateColorCodes('&', "&aCast in the forges of Quantum64"), EconUtil.isHooked() ? ChatColor.translateAlternateColorCodes('&', "&e&l$16") : ChatColor.translateAlternateColorCodes('&', "&e&l16 Points") }); tmp = new ItemStack(Material.STONE_SWORD); tmp.addEnchantment(Enchantment.DAMAGE_ALL, 1); sponsor.setOption(13, tmp, ChatColor.translateAlternateColorCodes('&', "Stone Sword - Sharp 1"), new String[] { ChatColor.translateAlternateColorCodes('&', "&2Mre damage than an iron sword!"), EconUtil.isHooked() ? ChatColor.translateAlternateColorCodes('&', "&e&l$37") : ChatColor.translateAlternateColorCodes('&', "&e&l37 Points") }); tmp = new ItemStack(Material.STONE_SWORD); tmp.addEnchantment(Enchantment.DAMAGE_ALL, 2); sponsor.setOption(14, tmp, ChatColor.translateAlternateColorCodes('&', "Stone Sword - Sharp 2"), new String[] { ChatColor.translateAlternateColorCodes('&', "&3Mre damage than a diamond sword!"), EconUtil.isHooked() ? ChatColor.translateAlternateColorCodes('&', "&e&l$44") : ChatColor.translateAlternateColorCodes('&', "&e&l44 Points") }); tmp = new ItemStack(Material.IRON_SWORD); tmp.addEnchantment(Enchantment.DAMAGE_ALL, 1); sponsor.setOption(15, tmp, ChatColor.translateAlternateColorCodes('&', "&6Eclipse"), new String[] { ChatColor.translateAlternateColorCodes('&', "&5Brings death and destruction"), EconUtil.isHooked() ? 
ChatColor.translateAlternateColorCodes('&', "&e&l$50") : ChatColor.translateAlternateColorCodes('&', "&e&l50 Points") }); tmp = new ItemStack(Material.DIAMOND_SWORD); tmp.addEnchantment(EnchantmentManager.shocking, 1); sponsor.setOption(16, tmp, ChatColor.translateAlternateColorCodes('&', "&6Corruption"), new String[] { ChatColor.translateAlternateColorCodes('&', "&0Infused with the power of magical Flux"), EconUtil.isHooked() ? ChatColor.translateAlternateColorCodes('&', "&e&l$82") : ChatColor.translateAlternateColorCodes('&', "&e&l82 Points") }); tmp = new ItemStack(Material.DIAMOND_SWORD); tmp.addEnchantment(Enchantment.DAMAGE_ALL, 1); sponsor.setOption(17, tmp, ChatColor.translateAlternateColorCodes('&', "&6Razor Sharp Dagger"), new String[] { ChatColor.translateAlternateColorCodes('&', "&2You might get cut just by looking at this thing..."), EconUtil.isHooked() ? ChatColor.translateAlternateColorCodes('&', "&e&l$90") : ChatColor.translateAlternateColorCodes('&', "&e&l90 Points") }); ///////////////// // Helmets // ///////////////// sponsor.setOption(18, new ItemStack(Material.LEATHER_HELMET), ChatColor.translateAlternateColorCodes('&', "Leather Helmet"), new String[] { ChatColor.translateAlternateColorCodes('&', "&bBasic protection - Better than nothing"), EconUtil.isHooked() ? ChatColor.translateAlternateColorCodes('&', "&e&l$4") : ChatColor.translateAlternateColorCodes('&', "&e&l4 Points") }); sponsor.setOption(19, new ItemStack(Material.IRON_HELMET), ChatColor.translateAlternateColorCodes('&', "Iron Helmet"), new String[] { ChatColor.translateAlternateColorCodes('&', "&eIronclad protection - For the brave"), EconUtil.isHooked() ? ChatColor.translateAlternateColorCodes('&', "&e&l$15") : ChatColor.translateAlternateColorCodes('&', "&e&l15 Points") }); tmp = new ItemStack(Material.LEATHER_HELMET); tmp.addEnchantment(Enchantment.PROTECTION_ENVIRONMENTAL, 1); sponsor.setOption(20, tmp, ChatColor.translateAlternateColorCodes('&', "Leather Helmet - Prot 1"), new String[] { ChatColor.translateAlternateColorCodes('&', "&dInstilled Magic"), EconUtil.isHooked() ? ChatColor.translateAlternateColorCodes('&', "&e&l$25") : ChatColor.translateAlternateColorCodes('&', "&e&l25 Points") }); tmp = new ItemStack(Material.IRON_HELMET); tmp.addEnchantment(Enchantment.THORNS, 1); sponsor.setOption(21, tmp, ChatColor.translateAlternateColorCodes('&', "&6Death's Deflection"), new String[] { ChatColor.translateAlternateColorCodes('&', "&2Caution - Spiky surface"), EconUtil.isHooked() ? ChatColor.translateAlternateColorCodes('&', "&e&l$37") : ChatColor.translateAlternateColorCodes('&', "&e&l37 Points") }); ///////////////// // Chestplates // ///////////////// sponsor.setOption(22, new ItemStack(Material.LEATHER_CHESTPLATE), ChatColor.translateAlternateColorCodes('&', "Leather Chestplate"), new String[] { ChatColor.translateAlternateColorCodes('&', "&bBasic protection - Better than nothing"), EconUtil.isHooked() ? ChatColor.translateAlternateColorCodes('&', "&e&l$4") : ChatColor.translateAlternateColorCodes('&', "&e&l4 Points") }); sponsor.setOption(23, new ItemStack(Material.IRON_CHESTPLATE), ChatColor.translateAlternateColorCodes('&', "Iron Chestplate"), new String[] { ChatColor.translateAlternateColorCodes('&', "&eIronclad protection - For the brave"), EconUtil.isHooked() ? 
ChatColor.translateAlternateColorCodes('&', "&e&l$15") : ChatColor.translateAlternateColorCodes('&', "&e&l15 Points") }); tmp = new ItemStack(Material.LEATHER_CHESTPLATE); tmp.addEnchantment(Enchantment.PROTECTION_ENVIRONMENTAL, 1); sponsor.setOption(24, tmp, ChatColor.translateAlternateColorCodes('&', "Leather Chestplate - Prot 1"), new String[] { ChatColor.translateAlternateColorCodes('&', "&dInstilled Magic"), EconUtil.isHooked() ? ChatColor.translateAlternateColorCodes('&', "&e&l$25") : ChatColor.translateAlternateColorCodes('&', "&e&l25 Points") }); tmp = new ItemStack(Material.IRON_CHESTPLATE); tmp.addEnchantment(Enchantment.PROTECTION_PROJECTILE, 1); sponsor.setOption(25, tmp, ChatColor.translateAlternateColorCodes('&', "&6Soul Keeper"), new String[] { ChatColor.translateAlternateColorCodes('&', "&dBane of Bows"), EconUtil.isHooked() ? ChatColor.translateAlternateColorCodes('&', "&e&l$37") : ChatColor.translateAlternateColorCodes('&', "&e&l37 Points") }); tmp = new ItemStack(Material.DIAMOND_CHESTPLATE); tmp.addEnchantment(Enchantment.PROTECTION_ENVIRONMENTAL, 1); sponsor.setOption(26, tmp, ChatColor.translateAlternateColorCodes('&', "&6Fusion Ward"), new String[] { ChatColor.translateAlternateColorCodes('&', "&2For the Godlike Warriors"), EconUtil.isHooked() ? ChatColor.translateAlternateColorCodes('&', "&e&l$97") : ChatColor.translateAlternateColorCodes('&', "&e&l97 Points") }); ///////////////// // Boots // ///////////////// sponsor.setOption(27, new ItemStack(Material.LEATHER_BOOTS), ChatColor.translateAlternateColorCodes('&', "Leather Boots"), new String[] { ChatColor.translateAlternateColorCodes('&', "&bBasic protection - Better than nothing"), EconUtil.isHooked() ? ChatColor.translateAlternateColorCodes('&', "&e&l$4") : ChatColor.translateAlternateColorCodes('&', "&e&l4 Points") }); sponsor.setOption(28, new ItemStack(Material.IRON_BOOTS), ChatColor.translateAlternateColorCodes('&', "Iron Boots"), new String[] { ChatColor.translateAlternateColorCodes('&', "&eIronclad protection - For the brave"), EconUtil.isHooked() ? ChatColor.translateAlternateColorCodes('&', "&e&l$15") : ChatColor.translateAlternateColorCodes('&', "&e&l15 Points") }); tmp = new ItemStack(Material.LEATHER_BOOTS); tmp.addEnchantment(Enchantment.PROTECTION_ENVIRONMENTAL, 1); sponsor.setOption(29, tmp, ChatColor.translateAlternateColorCodes('&', "Leather Boots - Prot 1"), new String[] { ChatColor.translateAlternateColorCodes('&', "&dInstilled Magic"), EconUtil.isHooked() ? ChatColor.translateAlternateColorCodes('&', "&e&l$25") : ChatColor.translateAlternateColorCodes('&', "&e&l25 Points") }); tmp = new ItemStack(Material.IRON_BOOTS); tmp.addEnchantment(Enchantment.PROTECTION_FALL, 2); sponsor.setOption(30, tmp, ChatColor.translateAlternateColorCodes('&', "&6Silverlight"), new String[] { ChatColor.translateAlternateColorCodes('&', "&aNow... You can fly!"), EconUtil.isHooked() ? ChatColor.translateAlternateColorCodes('&', "&e&l$37") : ChatColor.translateAlternateColorCodes('&', "&e&l37 Points") }); ///////////////// // Leggings // ///////////////// sponsor.setOption(31, new ItemStack(Material.LEATHER_LEGGINGS), ChatColor.translateAlternateColorCodes('&', "Leather Leggings"), new String[] { ChatColor.translateAlternateColorCodes('&', "&bBasic protection - Better than nothing"), EconUtil.isHooked() ? 
ChatColor.translateAlternateColorCodes('&', "&e&l$4") : ChatColor.translateAlternateColorCodes('&', "&e&l4 Points") }); sponsor.setOption(32, new ItemStack(Material.IRON_LEGGINGS), ChatColor.translateAlternateColorCodes('&', "Iron Leggings"), new String[] { ChatColor.translateAlternateColorCodes('&', "&eIronclad protection - For the brave"), EconUtil.isHooked() ? ChatColor.translateAlternateColorCodes('&', "&e&l$15") : ChatColor.translateAlternateColorCodes('&', "&e&l15 Points") }); tmp = new ItemStack(Material.LEATHER_LEGGINGS); tmp.addEnchantment(Enchantment.PROTECTION_ENVIRONMENTAL, 1); sponsor.setOption(33, tmp, ChatColor.translateAlternateColorCodes('&', "Leather Leggings - Prot 1"), new String[] { ChatColor.translateAlternateColorCodes('&', "&dInstilled Magic"), EconUtil.isHooked() ? ChatColor.translateAlternateColorCodes('&', "&e&l$25") : ChatColor.translateAlternateColorCodes('&', "&e&l25 Points") }); tmp = new ItemStack(Material.IRON_LEGGINGS); tmp.addEnchantment(Enchantment.PROTECTION_EXPLOSIONS, 1); sponsor.setOption(34, tmp, ChatColor.translateAlternateColorCodes('&', "&6Stormcaller"), new String[] { ChatColor.translateAlternateColorCodes('&', "&dTNT got nothin' on me bro"), EconUtil.isHooked() ? ChatColor.translateAlternateColorCodes('&', "&e&l$37") : ChatColor.translateAlternateColorCodes('&', "&e&l37 Points") }); tmp = new ItemStack(Material.DIAMOND_LEGGINGS); tmp.addEnchantment(Enchantment.PROTECTION_ENVIRONMENTAL, 2); sponsor.setOption(35, tmp, ChatColor.translateAlternateColorCodes('&', "&6Desolation"), new String[] { ChatColor.translateAlternateColorCodes('&', "&2For the Godlike Warriors"), EconUtil.isHooked() ? ChatColor.translateAlternateColorCodes('&', "&e&l$97") : ChatColor.translateAlternateColorCodes('&', "&e&l97 Points") }); ///////////////// // Food // ///////////////// sponsor.setOption(36, new ItemStack(Material.SPIDER_EYE), ChatColor.translateAlternateColorCodes('&', "Spider Eye"), new String[] { ChatColor.translateAlternateColorCodes('&', "&4If you have nothing else to eat..."), EconUtil.isHooked() ? ChatColor.translateAlternateColorCodes('&', "&e&l$1") : ChatColor.translateAlternateColorCodes('&', "&e&l1 Point") }); sponsor.setOption(37, new ItemStack(Material.BREAD, 2), ChatColor.translateAlternateColorCodes('&', "Bread"), new String[] { ChatColor.translateAlternateColorCodes('&', "&6Fresh baked bread - Just for you!"), EconUtil.isHooked() ? ChatColor.translateAlternateColorCodes('&', "&e&l$4") : ChatColor.translateAlternateColorCodes('&', "&e&l4 Points") }); sponsor.setOption(38, new ItemStack(Material.COOKIE, 6), ChatColor.translateAlternateColorCodes('&', "Cookie"), new String[] { ChatColor.translateAlternateColorCodes('&', "&fChocolate Chip!"), EconUtil.isHooked() ? ChatColor.translateAlternateColorCodes('&', "&e&l$9") : ChatColor.translateAlternateColorCodes('&', "&e&l9 Points") }); sponsor.setOption(39, new ItemStack(Material.PORK, 2), ChatColor.translateAlternateColorCodes('&', "Cooked Pork"), new String[] { ChatColor.translateAlternateColorCodes('&', "&dPiggie Noooooo"), EconUtil.isHooked() ? ChatColor.translateAlternateColorCodes('&', "&e&l$14") : ChatColor.translateAlternateColorCodes('&', "&e&l14 Point") }); sponsor.setOption(40, new ItemStack(Material.COOKED_BEEF, 2), ChatColor.translateAlternateColorCodes('&', "Steak"), new String[] { ChatColor.translateAlternateColorCodes('&', "&cWe're gonna' eat well tonight!"), EconUtil.isHooked() ? 
ChatColor.translateAlternateColorCodes('&', "&e&l$16") : ChatColor.translateAlternateColorCodes('&', "&e&l16 Points") });
sponsor.setOption(41, new ItemStack(Material.MUSHROOM_SOUP, 4), ChatColor.translateAlternateColorCodes('&', "&6Mushroom Stew"), new String[] {
    ChatColor.translateAlternateColorCodes('&', "&aRegen Health"),
    EconUtil.isHooked() ? ChatColor.translateAlternateColorCodes('&', "&e&l$20") : ChatColor.translateAlternateColorCodes('&', "&e&l20 Points") });
sponsor.setOption(42, new ItemStack(Material.GOLDEN_APPLE, 2), ChatColor.translateAlternateColorCodes('&', "&6Golden Apple"), new String[] {
    ChatColor.translateAlternateColorCodes('&', "&eExtra hearts for days"),
    EconUtil.isHooked() ? ChatColor.translateAlternateColorCodes('&', "&e&l$60") : ChatColor.translateAlternateColorCodes('&', "&e&l60 Points") });
sponsor.setOption(43, new ItemStack(Material.GOLDEN_APPLE, 1, (short) 1), ChatColor.translateAlternateColorCodes('&', "&6Golden Apple - T2"), new String[] {
    ChatColor.translateAlternateColorCodes('&', "&eNOTCH AAAPPPLLLLEEEEE"),
    EconUtil.isHooked() ? ChatColor.translateAlternateColorCodes('&', "&e&l$150") : ChatColor.translateAlternateColorCodes('&', "&e&l150 Points") });
sponsor.setOption(44, new ItemStack(Material.GOLDEN_CARROT, 10), ChatColor.translateAlternateColorCodes('&', "&6Golden Carrot"), new String[] {
    ChatColor.translateAlternateColorCodes('&', "&eUltimate food pack"),
    EconUtil.isHooked() ? ChatColor.translateAlternateColorCodes('&', "&e&l$75") : ChatColor.translateAlternateColorCodes('&', "&e&l75 Points") });
/////////////////
//   Potions   //
/////////////////
Potion p;
ItemStack potionItem;
p = new Potion(PotionType.REGEN, 1);
p.extend();
potionItem = new ItemStack(Material.POTION);
p.apply(potionItem);
sponsor.setOption(45, potionItem, ChatColor.translateAlternateColorCodes('&', "&6Regen Potion - T1"), new String[] {
    ChatColor.translateAlternateColorCodes('&', "&6Fill up those hearts!"),
    EconUtil.isHooked() ? ChatColor.translateAlternateColorCodes('&', "&e&l$10") : ChatColor.translateAlternateColorCodes('&', "&e&l10 Points") });
p = new Potion(PotionType.REGEN, 2);
potionItem = new ItemStack(Material.POTION);
p.apply(potionItem);
sponsor.setOption(46, potionItem, ChatColor.translateAlternateColorCodes('&', "&6Regen Potion - T2"), new String[] {
    ChatColor.translateAlternateColorCodes('&', "&6Fill up those hearts! - Reloaded"),
    EconUtil.isHooked() ? ChatColor.translateAlternateColorCodes('&', "&e&l$20") : ChatColor.translateAlternateColorCodes('&', "&e&l20 Points") });
p = new Potion(PotionType.INSTANT_HEAL, 1);
potionItem = new ItemStack(Material.POTION);
p.apply(potionItem);
sponsor.setOption(47, potionItem, ChatColor.translateAlternateColorCodes('&', "&6Instant Health - T1"), new String[] {
    ChatColor.translateAlternateColorCodes('&', "&6Free health? I'll take it"),
    EconUtil.isHooked() ? ChatColor.translateAlternateColorCodes('&', "&e&l$10") : ChatColor.translateAlternateColorCodes('&', "&e&l10 Points") });
p = new Potion(PotionType.INSTANT_HEAL, 2);
potionItem = new ItemStack(Material.POTION);
p.apply(potionItem);
sponsor.setOption(48, potionItem, ChatColor.translateAlternateColorCodes('&', "&6Instant Health - T2"), new String[] {
    ChatColor.translateAlternateColorCodes('&', "&6Fill up those hearts! - Reloaded"),
    EconUtil.isHooked() ?
ChatColor.translateAlternateColorCodes('&', "&e&l$20") : ChatColor.translateAlternateColorCodes('&', "&e&l20 Points") }); p = new Potion(PotionType.SPEED, 1); p.extend(); potionItem = new ItemStack(Material.POTION); p.apply(potionItem); sponsor.setOption(49, potionItem, ChatColor.translateAlternateColorCodes('&', "&6Swiftness - T1"), new String[] { ChatColor.translateAlternateColorCodes('&', "&6Run away!"), EconUtil.isHooked() ? ChatColor.translateAlternateColorCodes('&', "&e&l$15") : ChatColor.translateAlternateColorCodes('&', "&e&l15 Points") }); p = new Potion(PotionType.STRENGTH, 1); potionItem = new ItemStack(Material.POTION); p.apply(potionItem); sponsor.setOption(50, potionItem, ChatColor.translateAlternateColorCodes('&', "&6Strength - T1"), new String[] { ChatColor.translateAlternateColorCodes('&', "&6Fight like a real warrior!"), EconUtil.isHooked() ? ChatColor.translateAlternateColorCodes('&', "&e&l$30") : ChatColor.translateAlternateColorCodes('&', "&e&l30 Points") }); p = new Potion(PotionType.INSTANT_DAMAGE, 2); p.splash(); potionItem = new ItemStack(Material.POTION); p.apply(potionItem); sponsor.setOption(51, potionItem, ChatColor.translateAlternateColorCodes('&', "&6Instant Damage - T2"), new String[] { ChatColor.translateAlternateColorCodes('&', "&6Splash potion of harming"), EconUtil.isHooked() ? ChatColor.translateAlternateColorCodes('&', "&e&l$20") : ChatColor.translateAlternateColorCodes('&', "&e&l20 Points") }); p = new Potion(PotionType.POISON, 2); p.splash(); potionItem = new ItemStack(Material.POTION); p.apply(potionItem); sponsor.setOption(52, potionItem, ChatColor.translateAlternateColorCodes('&', "&6Poison - T2"), new String[] { ChatColor.translateAlternateColorCodes('&', "&6Call it a day for anyone chasing you"), EconUtil.isHooked() ? ChatColor.translateAlternateColorCodes('&', "&e&l$35") : ChatColor.translateAlternateColorCodes('&', "&e&l35 Points") }); p = new Potion(PotionType.INVISIBILITY, 2); potionItem = new ItemStack(Material.POTION); p.apply(potionItem); sponsor.setOption(53, potionItem, ChatColor.translateAlternateColorCodes('&', "&6Invisibility - T2"), new String[] { ChatColor.translateAlternateColorCodes('&', "&6Can't see me now muahahah"), EconUtil.isHooked() ? 
ChatColor.translateAlternateColorCodes('&', "&e&l$40") : ChatColor.translateAlternateColorCodes('&', "&e&l40 Points") }); players = new IconMenu("Select the player to sponsor", 27, false, new IconMenu.OptionClickEventHandler() { @Override public void onOptionClick(final OptionClickEvent event) { if (event.getItem().getType() == Material.REDSTONE_BLOCK) { event.setWillClose(true); return; } Bukkit.getScheduler().scheduleSyncDelayedTask(SGApi.getPlugin(), new Runnable() { @Override public void run() { inMenu.put(event.getPlayer().getName(), event.getItem().getItemMeta().getDisplayName()); sponsor.open(event.getPlayer()); } }, 2L); event.setWillClose(true); } }, SGApi.getPlugin()); } public void sponsor(Player sender) { List<ItemStack> items = new ArrayList<ItemStack>(); for (String s : a.getPlayers()) { try { if (SGApi.getArenaManager().getArena(Bukkit.getPlayer(s)).spectators.contains(s)) { continue; } } catch (ArenaNotFoundException e) { continue; } ItemStack item = new ItemStack(Material.EMERALD, (int) Bukkit.getPlayer(s).getHealth()); ItemMeta meta = item.getItemMeta(); meta.setDisplayName(s); item.setItemMeta(meta); items.add(item); } Collections.sort(items, new Comparator<ItemStack>() { public int compare(ItemStack o1, ItemStack o2) { return Integer.compare(o1.getAmount(), o2.getAmount()); } }); players.clearOptions(); for (int i = 0; i < items.size(); i++) { players.setOption(i, items.get(i), items.get(i).getItemMeta().getDisplayName(), new String[] { ChatColor.translateAlternateColorCodes('&', "&e&lClick to sponsor this person!"), ChatColor.translateAlternateColorCodes('&', "&aNote: Health = Amount of emeralds") }); } players.setOption(26, new ItemStack(Material.REDSTONE_BLOCK), ChatColor.RED + "Cancel", ChatColor.translateAlternateColorCodes('&', "&e&lExits out of this menu")); players.open(sender); } }
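// The menu flow above has a single public entry point: sponsor(Player), which
// builds the list of living tributes (health shown as the emerald stack size)
// and opens the player-selection IconMenu. A minimal sketch of wiring that
// entry point to a command follows; the "/sponsor" command name and the
// SponsorCommand/SponsorMenu class names are illustrative assumptions, not
// part of the plugin code above.
import org.bukkit.ChatColor;
import org.bukkit.command.Command;
import org.bukkit.command.CommandExecutor;
import org.bukkit.command.CommandSender;
import org.bukkit.entity.Player;

public class SponsorCommand implements CommandExecutor {
    // The menu holder shown above; its class name is outside this excerpt,
    // so "SponsorMenu" is a placeholder.
    private final SponsorMenu menu;

    public SponsorCommand(SponsorMenu menu) {
        this.menu = menu;
    }

    @Override
    public boolean onCommand(CommandSender sender, Command command, String label, String[] args) {
        if (!(sender instanceof Player)) {
            sender.sendMessage(ChatColor.RED + "Only players can sponsor.");
            return true;
        }
        // Opens the player-selection menu; a click there re-opens the item
        // menu via the delayed-task callback defined above.
        menu.sponsor((Player) sender);
        return true;
    }
}
// Registered once in the plugin's onEnable(), e.g.:
// getCommand("sponsor").setExecutor(new SponsorCommand(menu));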
// Copyright 2000-2022 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license. package org.jetbrains.kotlin.idea.imports; import com.intellij.testFramework.TestDataPath; import org.jetbrains.kotlin.idea.test.JUnit3RunnerWithInners; import org.jetbrains.kotlin.idea.test.KotlinTestUtils; import org.jetbrains.kotlin.test.TestMetadata; import org.jetbrains.kotlin.idea.test.TestRoot; import org.junit.runner.RunWith; /** * This class is generated by {@link org.jetbrains.kotlin.testGenerator.generator.TestGenerator}. * DO NOT MODIFY MANUALLY. */ @SuppressWarnings("all") @TestRoot("idea/tests") @TestDataPath("$CONTENT_ROOT") @RunWith(JUnit3RunnerWithInners.class) public abstract class JsOptimizeImportsTestGenerated extends AbstractJsOptimizeImportsTest { @RunWith(JUnit3RunnerWithInners.class) @TestMetadata("testData/editor/optimizeImports/js") public static class Js extends AbstractJsOptimizeImportsTest { private void runTest(String testDataFilePath) throws Exception { KotlinTestUtils.runTest(this::doTest, this, testDataFilePath); } @TestMetadata("DefaultJsImports.kt") public void testDefaultJsImports() throws Exception { runTest("testData/editor/optimizeImports/js/DefaultJsImports.kt"); } } @RunWith(JUnit3RunnerWithInners.class) @TestMetadata("testData/editor/optimizeImports/common") public abstract static class Common extends AbstractJsOptimizeImportsTest { @RunWith(JUnit3RunnerWithInners.class) @TestMetadata("testData/editor/optimizeImports/common/basic") public static class Basic extends AbstractJsOptimizeImportsTest { private void runTest(String testDataFilePath) throws Exception { KotlinTestUtils.runTest(this::doTest, this, testDataFilePath); } @TestMetadata("UnusedClass.kt") public void testUnusedClass() throws Exception { runTest("testData/editor/optimizeImports/common/basic/UnusedClass.kt"); } @TestMetadata("UnusedExtensionFunction.kt") public void testUnusedExtensionFunction() throws Exception { runTest("testData/editor/optimizeImports/common/basic/UnusedExtensionFunction.kt"); } @TestMetadata("UnusedExtensionProperty.kt") public void testUnusedExtensionProperty() throws Exception { runTest("testData/editor/optimizeImports/common/basic/UnusedExtensionProperty.kt"); } @TestMetadata("UnusedFunction.kt") public void testUnusedFunction() throws Exception { runTest("testData/editor/optimizeImports/common/basic/UnusedFunction.kt"); } @TestMetadata("UnusedProperty.kt") public void testUnusedProperty() throws Exception { runTest("testData/editor/optimizeImports/common/basic/UnusedProperty.kt"); } @TestMetadata("UnusedStarImport.kt") public void testUnusedStarImport() throws Exception { runTest("testData/editor/optimizeImports/common/basic/UnusedStarImport.kt"); } @TestMetadata("UsedClass.kt") public void testUsedClass() throws Exception { runTest("testData/editor/optimizeImports/common/basic/UsedClass.kt"); } @TestMetadata("UsedClassWithAlias.kt") public void testUsedClassWithAlias() throws Exception { runTest("testData/editor/optimizeImports/common/basic/UsedClassWithAlias.kt"); } @TestMetadata("UsedConstructor.kt") public void testUsedConstructor() throws Exception { runTest("testData/editor/optimizeImports/common/basic/UsedConstructor.kt"); } @TestMetadata("UsedExtensionFunction.kt") public void testUsedExtensionFunction() throws Exception { runTest("testData/editor/optimizeImports/common/basic/UsedExtensionFunction.kt"); } @TestMetadata("UsedExtensionFunctionImplicitReceiver.kt") public void testUsedExtensionFunctionImplicitReceiver() throws 
Exception { runTest("testData/editor/optimizeImports/common/basic/UsedExtensionFunctionImplicitReceiver.kt"); } @TestMetadata("UsedExtensionProperty.kt") public void testUsedExtensionProperty() throws Exception { runTest("testData/editor/optimizeImports/common/basic/UsedExtensionProperty.kt"); } @TestMetadata("UsedFunction.kt") public void testUsedFunction() throws Exception { runTest("testData/editor/optimizeImports/common/basic/UsedFunction.kt"); } @TestMetadata("UsedFunctionReference.kt") public void testUsedFunctionReference() throws Exception { runTest("testData/editor/optimizeImports/common/basic/UsedFunctionReference.kt"); } @TestMetadata("UsedProperty.kt") public void testUsedProperty() throws Exception { runTest("testData/editor/optimizeImports/common/basic/UsedProperty.kt"); } @TestMetadata("UsedStarImport.kt") public void testUsedStarImport() throws Exception { runTest("testData/editor/optimizeImports/common/basic/UsedStarImport.kt"); } @TestMetadata("UsedTypeQualifierWithAlias.kt") public void testUsedTypeQualifierWithAlias() throws Exception { runTest("testData/editor/optimizeImports/common/basic/UsedTypeQualifierWithAlias.kt"); } } @RunWith(JUnit3RunnerWithInners.class) @TestMetadata("testData/editor/optimizeImports/common/kt21515") public static class Kt21515 extends AbstractJsOptimizeImportsTest { private void runTest(String testDataFilePath) throws Exception { KotlinTestUtils.runTest(this::doTest, this, testDataFilePath); } @TestMetadata("callableReferenceOnClass.kt") public void testCallableReferenceOnClass() throws Exception { runTest("testData/editor/optimizeImports/common/kt21515/callableReferenceOnClass.kt"); } @TestMetadata("callableReferenceOnClassWithCompanion.kt") public void testCallableReferenceOnClassWithCompanion() throws Exception { runTest("testData/editor/optimizeImports/common/kt21515/callableReferenceOnClassWithCompanion.kt"); } @TestMetadata("callableReferenceOnObject.kt") public void testCallableReferenceOnObject() throws Exception { runTest("testData/editor/optimizeImports/common/kt21515/callableReferenceOnObject.kt"); } @TestMetadata("constructor.kt") public void testConstructor() throws Exception { runTest("testData/editor/optimizeImports/common/kt21515/constructor.kt"); } @TestMetadata("typeReference.kt") public void testTypeReference() throws Exception { runTest("testData/editor/optimizeImports/common/kt21515/typeReference.kt"); } } @RunWith(JUnit3RunnerWithInners.class) @TestMetadata("testData/editor/optimizeImports/common") public static class Uncategorized extends AbstractJsOptimizeImportsTest { private void runTest(String testDataFilePath) throws Exception { KotlinTestUtils.runTest(this::doTest, this, testDataFilePath); } @TestMetadata("ArrayAccessExpression.kt") public void testArrayAccessExpression() throws Exception { runTest("testData/editor/optimizeImports/common/ArrayAccessExpression.kt"); } @TestMetadata("BacktickSort.kt") public void testBacktickSort() throws Exception { runTest("testData/editor/optimizeImports/common/BacktickSort.kt"); } @TestMetadata("ClassMemberImported.kt") public void testClassMemberImported() throws Exception { runTest("testData/editor/optimizeImports/common/ClassMemberImported.kt"); } @TestMetadata("Companion.kt") public void testCompanion() throws Exception { runTest("testData/editor/optimizeImports/common/Companion.kt"); } @TestMetadata("CompanionExtensionFunctionReference.kt") public void testCompanionExtensionFunctionReference() throws Exception { 
runTest("testData/editor/optimizeImports/common/CompanionExtensionFunctionReference.kt"); } @TestMetadata("CompanionFunction.kt") public void testCompanionFunction() throws Exception { runTest("testData/editor/optimizeImports/common/CompanionFunction.kt"); } @TestMetadata("CompanionFunction2.kt") public void testCompanionFunction2() throws Exception { runTest("testData/editor/optimizeImports/common/CompanionFunction2.kt"); } @TestMetadata("ComponentFunction.kt") public void testComponentFunction() throws Exception { runTest("testData/editor/optimizeImports/common/ComponentFunction.kt"); } @TestMetadata("ConflictWithAlias.kt") public void testConflictWithAlias() throws Exception { runTest("testData/editor/optimizeImports/common/ConflictWithAlias.kt"); } @TestMetadata("ConflictWithAlias2.kt") public void testConflictWithAlias2() throws Exception { runTest("testData/editor/optimizeImports/common/ConflictWithAlias2.kt"); } @TestMetadata("ConflictWithUnresolvedName.kt") public void testConflictWithUnresolvedName() throws Exception { runTest("testData/editor/optimizeImports/common/ConflictWithUnresolvedName.kt"); } @TestMetadata("ConflictWithUnresolvedName2.kt") public void testConflictWithUnresolvedName2() throws Exception { runTest("testData/editor/optimizeImports/common/ConflictWithUnresolvedName2.kt"); } @TestMetadata("CurrentPackage.kt") public void testCurrentPackage() throws Exception { runTest("testData/editor/optimizeImports/common/CurrentPackage.kt"); } @TestMetadata("DefaultImportAndAlias.kt") public void testDefaultImportAndAlias() throws Exception { runTest("testData/editor/optimizeImports/common/DefaultImportAndAlias.kt"); } @TestMetadata("DefaultImportAndAlias2.kt") public void testDefaultImportAndAlias2() throws Exception { runTest("testData/editor/optimizeImports/common/DefaultImportAndAlias2.kt"); } @TestMetadata("DefaultObjectReference.kt") public void testDefaultObjectReference() throws Exception { runTest("testData/editor/optimizeImports/common/DefaultObjectReference.kt"); } @TestMetadata("Enums.kt") public void testEnums() throws Exception { runTest("testData/editor/optimizeImports/common/Enums.kt"); } @TestMetadata("ExtensionFunctionalTypeValFromCompanionObject.kt") public void testExtensionFunctionalTypeValFromCompanionObject() throws Exception { runTest("testData/editor/optimizeImports/common/ExtensionFunctionalTypeValFromCompanionObject.kt"); } @TestMetadata("ExtensionFunctionalTypeValFromCompanionObjectCallOnCompanion.kt") public void testExtensionFunctionalTypeValFromCompanionObjectCallOnCompanion() throws Exception { runTest("testData/editor/optimizeImports/common/ExtensionFunctionalTypeValFromCompanionObjectCallOnCompanion.kt"); } @TestMetadata("ExtensionFunctionalTypeValFromCompanionObjectNonExtCall.kt") public void testExtensionFunctionalTypeValFromCompanionObjectNonExtCall() throws Exception { runTest("testData/editor/optimizeImports/common/ExtensionFunctionalTypeValFromCompanionObjectNonExtCall.kt"); } @TestMetadata("InvokeFunction.kt") public void testInvokeFunction() throws Exception { runTest("testData/editor/optimizeImports/common/InvokeFunction.kt"); } @TestMetadata("IteratorFunction.kt") public void testIteratorFunction() throws Exception { runTest("testData/editor/optimizeImports/common/IteratorFunction.kt"); } @TestMetadata("IteratorFunction2.kt") public void testIteratorFunction2() throws Exception { runTest("testData/editor/optimizeImports/common/IteratorFunction2.kt"); } @TestMetadata("KT11640.kt") public void testKT11640() throws Exception { 
runTest("testData/editor/optimizeImports/common/KT11640.kt"); } @TestMetadata("KT11640_1.kt") public void testKT11640_1() throws Exception { runTest("testData/editor/optimizeImports/common/KT11640_1.kt"); } @TestMetadata("KT13689.kt") public void testKT13689() throws Exception { runTest("testData/editor/optimizeImports/common/KT13689.kt"); } @TestMetadata("KT9875.kt") public void testKT9875() throws Exception { runTest("testData/editor/optimizeImports/common/KT9875.kt"); } @TestMetadata("KeywordNames.kt") public void testKeywordNames() throws Exception { runTest("testData/editor/optimizeImports/common/KeywordNames.kt"); } @TestMetadata("Kt2488EnumEntry.kt") public void testKt2488EnumEntry() throws Exception { runTest("testData/editor/optimizeImports/common/Kt2488EnumEntry.kt"); } @TestMetadata("Kt2709.kt") public void testKt2709() throws Exception { runTest("testData/editor/optimizeImports/common/Kt2709.kt"); } @TestMetadata("Kt32409.kt") public void testKt32409() throws Exception { runTest("testData/editor/optimizeImports/common/Kt32409.kt"); } @TestMetadata("MemberImports.kt") public void testMemberImports() throws Exception { runTest("testData/editor/optimizeImports/common/MemberImports.kt"); } @TestMetadata("MembersInScope.kt") public void testMembersInScope() throws Exception { runTest("testData/editor/optimizeImports/common/MembersInScope.kt"); } @TestMetadata("NestedClassConstructorReferenceThroughTopLevelClass.kt") public void testNestedClassConstructorReferenceThroughTopLevelClass() throws Exception { runTest("testData/editor/optimizeImports/common/NestedClassConstructorReferenceThroughTopLevelClass.kt"); } @TestMetadata("NestedClassReferenceOutsideClassBody.kt") public void testNestedClassReferenceOutsideClassBody() throws Exception { runTest("testData/editor/optimizeImports/common/NestedClassReferenceOutsideClassBody.kt"); } @TestMetadata("NestedClassReferenceThroughTopLevelClass.kt") public void testNestedClassReferenceThroughTopLevelClass() throws Exception { runTest("testData/editor/optimizeImports/common/NestedClassReferenceThroughTopLevelClass.kt"); } @TestMetadata("Overloads.kt") public void testOverloads() throws Exception { runTest("testData/editor/optimizeImports/common/Overloads.kt"); } @TestMetadata("ProvideDelegate.kt") public void testProvideDelegate() throws Exception { runTest("testData/editor/optimizeImports/common/ProvideDelegate.kt"); } @TestMetadata("ProvideDelegate2.kt") public void testProvideDelegate2() throws Exception { runTest("testData/editor/optimizeImports/common/ProvideDelegate2.kt"); } @TestMetadata("ResolvedImportAndUnresolvedReference.kt") public void testResolvedImportAndUnresolvedReference() throws Exception { runTest("testData/editor/optimizeImports/common/ResolvedImportAndUnresolvedReference.kt"); } @TestMetadata("SeveralClasses.kt") public void testSeveralClasses() throws Exception { runTest("testData/editor/optimizeImports/common/SeveralClasses.kt"); } @TestMetadata("SeveralClasses2.kt") public void testSeveralClasses2() throws Exception { runTest("testData/editor/optimizeImports/common/SeveralClasses2.kt"); } @TestMetadata("SeveralClasses3.kt") public void testSeveralClasses3() throws Exception { runTest("testData/editor/optimizeImports/common/SeveralClasses3.kt"); } @TestMetadata("TwoConstructors.kt") public void testTwoConstructors() throws Exception { runTest("testData/editor/optimizeImports/common/TwoConstructors.kt"); } @TestMetadata("UnresolvedImport.kt") public void testUnresolvedImport() throws Exception { 
runTest("testData/editor/optimizeImports/common/UnresolvedImport.kt"); } @TestMetadata("UnresolvedImport2.kt") public void testUnresolvedImport2() throws Exception { runTest("testData/editor/optimizeImports/common/UnresolvedImport2.kt"); } @TestMetadata("WithAlias.kt") public void testWithAlias() throws Exception { runTest("testData/editor/optimizeImports/common/WithAlias.kt"); } @TestMetadata("WithAlias2.kt") public void testWithAlias2() throws Exception { runTest("testData/editor/optimizeImports/common/WithAlias2.kt"); } } } }
/** * Copyright 2010 Richard Johnson & Orin Eman * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * --- * * This file is part of java-libpst. * * java-libpst is free software: you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * java-libpst is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public License * along with java-libpst. If not, see <http://www.gnu.org/licenses/>. * */ package com.pff; import java.io.*; import java.util.*; /** * PSTFile is the containing class that allows you to access items within a .pst file. * Start here, get the root of the folders and work your way down through your items. * @author Richard Johnson */ public class PSTFile { public static final int ENCRYPTION_TYPE_NONE = 0; public static final int ENCRYPTION_TYPE_COMPRESSIBLE = 1; private static final int MESSAGE_STORE_DESCRIPTOR_IDENTIFIER = 33; private static final int ROOT_FOLDER_DESCRIPTOR_IDENTIFIER = 290; public static final int PST_TYPE_ANSI = 14; protected static final int PST_TYPE_ANSI_2 = 15; public static final int PST_TYPE_UNICODE = 23; // Known GUIDs // Local IDs first public static final int PS_PUBLIC_STRINGS = 0; public static final int PSETID_Common = 1; public static final int PSETID_Address = 2; public static final int PS_INTERNET_HEADERS = 3; public static final int PSETID_Appointment = 4; public static final int PSETID_Meeting = 5; public static final int PSETID_Log = 6; public static final int PSETID_Messaging = 7; public static final int PSETID_Note = 8; public static final int PSETID_PostRss = 9; public static final int PSETID_Task = 10; public static final int PSETID_UnifiedMessaging = 11; public static final int PS_MAPI = 12; public static final int PSETID_AirSync = 13; public static final int PSETID_Sharing = 14; // Now the string guids private static final String guidStrings[] = { "00020329-0000-0000-C000-000000000046", "00062008-0000-0000-C000-000000000046", "00062004-0000-0000-C000-000000000046", "00020386-0000-0000-C000-000000000046", "00062002-0000-0000-C000-000000000046", "6ED8DA90-450B-101B-98DA-00AA003F1305", "0006200A-0000-0000-C000-000000000046", "41F28F13-83F4-4114-A584-EEDB5A6B0BFF", "0006200E-0000-0000-C000-000000000046", "00062041-0000-0000-C000-000000000046", "00062003-0000-0000-C000-000000000046", "4442858E-A9E3-4E80-B900-317A210CC15B", "00020328-0000-0000-C000-000000000046", "71035549-0739-4DCB-9163-00F0580DBBDF", "00062040-0000-0000-C000-000000000046" }; private HashMap<UUID, Integer> guidMap = new HashMap<UUID, Integer>(); // the type of encryption the files uses. private int encryptionType = 0; // our all important tree. 
private LinkedHashMap<Integer, LinkedList<DescriptorIndexNode>> childrenDescriptorTree = null; private HashMap<Long, Integer> nameToId = new HashMap<Long, Integer>(); private HashMap<String, Integer> stringToId = new HashMap<String, Integer>(); private static HashMap<Integer, Long> idToName = new HashMap<Integer, Long>(); private HashMap<Integer, String> idToString = new HashMap<Integer, String>(); private byte[] guids = null; private int itemCount = 0; private RandomAccessFile in; /** * constructor * @param fileName * @throws FileNotFoundException * @throws PSTException * @throws IOException */ public PSTFile(String fileName) throws FileNotFoundException, PSTException, IOException { this(new File(fileName)); } public PSTFile(File fileName) throws FileNotFoundException, PSTException, IOException { // attempt to open the file. in = new RandomAccessFile(fileName, "r"); // get the first 4 bytes, should be !BDN try { byte[] temp = new byte[4]; in.read(temp); String strValue = new String(temp); if (!strValue.equals("!BDN")) { throw new PSTException("Invalid file header: "+strValue+", expected: !BDN"); } // make sure we are using a supported version of a PST... byte[] fileTypeBytes = new byte[2]; in.seek(10); in.read(fileTypeBytes); // ANSI file types can be 14 or 15: if (fileTypeBytes[0] == PSTFile.PST_TYPE_ANSI_2) { fileTypeBytes[0] = PSTFile.PST_TYPE_ANSI; } if (fileTypeBytes[0] != PSTFile.PST_TYPE_ANSI && fileTypeBytes[0] != PSTFile.PST_TYPE_UNICODE) { throw new PSTException("Unrecognised PST File version: "+fileTypeBytes[0]); } this.pstFileType = fileTypeBytes[0]; // make sure encryption is turned off at this stage... if (this.getPSTFileType() == PST_TYPE_ANSI) { in.seek(461); } else { in.seek(513); } encryptionType = in.readByte(); if (encryptionType == 0x02) { throw new PSTException("Only unencrypted and compressable PST files are supported at this time"); } // build out name to id map. 
processNameToIdMap(in);
    } catch (IOException err) {
        throw new PSTException("Unable to read PST Sig", err);
    }
}

private int pstFileType = 0;

public int getPSTFileType() {
    return pstFileType;
}

/**
 * read the name-to-id map from the file and load it in
 * @param in
 * @throws IOException
 * @throws PSTException
 */
private void processNameToIdMap(RandomAccessFile in) throws IOException, PSTException {
    // Create our guid map
    for ( int i = 0; i < guidStrings.length; ++i ) {
        UUID uuid = UUID.fromString(guidStrings[i]);
        guidMap.put(uuid, i);
        /* System.out.printf("guidMap[{%s}] = %d\n", uuid.toString(), i); /**/
    }

    // process the name to id map
    DescriptorIndexNode nameToIdMapDescriptorNode = (getDescriptorIndexNode(97));
    //nameToIdMapDescriptorNode.readData(this);

    // get the descriptors if we have them
    HashMap<Integer, PSTDescriptorItem> localDescriptorItems = null;
    if (nameToIdMapDescriptorNode.localDescriptorsOffsetIndexIdentifier != 0) {
        //PSTDescriptor descriptor = new PSTDescriptor(this, nameToIdMapDescriptorNode.localDescriptorsOffsetIndexIdentifier);
        //localDescriptorItems = descriptor.getChildren();
        localDescriptorItems = this.getPSTDescriptorItems(nameToIdMapDescriptorNode.localDescriptorsOffsetIndexIdentifier);
    }

    // process the map
    //PSTTableBC bcTable = new PSTTableBC(nameToIdMapDescriptorNode.dataBlock.data, nameToIdMapDescriptorNode.dataBlock.blockOffsets);
    OffsetIndexItem off = this.getOffsetIndexNode(nameToIdMapDescriptorNode.dataOffsetIndexIdentifier);
    PSTNodeInputStream nodein = new PSTNodeInputStream(this, off);
    byte[] tmp = new byte[1024];
    nodein.read(tmp);
    PSTTableBC bcTable = new PSTTableBC(nodein);
    HashMap<Integer, PSTTableBCItem> tableItems = (bcTable.getItems());

    // Get the guids
    PSTTableBCItem guidEntry = tableItems.get(2); // PidTagNameidStreamGuid
    guids = getData(guidEntry, localDescriptorItems);
    int nGuids = guids.length / 16;
    UUID[] uuidArray = new UUID[nGuids];
    int[] uuidIndexes = new int[nGuids];
    int offset = 0;
    for ( int i = 0; i < nGuids; ++i ) {
        long mostSigBits = (PSTObject.convertLittleEndianBytesToLong(guids, offset, offset+4) << 32) |
                           (PSTObject.convertLittleEndianBytesToLong(guids, offset+4, offset+6) << 16) |
                           PSTObject.convertLittleEndianBytesToLong(guids, offset+6, offset+8);
        long leastSigBits = PSTObject.convertBigEndianBytesToLong(guids, offset+8, offset+16);
        uuidArray[i] = new UUID(mostSigBits, leastSigBits);
        if ( guidMap.containsKey(uuidArray[i]) ) {
            uuidIndexes[i] = guidMap.get(uuidArray[i]);
        } else {
            uuidIndexes[i] = -1; // We don't know this guid
        }
        /* System.out.printf("uuidArray[%d] = {%s},%d\n", i, uuidArray[i].toString(), uuidIndexes[i]); /**/
        offset += 16;
    }

    // if we have a reference to an internal descriptor
    PSTTableBCItem mapEntries = tableItems.get(3);
    byte[] nameToIdByte = getData(mapEntries, localDescriptorItems);
    PSTTableBCItem stringMapEntries = tableItems.get(4);
    byte[] stringNameToIdByte = getData(stringMapEntries, localDescriptorItems);

    // process the entries
    for (int x = 0; x+8 < nameToIdByte.length; x += 8) {
        int dwPropertyId = (int)PSTObject.convertLittleEndianBytesToLong(nameToIdByte, x, x+4);
        int wGuid = (int)PSTObject.convertLittleEndianBytesToLong(nameToIdByte, x+4, x+6);
        int wPropIdx = ((int)PSTObject.convertLittleEndianBytesToLong(nameToIdByte, x+6, x+8));
        if ( (wGuid & 0x0001) == 0 ) {
            wPropIdx += 0x8000;
            wGuid >>= 1;
            int guidIndex;
            if ( wGuid == 1 ) {
                guidIndex = PS_MAPI;
            } else if ( wGuid == 2 ) {
                guidIndex = PS_PUBLIC_STRINGS;
            } else {
                guidIndex = uuidIndexes[wGuid-3];
            }
            nameToId.put((long)dwPropertyId | ((long)guidIndex
<< 32), wPropIdx); idToName.put(wPropIdx, (long)dwPropertyId); /* System.out.printf("0x%08X:%04X, 0x%08X\n", dwPropertyId, guidIndex, wPropIdx); /**/ } else { // else the identifier is a string // dwPropertyId becomes thHke byte offset into the String stream in which the string name of the property is stored. int len = (int)PSTObject.convertLittleEndianBytesToLong( stringNameToIdByte, dwPropertyId, dwPropertyId+4 ); byte[] keyByteValue = new byte[len]; System.arraycopy(stringNameToIdByte, dwPropertyId+4, keyByteValue, 0, keyByteValue.length); wPropIdx += 0x8000; String key = new String(keyByteValue, "UTF-16LE"); stringToId.put(key, wPropIdx); idToString.put(wPropIdx, key); } } } private byte [] getData(PSTTableItem item, HashMap<Integer, PSTDescriptorItem> localDescriptorItems) throws IOException, PSTException { if ( item.data.length != 0 ) { return item.data; } if ( localDescriptorItems == null ) { throw new PSTException("External reference but no localDescriptorItems in PSTFile.getData()"); } if ( item.entryValueType != 0x0102 ) { throw new PSTException("Attempting to get non-binary data in PSTFile.getData()"); } PSTDescriptorItem mapDescriptorItem = localDescriptorItems.get(item.entryValueReference); if (mapDescriptorItem == null) { throw new PSTException ("not here "+item.entryValueReference + "\n"+localDescriptorItems.keySet()); } return mapDescriptorItem.getData(); } int getNameToIdMapItem(int key, int propertySetIndex) { long lKey = ((long)propertySetIndex << 32) | (long)key; Integer i = nameToId.get(lKey); if ( i == null ) { return -1; } return i; } int getPublicStringToIdMapItem(String key) { Integer i = this.stringToId.get(key); if (i == null) { return -1; } return i; } static long getNameToIdMapKey(int id) //throws PSTException { Long i = idToName.get(id); if ( i == null ) { //throw new PSTException("Name to Id mapping not found"); return -1; } return i; } static private Properties propertyInternetCodePages = null; static private boolean bCPFirstTime = true; static String getInternetCodePageCharset(int propertyId) { if ( bCPFirstTime ) { bCPFirstTime = false; propertyInternetCodePages = new Properties(); try { InputStream propertyStream = PSTFile.class.getResourceAsStream("/InternetCodepages.txt"); if ( propertyStream != null ) { propertyInternetCodePages.load(propertyStream); } else { propertyInternetCodePages = null; } } catch (FileNotFoundException e) { propertyInternetCodePages = null; e.printStackTrace(); } catch (IOException e) { propertyInternetCodePages = null; e.printStackTrace(); } } if ( propertyInternetCodePages != null ) { return propertyInternetCodePages.getProperty(propertyId+""); } return null; } static private Properties propertyNames = null; static private boolean bFirstTime = true; static String getPropertyName(int propertyId, boolean bNamed) { if ( bFirstTime ) { bFirstTime = false; propertyNames = new Properties(); try { InputStream propertyStream = PSTFile.class.getResourceAsStream("/PropertyNames.txt"); if ( propertyStream != null ) { propertyNames.load(propertyStream); } else { propertyNames = null; } } catch (FileNotFoundException e) { propertyNames = null; e.printStackTrace(); } catch (IOException e) { propertyNames = null; e.printStackTrace(); } } if ( propertyNames != null ) { String key = String.format((bNamed ? 
"%08X" : "%04X"), propertyId); return propertyNames.getProperty(key); } return null; } static String getPropertyDescription(int entryType, int entryValueType) { String ret = ""; if ( entryType < 0x8000 ) { String name = PSTFile.getPropertyName(entryType, false); if ( name != null ) { ret = String.format("%s:%04X: ", name, entryValueType); } else { ret = String.format("0x%04X:%04X: ", entryType, entryValueType); } } else { long type = PSTFile.getNameToIdMapKey(entryType); if ( type == -1 ) { ret = String.format("0xFFFF(%04X):%04X: ", entryType, entryValueType); } else { String name = PSTFile.getPropertyName((int)type, true); if ( name != null ) { ret = String.format("%s(%04X):%04X: ", name, entryType, entryValueType); } else { ret = String.format("0x%04X(%04X):%04X: ", type, entryType, entryValueType); } } } return ret; } /** * destructor just closes the file handle... */ @Override protected void finalize() throws IOException { in.close(); } /** * get the type of encryption the file uses * @return encryption type used in the PST File */ public int getEncryptionType() { return this.encryptionType; } /** * get the handle to the file we are currently accessing */ public RandomAccessFile getFileHandle() { return this.in; } /** * get the message store of the PST file. * Note that this doesn't really have much information, better to look under the root folder * @throws PSTException * @throws IOException */ public PSTMessageStore getMessageStore() throws PSTException, IOException { DescriptorIndexNode messageStoreDescriptor = getDescriptorIndexNode(MESSAGE_STORE_DESCRIPTOR_IDENTIFIER); return new PSTMessageStore(this, messageStoreDescriptor); } /** * get the root folder for the PST file. * You should find all of your data under here... * @throws PSTException * @throws IOException */ public PSTFolder getRootFolder() throws PSTException, IOException { DescriptorIndexNode rootFolderDescriptor = getDescriptorIndexNode(ROOT_FOLDER_DESCRIPTOR_IDENTIFIER); PSTFolder output = new PSTFolder(this, rootFolderDescriptor); return output; } PSTNodeInputStream readLeaf(long bid) throws IOException, PSTException { //PSTFileBlock ret = null; PSTNodeInputStream ret = null; // get the index node for the descriptor index OffsetIndexItem offsetItem = getOffsetIndexNode(bid); return new PSTNodeInputStream(this, offsetItem); } public int getLeafSize(long bid) throws IOException, PSTException { OffsetIndexItem offsetItem = getOffsetIndexNode(bid); // Internal block? if ( (offsetItem.indexIdentifier & 0x02) == 0 ) { // No, return the raw size return offsetItem.size; } // we only need the first 8 bytes byte[] data = new byte[8]; in.seek(offsetItem.fileOffset); in.read(data); // we are an array, get the sum of the sizes... return (int)PSTObject.convertLittleEndianBytesToLong(data, 4, 8); } /** * Read a file offset from the file * PST Files have this tendency to store file offsets (pointers) in 8 little endian bytes. * Convert this to a long for seeking to. 
* @param in handle for PST file * @param startOffset where to read the 8 bytes from * @return long representing the read location * @throws IOException */ protected long extractLEFileOffset(long startOffset) throws IOException { long offset = 0; if (this.getPSTFileType() == PSTFile.PST_TYPE_ANSI) { in.seek(startOffset); byte[] temp = new byte[4]; in.read(temp); offset |= temp[3] & 0xff; offset <<= 8; offset |= temp[2] & 0xff; offset <<= 8; offset |= temp[1] & 0xff; offset <<= 8; offset |= temp[0] & 0xff; } else { in.seek(startOffset); byte[] temp = new byte[8]; in.read(temp); offset = temp[7] & 0xff; long tmpLongValue; for (int x = 6; x >= 0; x--) { offset = offset << 8; tmpLongValue = (long)temp[x] & 0xff; offset |= tmpLongValue; } } return offset; } /** * Generic function used by getOffsetIndexNode and getDescriptorIndexNode for navigating the PST B-Trees * @param in * @param index * @param descTree * @return * @throws IOException * @throws PSTException */ private byte[] findBtreeItem(RandomAccessFile in, long index, boolean descTree) throws IOException, PSTException { long btreeStartOffset; // first find the starting point for the offset index if (this.getPSTFileType() == PST_TYPE_ANSI) { btreeStartOffset = this.extractLEFileOffset(196); if (descTree) { btreeStartOffset = this.extractLEFileOffset(188); } } else { btreeStartOffset = this.extractLEFileOffset(240); if (descTree) { btreeStartOffset = this.extractLEFileOffset(224); } } // okay, what we want to do is navigate the tree until you reach the bottom.... // try and read the index b-tree byte[] temp = new byte[2]; if (this.getPSTFileType() == PST_TYPE_ANSI) { in.seek(btreeStartOffset+500); } else { in.seek(btreeStartOffset+496); } in.read(temp); while ((temp[0] == 0xffffff80 && temp[1] == 0xffffff80 && !descTree) || (temp[0] == 0xffffff81 && temp[1] == 0xffffff81 && descTree)) { // get the rest of the data.... byte[] branchNodeItems; if (this.getPSTFileType() == PST_TYPE_ANSI) { branchNodeItems = new byte[496]; } else { branchNodeItems = new byte[488]; } in.seek(btreeStartOffset); in.read(branchNodeItems); int numberOfItems = in.read(); in.read(); // maxNumberOfItems in.read(); // itemSize int levelsToLeaf = in.read(); if (levelsToLeaf > 0) { boolean found = false; for (int x = 0; x < numberOfItems; x++) { if (this.getPSTFileType() == PST_TYPE_ANSI) { long indexIdOfFirstChildNode = extractLEFileOffset(btreeStartOffset + (x * 12)); if (indexIdOfFirstChildNode > index) { // get the address for the child first node in this group btreeStartOffset = extractLEFileOffset(btreeStartOffset+((x-1) * 12)+8); in.seek(btreeStartOffset+500); in.read(temp); found = true; break; } } else { long indexIdOfFirstChildNode = extractLEFileOffset(btreeStartOffset + (x * 24)); if (indexIdOfFirstChildNode > index) { // get the address for the child first node in this group btreeStartOffset = extractLEFileOffset(btreeStartOffset+((x-1) * 24)+16); in.seek(btreeStartOffset+496); in.read(temp); found = true; break; } } } if (!found) { // it must be in the very last branch... if (this.getPSTFileType() == PST_TYPE_ANSI) { btreeStartOffset = extractLEFileOffset(btreeStartOffset+((numberOfItems-1) * 12)+8); in.seek(btreeStartOffset+500); in.read(temp); } else { btreeStartOffset = extractLEFileOffset(btreeStartOffset+((numberOfItems-1) * 24)+16); in.seek(btreeStartOffset+496); in.read(temp); } } } else { // we are at the bottom of the tree... // we want to get our file offset! 
for (int x = 0; x < numberOfItems; x++) { if (this.getPSTFileType() == PSTFile.PST_TYPE_ANSI) { if (descTree) { // The 32-bit descriptor index b-tree leaf node item in.seek(btreeStartOffset + (x * 16)); temp = new byte[4]; in.read(temp); if (PSTObject.convertLittleEndianBytesToLong(temp) == index) { // give me the offset index please! in.seek(btreeStartOffset + (x * 16)); temp = new byte[16]; in.read(temp); return temp; } } else { // The 32-bit (file) offset index item long indexIdOfFirstChildNode = extractLEFileOffset(btreeStartOffset + (x * 12)); if (indexIdOfFirstChildNode == index) { // we found it!!!! OMG //System.out.println("item found as item #"+x); in.seek(btreeStartOffset + (x * 12)); temp = new byte[12]; in.read(temp); return temp; } } } else { if (descTree) { // The 64-bit descriptor index b-tree leaf node item in.seek(btreeStartOffset + (x * 32)); temp = new byte[4]; in.read(temp); if (PSTObject.convertLittleEndianBytesToLong(temp) == index) { // give me the offset index please! in.seek(btreeStartOffset + (x * 32)); temp = new byte[32]; in.read(temp); return temp; } } else { // The 64-bit (file) offset index item long indexIdOfFirstChildNode = extractLEFileOffset(btreeStartOffset + (x * 24)); if (indexIdOfFirstChildNode == index) { // we found it!!!! OMG //System.out.println("item found as item #"+x); in.seek(btreeStartOffset + (x * 24)); temp = new byte[24]; in.read(temp); return temp; } } } } throw new PSTException("Unable to find "+index); } } throw new PSTException("Unable to find node: "+index); } /** * navigate the internal descriptor B-Tree and find a specific item * @param in * @param identifier * @return the descriptor node for the item * @throws IOException * @throws PSTException */ DescriptorIndexNode getDescriptorIndexNode(long identifier) throws IOException, PSTException { return new DescriptorIndexNode(findBtreeItem(in, identifier, true), this.getPSTFileType()); } /** * navigate the internal index B-Tree and find a specific item * @param in * @param identifier * @return the offset index item * @throws IOException * @throws PSTException */ OffsetIndexItem getOffsetIndexNode(long identifier) throws IOException, PSTException { return new OffsetIndexItem(findBtreeItem(in, identifier, false), this.getPSTFileType()); } /** * parse a PSTDescriptor and get all of its items */ HashMap<Integer, PSTDescriptorItem> getPSTDescriptorItems(long localDescriptorsOffsetIndexIdentifier) throws PSTException, IOException { return this.getPSTDescriptorItems(this.readLeaf(localDescriptorsOffsetIndexIdentifier)); } HashMap<Integer, PSTDescriptorItem> getPSTDescriptorItems(PSTNodeInputStream in) throws PSTException, IOException { // make sure the signature is correct in.seek(0); int sig = in.read(); if (sig != 0x2) { throw new PSTException("Unable to process descriptor node, bad signature: "+sig); } HashMap<Integer, PSTDescriptorItem> output = new HashMap<Integer, PSTDescriptorItem>(); int numberOfItems = (int)in.seekAndReadLong(2, 2); int offset; if (this.getPSTFileType() == PSTFile.PST_TYPE_ANSI) { offset = 4; } else { offset = 8; } byte[] data = new byte[(int)in.length()]; in.seek(0); in.read(data); for (int x = 0; x < numberOfItems; x++) { PSTDescriptorItem item = new PSTDescriptorItem(data, offset, this); output.put(item.descriptorIdentifier, item); if (this.getPSTFileType() == PSTFile.PST_TYPE_ANSI) { offset += 12; } else { offset += 24; } } return output; } /** * Build the children descriptor tree * This goes through the entire descriptor B-Tree and adds every item to the 
childrenDescriptorTree. * This is used as fallback when the nodes that list file contents are broken. * @param in * @throws IOException * @throws PSTException */ LinkedHashMap<Integer, LinkedList<DescriptorIndexNode>> getChildDescriptorTree() throws IOException, PSTException { if (this.childrenDescriptorTree == null) { long btreeStartOffset = 0; if (this.getPSTFileType() == PST_TYPE_ANSI) { btreeStartOffset = this.extractLEFileOffset(188); } else { btreeStartOffset = this.extractLEFileOffset(224); } this.childrenDescriptorTree = new LinkedHashMap<Integer, LinkedList<DescriptorIndexNode>>(); processDescriptorBTree(btreeStartOffset); } return this.childrenDescriptorTree; } /** * Recursive function for building the descriptor tree, used by buildDescriptorTree * @param in * @param btreeStartOffset * @throws IOException * @throws PSTException */ private void processDescriptorBTree(long btreeStartOffset) throws IOException, PSTException { byte[] temp = new byte[2]; if (this.getPSTFileType() == PST_TYPE_ANSI) { in.seek(btreeStartOffset+500); } else { in.seek(btreeStartOffset+496); } in.read(temp); if ((temp[0] == 0xffffff81 && temp[1] == 0xffffff81)) { if (this.getPSTFileType() == PST_TYPE_ANSI) { in.seek(btreeStartOffset+496); } else { in.seek(btreeStartOffset+488); } int numberOfItems = in.read(); in.read(); // maxNumberOfItems in.read(); // itemSize int levelsToLeaf = in.read(); if (levelsToLeaf > 0) { for (int x = 0; x < numberOfItems; x++) { if (this.getPSTFileType() == PST_TYPE_ANSI) { long branchNodeItemStartIndex = (btreeStartOffset + (12*x)); long nextLevelStartsAt = this.extractLEFileOffset(branchNodeItemStartIndex+8); processDescriptorBTree(nextLevelStartsAt); } else { long branchNodeItemStartIndex = (btreeStartOffset + (24*x)); long nextLevelStartsAt = this.extractLEFileOffset(branchNodeItemStartIndex+16); processDescriptorBTree(nextLevelStartsAt); } } } else { for (int x = 0; x < numberOfItems; x++) { // The 64-bit descriptor index b-tree leaf node item // give me the offset index please! if (this.getPSTFileType() == PSTFile.PST_TYPE_ANSI) { in.seek(btreeStartOffset + (x * 16)); temp = new byte[16]; in.read(temp); } else { in.seek(btreeStartOffset + (x * 32)); temp = new byte[32]; in.read(temp); } DescriptorIndexNode tempNode = new DescriptorIndexNode(temp, this.getPSTFileType()); // we don't want to be children of ourselves... if (tempNode.parentDescriptorIndexIdentifier == tempNode.descriptorIdentifier) { // skip! } else if (childrenDescriptorTree.containsKey(tempNode.parentDescriptorIndexIdentifier)) { // add this entry to the existing list of children LinkedList<DescriptorIndexNode> children = childrenDescriptorTree.get(tempNode.parentDescriptorIndexIdentifier); children.add(tempNode); } else { // create a new entry and add this one to that LinkedList<DescriptorIndexNode> children = new LinkedList<DescriptorIndexNode>(); children.add(tempNode); childrenDescriptorTree.put(tempNode.parentDescriptorIndexIdentifier, children); } this.itemCount++; } } } else { PSTObject.printHexFormatted(temp, true); throw new PSTException("Unable to read descriptor node, is not a descriptor"); } } public void close() throws IOException { in.close(); } }
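// A minimal usage sketch for the class above: open a file, print the store's
// display name, then walk the folder tree printing message subjects. The
// companion classes PSTFolder and PSTMessage, and their accessors
// (hasSubfolders(), getSubFolders(), getNextChild(), getSubject()), come from
// elsewhere in this library and are assumed here; the cast to PSTMessage also
// assumes the folders contain mail items only.
import java.util.Vector;

public class PSTDumpExample {
    public static void main(String[] args) throws Exception {
        PSTFile pst = new PSTFile(args[0]); // path to a .pst file
        System.out.println(pst.getMessageStore().getDisplayName());
        dump(pst.getRootFolder(), "");
        pst.close();
    }

    private static void dump(PSTFolder folder, String indent) throws Exception {
        if (folder.hasSubfolders()) {
            Vector<PSTFolder> children = folder.getSubFolders();
            for (PSTFolder child : children) {
                System.out.println(indent + "+ " + child.getDisplayName());
                dump(child, indent + "  ");
            }
        }
        // getNextChild() iterates the folder's items, returning null at the end.
        PSTMessage message = (PSTMessage) folder.getNextChild();
        while (message != null) {
            System.out.println(indent + "- " + message.getSubject());
            message = (PSTMessage) folder.getNextChild();
        }
    }
}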
/* * Copyright (c) 2005-2013, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.wso2.carbon.sample.stockquote; import javax.jms.*; import javax.naming.NamingException; import java.text.DecimalFormat; import java.util.ArrayList; import java.util.List; import java.util.Random; public class JsonStockQuoteClient { private static final String[] jsonMsgs = new String[]{ "{StockQuoteEvent:" + "{StockSymbol:LNKD," + "LastTradeAmount:240.36," + "StockChange:0.05," + "OpenAmount:245.05," + "DayHigh:260.46," + "DayLow:230.01," + "StockVolume:20452658," + "PrevCls:240.31," + "ChangePercent:0.20," + "FiftyTwoWeekRange:\"220.73 - 271.58\"," + "EarnPerShare:2.326," + "PE:10.88," + "CompanyName:\"LinkedIn Corp\"," + "QuoteError:false" + "}" + "}", "{StockQuoteEvent: " + "{ StockSymbol: FB," + "LastTradeAmount: 41.36, " + "StockChange: 0.15," + "OpenAmount: 40.05," + "DayHigh: 47.46," + "DayLow: 39.36," + "StockVolume: 20502658," + "PrevCls: 41.31," + "ChangePercent: 0.20," + "FiftyTwoWeekRange: \"35.73 - 51.58\"," + "EarnPerShare: -1.326," + "PE: 12.88," + "CompanyName: \"Facebook, Inc\"," + "QuoteError: false" + "}" + "}", "{StockQuoteEvent:" + "{StockSymbol:LNKD," + "LastTradeAmount:245.36," + "StockChange:0.05," + "OpenAmount:245.05," + "DayHigh:260.46," + "DayLow:230.01," + "StockVolume:20457000," + "PrevCls:245.46," + "ChangePercent:0.20," + "FiftyTwoWeekRange:\"220.73 - 271.58\"," + "EarnPerShare:2.326," + "PE:10.88," + "CompanyName:\"LinkedIn Corp\"," + "QuoteError:false" + "}" + "}", "{StockQuoteEvent:" + "{StockSymbol:LNKD," + "LastTradeAmount:245.60," + "StockChange:0.05," + "OpenAmount:245.05," + "DayHigh:260.46," + "DayLow:230.01," + "StockVolume:20457658," + "PrevCls:245.36," + "ChangePercent:0.20," + "FiftyTwoWeekRange:\"220.73 - 271.58\"," + "EarnPerShare:2.326," + "PE:10.88," + "CompanyName:\"LinkedIn Corp\"," + "QuoteError:false" + "}" + "}", "{StockQuoteEvent: " + "{ StockSymbol: FB," + "LastTradeAmount: 49.36, " + "StockChange: 0.15," + "OpenAmount: 40.05," + "DayHigh: 47.46," + "DayLow: 39.36," + "StockVolume: 20502658," + "PrevCls: 41.36," + "ChangePercent: 0.20," + "FiftyTwoWeekRange: \"35.73 - 51.58\"," + "EarnPerShare: -1.326," + "PE: 12.88," + "CompanyName: \"Facebook, Inc\"," + "QuoteError: false" + "}" + "}", "{StockQuoteEvent: " + "{ StockSymbol: FB," + "LastTradeAmount: 55.36, " + "StockChange: 0.15," + "OpenAmount: 40.05," + "DayHigh: 55.36," + "DayLow: 39.36," + "StockVolume: 20502658," + "PrevCls: 49.36," + "ChangePercent: 0.20," + "FiftyTwoWeekRange: \"35.73 - 55.36\"," + "EarnPerShare: -1.326," + "PE: 12.88," + "CompanyName: \"Facebook, Inc\"," + "QuoteError: false" + "}" + "}", "{StockQuoteEvent:" + "{StockSymbol:LNKD," + "LastTradeAmount:247.01," + "StockChange:0.05," + "OpenAmount:245.05," + "DayHigh:260.46," + "DayLow:230.01," + "StockVolume:20452658," + "PrevCls:247.00," + "ChangePercent:0.20," + "FiftyTwoWeekRange:\"220.73 - 271.58\"," + "EarnPerShare:2.326," + "PE:10.88," + 
"CompanyName:\"LinkedIn Corp\"," + "QuoteError:false" + "}" + "}"}; private static TopicConnectionFactory topicConnectionFactory = null; Random random; private List<StockCompany> stockCompanyList = new ArrayList<StockCompany>(); public JsonStockQuoteClient() { stockCompanyList.add(new StockCompany("LNKD", "LinkedIn Corp", 220.73, 271.58)); stockCompanyList.add(new StockCompany("FB", "Facebook, Inc.", 35.62, 55.36)); stockCompanyList.add(new StockCompany("GOOG", "Google Corp", 405.48, 525.31)); stockCompanyList.add(new StockCompany("WSO2", "WSO2, Inc.", 135.11, 178.66)); stockCompanyList.add(new StockCompany("RAX", "Rackspace, Inc.", 116.41, 123.36)); random = new Random(); } public static void main(String[] args) throws InterruptedException, NamingException { topicConnectionFactory = JNDIContext.getInstance().getTopicConnectionFactory(); JsonStockQuoteClient publisher = new JsonStockQuoteClient(); String topicName = args[0]; boolean batchedEvents = args[1].equalsIgnoreCase("true"); long nEvents = Long.valueOf(args[2]); Thread.sleep(2000); if (batchedEvents) { publisher.publishBatchedMessage(topicName, jsonMsgs); } else { publisher.publish(topicName, jsonMsgs, nEvents); } System.out.println("All Stock Messages sent"); } private String generateStockQuoteEvent() { StockCompany company = stockCompanyList.get(random.nextInt(stockCompanyList.size())); double lastTradeAmount = company.fiftyTwoWeekMin + random.nextDouble() * (company.fiftyTwoWeekMax - company.fiftyTwoWeekMin); int volume = random.nextInt(1000000); return "{StockQuoteEvent:" + "{StockSymbol:" + company.stockSymbol + "," + "LastTradeAmount:" + new DecimalFormat("#.##").format(lastTradeAmount) + "," + "StockVolume:" + volume + "," + "FiftyTwoWeekRange:\"" + company.fiftyTwoWeekMin + " - " + company.fiftyTwoWeekMax + "\"," + "CompanyName:\"" + company.companyName + "\"," + "QuoteError:false" + "}" + "}"; } /** * Publish message to given topic * * @param topicName - topic name to publish messages * @param messages - messages to send * @param nEvents - no of events to send */ public void publish(String topicName, String[] messages, long nEvents) throws InterruptedException { // create topic connection TopicConnection topicConnection = null; try { topicConnection = topicConnectionFactory.createTopicConnection(); topicConnection.start(); } catch (JMSException e) { System.out.println("Can not create topic connection." + e); return; } Session session = null; try { session = topicConnection.createTopicSession(false, Session.AUTO_ACKNOWLEDGE); Topic topic = session.createTopic(topicName); MessageProducer producer = session.createProducer(topic); System.out.println("Sending JSON messages on '" + topicName + "' topic"); if (nEvents == 0) { for (int i = 0, jsonMsgsLength = messages.length; i < jsonMsgsLength; i++) { String message = messages[i]; TextMessage jmsMessage = session.createTextMessage(); jmsMessage.setText(message); producer.send(jmsMessage); System.out.println("Stock Message " + (i + 1) + " sent"); } } else { for (long i = 0; i < nEvents; i++) { String message = generateStockQuoteEvent(); TextMessage jmsMessage = session.createTextMessage(); jmsMessage.setText(message); producer.send(jmsMessage); System.out.println("Stock Message " + (i + 1) + " sent"); if (i % 100 == 0) { Thread.sleep(1000); } } } producer.close(); session.close(); topicConnection.stop(); topicConnection.close(); } catch (JMSException e) { System.out.println("Can not subscribe." 
+ e); } } /** * Publish message to given topic * * @param topicName - topic name to publish messages * @param messages - messages to send */ public void publishBatchedMessage(String topicName, String[] messages) { // create topic connection TopicConnection topicConnection = null; try { topicConnection = topicConnectionFactory.createTopicConnection(); topicConnection.start(); } catch (JMSException e) { System.out.println("Can not create topic connection." + e); return; } Session session = null; try { session = topicConnection.createTopicSession(false, Session.AUTO_ACKNOWLEDGE); Topic topic = session.createTopic(topicName); MessageProducer producer = session.createProducer(topic); System.out.println("Sending JSON messages on '" + topicName + "' topic"); String jsonArray = "["; for (int i = 0, jsonMsgsLength = messages.length; i < jsonMsgsLength; i++) { String message = messages[i]; jsonArray = jsonArray + message + ","; System.out.println("Added Stock Message " + (i + 1) + " to batch"); } jsonArray = jsonArray.replaceFirst(",$", "]"); TextMessage jmsMessage = session.createTextMessage(); jmsMessage.setText(jsonArray); producer.send(jmsMessage); System.out.println("Batched Stock message with " + messages.length + " sent"); producer.close(); session.close(); topicConnection.stop(); topicConnection.close(); } catch (JMSException e) { System.out.println("Can not subscribe." + e); } } private class StockCompany { String stockSymbol; String companyName; double fiftyTwoWeekMin; double fiftyTwoWeekMax; private StockCompany(String stockSymbol, String companyName, double fiftyTwoWeekMin, double fiftyTwoWeekMax) { this.stockSymbol = stockSymbol; this.companyName = companyName; this.fiftyTwoWeekMin = fiftyTwoWeekMin; this.fiftyTwoWeekMax = fiftyTwoWeekMax; } } }
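// The publisher above is driven from the command line as
//   JsonStockQuoteClient <topicName> <batched: true|false> <eventCount>
// (see main()). A minimal consumer for the same topic, using the shared
// JNDIContext helper, might look like the sketch below; the class name and
// the fixed one-minute lifetime are illustrative assumptions.
import javax.jms.*;

public class StockQuoteSubscriber {
    public static void main(String[] args) throws Exception {
        TopicConnection connection = JNDIContext.getInstance()
                .getTopicConnectionFactory().createTopicConnection();
        TopicSession session = connection.createTopicSession(false, Session.AUTO_ACKNOWLEDGE);
        Topic topic = session.createTopic(args[0]); // same topic name given to the publisher
        MessageConsumer consumer = session.createConsumer(topic);
        consumer.setMessageListener(new MessageListener() {
            public void onMessage(Message message) {
                try {
                    // Batched runs deliver one TextMessage holding a JSON array;
                    // unbatched runs deliver one TextMessage per quote.
                    System.out.println("Received: " + ((TextMessage) message).getText());
                } catch (JMSException e) {
                    e.printStackTrace();
                }
            }
        });
        connection.start();
        Thread.sleep(60000); // keep the subscriber alive for a minute
    }
}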
/* * Copyright 2015 Open Networking Laboratory * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.onosproject.store.ecmap; import com.google.common.collect.Collections2; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Sets; import org.apache.commons.lang3.tuple.Pair; import org.onlab.util.AbstractAccumulator; import org.onlab.util.KryoNamespace; import org.onlab.util.SlidingWindowCounter; import org.onosproject.cluster.ClusterService; import org.onosproject.cluster.ControllerNode; import org.onosproject.cluster.NodeId; import org.onosproject.persistence.PersistenceService; import org.onosproject.store.Timestamp; import org.onosproject.store.cluster.messaging.ClusterCommunicationService; import org.onosproject.store.cluster.messaging.MessageSubject; import org.onosproject.store.impl.LogicalTimestamp; import org.onosproject.store.serializers.KryoNamespaces; import org.onosproject.store.serializers.KryoSerializer; import org.onosproject.store.service.EventuallyConsistentMap; import org.onosproject.store.service.EventuallyConsistentMapEvent; import org.onosproject.store.service.EventuallyConsistentMapListener; import org.onosproject.store.service.Serializer; import org.onosproject.store.service.WallClockTimestamp; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Optional; import java.util.Set; import java.util.Timer; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; import java.util.function.BiFunction; import java.util.stream.Collectors; import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.base.Preconditions.checkState; import static java.util.concurrent.Executors.newSingleThreadScheduledExecutor; import static org.onlab.util.BoundedThreadPool.newFixedThreadPool; import static org.onlab.util.Tools.groupedThreads; import static org.onosproject.store.service.EventuallyConsistentMapEvent.Type.PUT; import static org.onosproject.store.service.EventuallyConsistentMapEvent.Type.REMOVE; /** * Distributed Map implementation which uses optimistic replication and gossip * based techniques to provide an eventually consistent data store. 
*/ public class EventuallyConsistentMapImpl<K, V> implements EventuallyConsistentMap<K, V> { private static final Logger log = LoggerFactory.getLogger(EventuallyConsistentMapImpl.class); private final Map<K, MapValue<V>> items; private final ClusterService clusterService; private final ClusterCommunicationService clusterCommunicator; private final KryoSerializer serializer; private final NodeId localNodeId; private final PersistenceService persistenceService; private final BiFunction<K, V, Timestamp> timestampProvider; private final MessageSubject updateMessageSubject; private final MessageSubject antiEntropyAdvertisementSubject; private final Set<EventuallyConsistentMapListener<K, V>> listeners = Sets.newCopyOnWriteArraySet(); private final ExecutorService executor; private final ScheduledExecutorService backgroundExecutor; private final BiFunction<K, V, Collection<NodeId>> peerUpdateFunction; private final ExecutorService communicationExecutor; private final Map<NodeId, EventAccumulator> senderPending; private final String mapName; private volatile boolean destroyed = false; private static final String ERROR_DESTROYED = " map is already destroyed"; private final String destroyedMessage; private static final String ERROR_NULL_KEY = "Key cannot be null"; private static final String ERROR_NULL_VALUE = "Null values are not allowed"; private final long initialDelaySec = 5; private final boolean lightweightAntiEntropy; private final boolean tombstonesDisabled; private static final int WINDOW_SIZE = 5; private static final int HIGH_LOAD_THRESHOLD = 0; private static final int LOAD_WINDOW = 2; private SlidingWindowCounter counter = new SlidingWindowCounter(WINDOW_SIZE); private final boolean persistent; private static final String PERSISTENT_LOCAL_MAP_NAME = "itemsMap"; /** * Creates a new eventually consistent map shared amongst multiple instances. * <p> * See {@link org.onosproject.store.service.EventuallyConsistentMapBuilder} * for more description of the parameters expected by the map. * </p> * * @param mapName a String identifier for the map. 
* @param clusterService the cluster service * @param clusterCommunicator the cluster communications service * @param serializerBuilder a Kryo namespace builder that can serialize * both K and V * @param timestampProvider provider of timestamps for K and V * @param peerUpdateFunction function that provides a set of nodes to immediately * update when there are writes to the map * @param eventExecutor executor to use for processing incoming * events from peers * @param communicationExecutor executor to use for sending events to peers * @param backgroundExecutor executor to use for background anti-entropy * tasks * @param tombstonesDisabled true if this map should not maintain * tombstones * @param antiEntropyPeriod period that the anti-entropy task should run * @param antiEntropyTimeUnit time unit for anti-entropy period * @param convergeFaster make anti-entropy try to converge faster * @param persistent persist data to disk * @param persistenceService persistence service used to back the map on disk * when persistent is true */ EventuallyConsistentMapImpl(String mapName, ClusterService clusterService, ClusterCommunicationService clusterCommunicator, KryoNamespace.Builder serializerBuilder, BiFunction<K, V, Timestamp> timestampProvider, BiFunction<K, V, Collection<NodeId>> peerUpdateFunction, ExecutorService eventExecutor, ExecutorService communicationExecutor, ScheduledExecutorService backgroundExecutor, boolean tombstonesDisabled, long antiEntropyPeriod, TimeUnit antiEntropyTimeUnit, boolean convergeFaster, boolean persistent, PersistenceService persistenceService) { this.mapName = mapName; this.serializer = createSerializer(serializerBuilder); this.persistenceService = persistenceService; this.persistent = persistent; if (persistent) { items = this.persistenceService.<K, MapValue<V>>persistentMapBuilder() .withName(PERSISTENT_LOCAL_MAP_NAME) .withSerializer(new Serializer() { @Override public <T> byte[] encode(T object) { return EventuallyConsistentMapImpl.this.serializer.encode(object); } @Override public <T> T decode(byte[] bytes) { return EventuallyConsistentMapImpl.this.serializer.decode(bytes); } }) .build(); } else { items = Maps.newConcurrentMap(); } senderPending = Maps.newConcurrentMap(); destroyedMessage = mapName + ERROR_DESTROYED; this.clusterService = clusterService; this.clusterCommunicator = clusterCommunicator; this.localNodeId = clusterService.getLocalNode().id(); this.timestampProvider = timestampProvider; if (peerUpdateFunction != null) { this.peerUpdateFunction = peerUpdateFunction; } else { this.peerUpdateFunction = (key, value) -> clusterService.getNodes().stream() .map(ControllerNode::id) .filter(nodeId -> !nodeId.equals(localNodeId)) .collect(Collectors.toList()); } if (eventExecutor != null) { this.executor = eventExecutor; } else { // should be a normal executor; it's used for receiving messages this.executor = Executors.newFixedThreadPool(8, groupedThreads("onos/ecm", mapName + "-fg-%d")); } if (communicationExecutor != null) { this.communicationExecutor = communicationExecutor; } else { // sending executor; should be capped //TODO this probably doesn't need to be bounded anymore this.communicationExecutor = newFixedThreadPool(8, groupedThreads("onos/ecm", mapName + "-publish-%d")); } if (backgroundExecutor != null) { this.backgroundExecutor = backgroundExecutor; } else { this.backgroundExecutor = newSingleThreadScheduledExecutor(groupedThreads("onos/ecm", mapName + "-bg-%d")); } // start anti-entropy thread this.backgroundExecutor.scheduleAtFixedRate(this::sendAdvertisement, initialDelaySec, antiEntropyPeriod, antiEntropyTimeUnit); updateMessageSubject = new
MessageSubject("ecm-" + mapName + "-update"); clusterCommunicator.addSubscriber(updateMessageSubject, serializer::decode, this::processUpdates, this.executor); antiEntropyAdvertisementSubject = new MessageSubject("ecm-" + mapName + "-anti-entropy"); clusterCommunicator.addSubscriber(antiEntropyAdvertisementSubject, serializer::decode, this::handleAntiEntropyAdvertisement, this.backgroundExecutor); this.tombstonesDisabled = tombstonesDisabled; this.lightweightAntiEntropy = !convergeFaster; } private KryoSerializer createSerializer(KryoNamespace.Builder builder) { return new KryoSerializer() { @Override protected void setupKryoPool() { // Add the map's internal helper classes to the user-supplied serializer serializerPool = builder .register(KryoNamespaces.BASIC) .nextId(KryoNamespaces.BEGIN_USER_CUSTOM_ID) .register(LogicalTimestamp.class) .register(WallClockTimestamp.class) .register(AntiEntropyAdvertisement.class) .register(UpdateEntry.class) .register(MapValue.class) .register(MapValue.Digest.class) .build(); } }; } @Override public int size() { checkState(!destroyed, destroyedMessage); // TODO: Maintain a separate counter for tracking live elements in map. return Maps.filterValues(items, MapValue::isAlive).size(); } @Override public boolean isEmpty() { checkState(!destroyed, destroyedMessage); return size() == 0; } @Override public boolean containsKey(K key) { checkState(!destroyed, destroyedMessage); checkNotNull(key, ERROR_NULL_KEY); return get(key) != null; } @Override public boolean containsValue(V value) { checkState(!destroyed, destroyedMessage); checkNotNull(value, ERROR_NULL_VALUE); return items.values() .stream() .filter(MapValue::isAlive) .anyMatch(v -> value.equals(v.get())); } @Override public V get(K key) { checkState(!destroyed, destroyedMessage); checkNotNull(key, ERROR_NULL_KEY); MapValue<V> value = items.get(key); return (value == null || value.isTombstone()) ? null : value.get(); } @Override public void put(K key, V value) { checkState(!destroyed, destroyedMessage); checkNotNull(key, ERROR_NULL_KEY); checkNotNull(value, ERROR_NULL_VALUE); MapValue<V> newValue = new MapValue<>(value, timestampProvider.apply(key, value)); if (putInternal(key, newValue)) { notifyPeers(new UpdateEntry<>(key, newValue), peerUpdateFunction.apply(key, value)); notifyListeners(new EventuallyConsistentMapEvent<>(mapName, PUT, key, value)); } } @Override public V remove(K key) { checkState(!destroyed, destroyedMessage); checkNotNull(key, ERROR_NULL_KEY); return removeAndNotify(key, null); } @Override public void remove(K key, V value) { checkState(!destroyed, destroyedMessage); checkNotNull(key, ERROR_NULL_KEY); checkNotNull(value, ERROR_NULL_VALUE); removeAndNotify(key, value); } private V removeAndNotify(K key, V value) { Timestamp timestamp = timestampProvider.apply(key, value); Optional<MapValue<V>> tombstone = tombstonesDisabled || timestamp == null ? Optional.empty() : Optional.of(MapValue.tombstone(timestamp)); MapValue<V> previousValue = removeInternal(key, Optional.ofNullable(value), tombstone); if (previousValue != null) { notifyPeers(new UpdateEntry<>(key, tombstone.orElse(null)), peerUpdateFunction.apply(key, previousValue.get())); if (previousValue.isAlive()) { notifyListeners(new EventuallyConsistentMapEvent<>(mapName, REMOVE, key, previousValue.get())); } } return previousValue != null ? 
previousValue.get() : null; } private MapValue<V> removeInternal(K key, Optional<V> value, Optional<MapValue<V>> tombstone) { checkState(!destroyed, destroyedMessage); checkNotNull(key, ERROR_NULL_KEY); checkNotNull(value, ERROR_NULL_VALUE); tombstone.ifPresent(v -> checkState(v.isTombstone())); counter.incrementCount(); AtomicBoolean updated = new AtomicBoolean(false); AtomicReference<MapValue<V>> previousValue = new AtomicReference<>(); items.compute(key, (k, existing) -> { boolean valueMatches = true; if (value.isPresent() && existing != null && existing.isAlive()) { valueMatches = Objects.equals(value.get(), existing.get()); } if (existing == null) { log.trace("ECMap Remove: Existing value for key {} is already null", k); } if (valueMatches) { if (existing == null) { updated.set(tombstone.isPresent()); } else { updated.set(!tombstone.isPresent() || tombstone.get().isNewerThan(existing)); } } if (updated.get()) { previousValue.set(existing); return tombstone.orElse(null); } else { return existing; } }); return previousValue.get(); } @Override public V compute(K key, BiFunction<K, V, V> recomputeFunction) { checkState(!destroyed, destroyedMessage); checkNotNull(key, ERROR_NULL_KEY); checkNotNull(recomputeFunction, "Recompute function cannot be null"); AtomicBoolean updated = new AtomicBoolean(false); AtomicReference<MapValue<V>> previousValue = new AtomicReference<>(); MapValue<V> computedValue = items.compute(key, (k, mv) -> { previousValue.set(mv); V newRawValue = recomputeFunction.apply(key, mv == null ? null : mv.get()); MapValue<V> newValue = new MapValue<>(newRawValue, timestampProvider.apply(key, newRawValue)); if (mv == null || newValue.isNewerThan(mv)) { updated.set(true); return newValue; } else { return mv; } }); if (updated.get()) { notifyPeers(new UpdateEntry<>(key, computedValue), peerUpdateFunction.apply(key, computedValue.get())); EventuallyConsistentMapEvent.Type updateType = computedValue.isTombstone() ? REMOVE : PUT; V value = computedValue.isTombstone() ? previousValue.get() == null ? null : previousValue.get().get() : computedValue.get(); if (value != null) { notifyListeners(new EventuallyConsistentMapEvent<>(mapName, updateType, key, value)); } } return computedValue.get(); } @Override public void putAll(Map<? extends K, ? extends V> m) { checkState(!destroyed, destroyedMessage); m.forEach(this::put); } @Override public void clear() { checkState(!destroyed, destroyedMessage); Maps.filterValues(items, MapValue::isAlive) .forEach((k, v) -> remove(k)); } @Override public Set<K> keySet() { checkState(!destroyed, destroyedMessage); return Maps.filterValues(items, MapValue::isAlive) .keySet(); } @Override public Collection<V> values() { checkState(!destroyed, destroyedMessage); return Collections2.transform(Maps.filterValues(items, MapValue::isAlive).values(), MapValue::get); } @Override public Set<Map.Entry<K, V>> entrySet() { checkState(!destroyed, destroyedMessage); return Maps.filterValues(items, MapValue::isAlive) .entrySet() .stream() .map(e -> Pair.of(e.getKey(), e.getValue().get())) .collect(Collectors.toSet()); } /** * Returns true if newValue was accepted i.e. map is updated. 
* * @param key key * @param newValue proposed new value * @return true if update happened; false if map already contains a more recent value for the key */ private boolean putInternal(K key, MapValue<V> newValue) { checkState(!destroyed, destroyedMessage); checkNotNull(key, ERROR_NULL_KEY); checkNotNull(newValue, ERROR_NULL_VALUE); checkState(newValue.isAlive()); counter.incrementCount(); AtomicBoolean updated = new AtomicBoolean(false); items.compute(key, (k, existing) -> { if (existing == null || newValue.isNewerThan(existing)) { updated.set(true); return newValue; } return existing; }); return updated.get(); } @Override public void addListener(EventuallyConsistentMapListener<K, V> listener) { checkState(!destroyed, destroyedMessage); listeners.add(checkNotNull(listener)); } @Override public void removeListener(EventuallyConsistentMapListener<K, V> listener) { checkState(!destroyed, destroyedMessage); listeners.remove(checkNotNull(listener)); } @Override public void destroy() { destroyed = true; executor.shutdown(); backgroundExecutor.shutdown(); communicationExecutor.shutdown(); listeners.clear(); clusterCommunicator.removeSubscriber(updateMessageSubject); clusterCommunicator.removeSubscriber(antiEntropyAdvertisementSubject); } private void notifyListeners(EventuallyConsistentMapEvent<K, V> event) { listeners.forEach(listener -> listener.event(event)); } private void notifyPeers(UpdateEntry<K, V> event, Collection<NodeId> peers) { queueUpdate(event, peers); } private void queueUpdate(UpdateEntry<K, V> event, Collection<NodeId> peers) { if (peers == null) { // we have no friends :( return; } peers.forEach(node -> senderPending.computeIfAbsent(node, unusedKey -> new EventAccumulator(node)).add(event) ); } private boolean underHighLoad() { return counter.get(LOAD_WINDOW) > HIGH_LOAD_THRESHOLD; } private void sendAdvertisement() { try { if (underHighLoad() || destroyed) { return; } pickRandomActivePeer().ifPresent(this::sendAdvertisementToPeer); } catch (Exception e) { // Catch all exceptions to avoid scheduled task being suppressed. log.error("Exception thrown while sending advertisement", e); } } private Optional<NodeId> pickRandomActivePeer() { List<NodeId> activePeers = clusterService.getNodes() .stream() .map(ControllerNode::id) .filter(id -> !localNodeId.equals(id)) .filter(id -> clusterService.getState(id) == ControllerNode.State.ACTIVE) .collect(Collectors.toList()); Collections.shuffle(activePeers); return activePeers.isEmpty() ? Optional.empty() : Optional.of(activePeers.get(0)); } private void sendAdvertisementToPeer(NodeId peer) { clusterCommunicator.unicast(createAdvertisement(), antiEntropyAdvertisementSubject, serializer::encode, peer) .whenComplete((result, error) -> { if (error != null) { log.debug("Failed to send anti-entropy advertisement to {}", peer, error); } }); } private AntiEntropyAdvertisement<K> createAdvertisement() { return new AntiEntropyAdvertisement<K>(localNodeId, ImmutableMap.copyOf(Maps.transformValues(items, MapValue::digest))); } private void handleAntiEntropyAdvertisement(AntiEntropyAdvertisement<K> ad) { if (destroyed || underHighLoad()) { return; } try { if (log.isTraceEnabled()) { log.trace("Received anti-entropy advertisement from {} for {} with {} entries in it", ad.sender(), mapName, ad.digest().size()); } antiEntropyCheckLocalItems(ad).forEach(this::notifyListeners); if (!lightweightAntiEntropy) { // if remote ad has any entries that the local copy is missing, actively sync // TODO: Missing keys is not the way local copy can be behind.
if (!Sets.difference(ad.digest().keySet(), items.keySet()).isEmpty()) { // TODO: Send ad for missing keys and for entries that are stale sendAdvertisementToPeer(ad.sender()); } } } catch (Exception e) { log.warn("Error handling anti-entropy advertisement", e); } } /** * Processes an anti-entropy ad from a peer by taking the following actions: * 1. If the peer has an old entry, updates the peer. * 2. If the peer indicates an entry is removed and has a more recent * timestamp than the local entry, updates local state. */ private List<EventuallyConsistentMapEvent<K, V>> antiEntropyCheckLocalItems( AntiEntropyAdvertisement<K> ad) { final List<EventuallyConsistentMapEvent<K, V>> externalEvents = Lists.newLinkedList(); final NodeId sender = ad.sender(); items.forEach((key, localValue) -> { MapValue.Digest remoteValueDigest = ad.digest().get(key); if (remoteValueDigest == null || localValue.isNewerThan(remoteValueDigest.timestamp())) { // local value is more recent, push to sender queueUpdate(new UpdateEntry<>(key, localValue), ImmutableList.of(sender)); } if (remoteValueDigest != null && remoteValueDigest.isNewerThan(localValue.digest()) && remoteValueDigest.isTombstone()) { MapValue<V> tombstone = MapValue.tombstone(remoteValueDigest.timestamp()); MapValue<V> previousValue = removeInternal(key, Optional.empty(), Optional.of(tombstone)); if (previousValue != null && previousValue.isAlive()) { externalEvents.add(new EventuallyConsistentMapEvent<>(mapName, REMOVE, key, previousValue.get())); } } }); return externalEvents; } private void processUpdates(Collection<UpdateEntry<K, V>> updates) { if (destroyed) { return; } updates.forEach(update -> { final K key = update.key(); final MapValue<V> value = update.value(); if (value == null || value.isTombstone()) { MapValue<V> previousValue = removeInternal(key, Optional.empty(), Optional.ofNullable(value)); if (previousValue != null && previousValue.isAlive()) { notifyListeners(new EventuallyConsistentMapEvent<>(mapName, REMOVE, key, previousValue.get())); } } else if (putInternal(key, value)) { notifyListeners(new EventuallyConsistentMapEvent<>(mapName, PUT, key, value.get())); } }); } // TODO pull this into the class if this gets pulled out... private static final int DEFAULT_MAX_EVENTS = 1000; private static final int DEFAULT_MAX_IDLE_MS = 10; private static final int DEFAULT_MAX_BATCH_MS = 50; private static final Timer TIMER = new Timer("onos-ecm-sender-events"); private final class EventAccumulator extends AbstractAccumulator<UpdateEntry<K, V>> { private final NodeId peer; private EventAccumulator(NodeId peer) { super(TIMER, DEFAULT_MAX_EVENTS, DEFAULT_MAX_BATCH_MS, DEFAULT_MAX_IDLE_MS); this.peer = peer; } @Override public void processItems(List<UpdateEntry<K, V>> items) { Map<K, UpdateEntry<K, V>> map = Maps.newHashMap(); items.forEach(item -> map.compute(item.key(), (key, existing) -> item.isNewerThan(existing) ? item : existing)); communicationExecutor.submit(() -> { clusterCommunicator.unicast(ImmutableList.copyOf(map.values()), updateMessageSubject, serializer::encode, peer) .whenComplete((result, error) -> { if (error != null) { log.debug("Failed to send to {}", peer, error); } }); }); } } }
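// Illustrative, self-contained model (not part of the original source) of the
// last-writer-wins rule that putInternal and removeInternal implement above: an
// incoming value is accepted only if its timestamp is strictly newer than the
// stored one. Names here are assumptions; the real map uses MapValue and Timestamp.
class LwwRegisterSketch<V> {
    private long timestamp = Long.MIN_VALUE;
    private V value; // null models a tombstone

    // Returns true if the update was accepted, mirroring putInternal's contract.
    synchronized boolean update(V newValue, long newTimestamp) {
        if (newTimestamp <= timestamp) {
            return false; // stale update: a more recent value is already stored
        }
        timestamp = newTimestamp;
        value = newValue;
        return true;
    }

    synchronized V get() {
        return value;
    }
}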
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.codehaus.groovy.transform.stc; import groovy.lang.GroovyRuntimeException; import org.codehaus.groovy.ast.ClassHelper; import org.codehaus.groovy.ast.ClassNode; import org.codehaus.groovy.ast.GenericsType; import org.codehaus.groovy.ast.tools.WideningCategories; import org.codehaus.groovy.classgen.asm.BytecodeHelper; import org.codehaus.groovy.runtime.EncodingGroovyMethods; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.DataInputStream; import java.io.DataOutputStream; import java.io.IOException; import java.io.StringWriter; import static org.codehaus.groovy.ast.ClassHelper.OBJECT_TYPE; import static org.codehaus.groovy.ast.ClassHelper.VOID_TYPE; import static org.codehaus.groovy.ast.ClassHelper.boolean_TYPE; import static org.codehaus.groovy.ast.ClassHelper.byte_TYPE; import static org.codehaus.groovy.ast.ClassHelper.char_TYPE; import static org.codehaus.groovy.ast.ClassHelper.double_TYPE; import static org.codehaus.groovy.ast.ClassHelper.float_TYPE; import static org.codehaus.groovy.ast.ClassHelper.int_TYPE; import static org.codehaus.groovy.ast.ClassHelper.long_TYPE; import static org.codehaus.groovy.ast.ClassHelper.short_TYPE; /** * First implementation of an inferred type signature codec. 
* * @author Cedric Champeau */ public class SignatureCodecVersion1 implements SignatureCodec { private final ClassLoader classLoader; public SignatureCodecVersion1(final ClassLoader classLoader) { this.classLoader = classLoader; } private void doEncode(final ClassNode node, DataOutputStream dos) throws IOException { dos.writeUTF(node.getClass().getSimpleName()); if (node instanceof UnionTypeClassNode) { UnionTypeClassNode union = (UnionTypeClassNode) node; ClassNode[] delegates = union.getDelegates(); dos.writeInt(delegates.length); for (ClassNode delegate : delegates) { doEncode(delegate, dos); } return; } else if (node instanceof WideningCategories.LowestUpperBoundClassNode) { WideningCategories.LowestUpperBoundClassNode lub = (WideningCategories.LowestUpperBoundClassNode) node; dos.writeUTF(lub.getLubName()); doEncode(lub.getUnresolvedSuperClass(), dos); ClassNode[] interfaces = lub.getInterfaces(); if (interfaces == null) { dos.writeInt(-1); } else { dos.writeInt(interfaces.length); for (ClassNode anInterface : interfaces) { doEncode(anInterface, dos); } } return; } if (node.isArray()) { dos.writeBoolean(true); doEncode(node.getComponentType(), dos); } else { dos.writeBoolean(false); dos.writeUTF(BytecodeHelper.getTypeDescription(node)); dos.writeBoolean(node.isUsingGenerics()); GenericsType[] genericsTypes = node.getGenericsTypes(); if (genericsTypes == null) { dos.writeInt(-1); } else { dos.writeInt(genericsTypes.length); for (GenericsType type : genericsTypes) { dos.writeBoolean(type.isPlaceholder()); dos.writeBoolean(type.isWildcard()); doEncode(type.getType(), dos); ClassNode lb = type.getLowerBound(); if (lb == null) { dos.writeBoolean(false); } else { dos.writeBoolean(true); doEncode(lb, dos); } ClassNode[] upperBounds = type.getUpperBounds(); if (upperBounds == null) { dos.writeInt(-1); } else { dos.writeInt(upperBounds.length); for (ClassNode bound : upperBounds) { doEncode(bound, dos); } } } } } } public String encode(final ClassNode node) { ByteArrayOutputStream baos = new ByteArrayOutputStream(128); DataOutputStream dos = new DataOutputStream(baos); StringWriter wrt = new StringWriter(); String encoded = null; try { doEncode(node, dos); EncodingGroovyMethods.encodeBase64(baos.toByteArray()).writeTo(wrt); encoded = wrt.toString(); } catch (IOException e) { throw new GroovyRuntimeException("Unable to serialize type information", e); } return encoded; } private ClassNode doDecode(final DataInputStream dis) throws IOException { String classNodeType = dis.readUTF(); if (UnionTypeClassNode.class.getSimpleName().equals(classNodeType)) { int len = dis.readInt(); ClassNode[] delegates = new ClassNode[len]; for (int i = 0; i < len; i++) { delegates[i] = doDecode(dis); } return new UnionTypeClassNode(delegates); } else if (WideningCategories.LowestUpperBoundClassNode.class.getSimpleName().equals(classNodeType)) { String name = dis.readUTF(); ClassNode upper = doDecode(dis); int len = dis.readInt(); ClassNode[] interfaces = null; if (len >= 0) { interfaces = new ClassNode[len]; for (int i = 0; i < len; i++) { interfaces[i] = doDecode(dis); } } return new WideningCategories.LowestUpperBoundClassNode(name, upper, interfaces); } boolean makeArray = dis.readBoolean(); if (makeArray) { return doDecode(dis).makeArray(); } String typedesc = dis.readUTF(); char typeCode = typedesc.charAt(0); ClassNode result = OBJECT_TYPE; if (typeCode == 'L') { // object type String className = typedesc.replace('/', '.').substring(1, typedesc.length() - 1); try { result = 
ClassHelper.make(Class.forName(className, false, classLoader)).getPlainNodeReference(); } catch (ClassNotFoundException e) { result = ClassHelper.make(className); } result.setUsingGenerics(dis.readBoolean()); int len = dis.readInt(); if (len >= 0) { GenericsType[] gts = new GenericsType[len]; for (int i = 0; i < len; i++) { boolean placeholder = dis.readBoolean(); boolean wildcard = dis.readBoolean(); ClassNode type = doDecode(dis); boolean low = dis.readBoolean(); ClassNode lb = null; if (low) { lb = doDecode(dis); } int upc = dis.readInt(); ClassNode[] ups = null; if (upc >= 0) { ups = new ClassNode[upc]; for (int j = 0; j < upc; j++) { ups[j] = doDecode(dis); } } GenericsType gt = new GenericsType( type, ups, lb ); gt.setPlaceholder(placeholder); gt.setWildcard(wildcard); gts[i] = gt; } result.setGenericsTypes(gts); } } else { // primitive type switch (typeCode) { case 'I': result = int_TYPE; break; case 'Z': result = boolean_TYPE; break; case 'B': result = byte_TYPE; break; case 'C': result = char_TYPE; break; case 'S': result = short_TYPE; break; case 'D': result = double_TYPE; break; case 'F': result = float_TYPE; break; case 'J': result = long_TYPE; break; case 'V': result = VOID_TYPE; break; } } return result; } public ClassNode decode(final String signature) { DataInputStream dis = new DataInputStream( new ByteArrayInputStream(EncodingGroovyMethods.decodeBase64(signature))); try { return doDecode(dis); } catch (IOException e) { throw new GroovyRuntimeException("Unable to read type information", e); } } }
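// Illustrative round-trip (not part of the original source): encoding a ClassNode
// to its Base64 signature and decoding it back with the codec defined above. It
// relies on this file's imports; the sample type is chosen only for demonstration.
class SignatureCodecExample {
    public static void main(String[] args) {
        SignatureCodec codec = new SignatureCodecVersion1(SignatureCodecExample.class.getClassLoader());
        ClassNode node = ClassHelper.make(java.util.List.class).getPlainNodeReference();
        String signature = codec.encode(node);
        // decode() resolves the class through the supplied ClassLoader when it can,
        // and falls back to an unresolved ClassNode otherwise
        ClassNode decoded = codec.decode(signature);
        System.out.println(signature + " -> " + decoded);
    }
}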
package org.lifenoodles.jargparse; import org.lifenoodles.jargparse.exceptions.ArgumentCountException; import org.lifenoodles.jargparse.exceptions.BadArgumentException; import org.lifenoodles.jargparse.exceptions.RequiredOptionException; import org.lifenoodles.jargparse.exceptions.UnknownOptionException; import java.util.*; import java.util.stream.Collectors; import java.util.stream.Stream; /** * Parses an array of strings looking for specified patterns; contains methods * to register options as well as validate input. If the application name is not * set it defaults to "AppName". If prefixes are not set the only recognised * prefixes are "-" and "--". * * @author Donagh Hatton * created on 06/07/2014. */ public class OptionParser { private final Map<String, Validator> validators = new HashMap<>(); private final Map<String, OptionValidator> optionValidators = new HashMap<>(); private final List<PositionalValidator> positionalValidators = new ArrayList<>(); private final String applicationName; public OptionParser() { this("AppName"); } /** * Create an OptionParser with a different default name * * @param applicationName alternate application name */ public OptionParser(final String applicationName) { this.applicationName = applicationName; } /** * Gets a formatted help message for use as a standard response in the * event of bad inputs. The HelpfulArgumentParser invokes this automatically. * * @return formatted help text */ public String getHelpText() { StringBuilder builder = new StringBuilder(getUsageText()); builder.append(System.lineSeparator()); if (!positionalValidators.isEmpty()) { builder.append(System.lineSeparator()) .append("positional arguments:") .append(System.lineSeparator()); for (Validator validator : positionalValidators) { builder.append(" ").append(validator.getName()) .append(System.lineSeparator()).append("\t") .append(validator.getDescription()) .append(System.lineSeparator()); } } builder.append(System.lineSeparator()).append("optional arguments:") .append(System.lineSeparator()); for (Validator validator : new HashSet<>(optionValidators.values())) { builder.append(" ").append(validator.getName()); builder.append(" ").append(validator.formatLabels()); for (String alias : Utility.dropN(1, validator.getNames())) { builder.append(", ").append(alias); builder.append(" ").append(validator.formatLabels()); } builder.append(System.lineSeparator()).append("\t") .append(validator.getDescription()) .append(System.lineSeparator()); } return builder.toString(); } /** * Gets a formatted string as a standard usage message. * * @return a formatted usage message */ public String getUsageText() { StringBuilder builder = new StringBuilder("usage: ").
append(applicationName); for (Validator validator : new HashSet<>(optionValidators.values())) { builder.append(" [").append(validator.formatHelp()).append("]"); } for (Validator validator : positionalValidators) { builder.append(" ").append(validator.formatHelp()); } return builder.toString(); } /** * Add an option to this parser * * @param option option to add * @return this */ public OptionParser add(Option option) { OptionValidator validator = option.make(); registerValidator(validator); validator.getNames().stream() .forEach(x -> optionValidators.put(x, validator)); return this; } /** * Add a positional option to this parser * * @param option option to add * @return this */ public OptionParser add(Positional option) { PositionalValidator validator = option.make(); registerValidator(validator); if (validator.maximumArgumentCount() == 0) { throw new IllegalArgumentException(String.format( "Positional option: %s must take at least 1 argument", validator.getName())); } positionalValidators.add(validator); return this; } /** * Parse the provided arguments using any rules that have been registered * * @param options the array of arguments * @return an OptionSet containing all of the parsed options * @throws ArgumentCountException if the count of arguments is incorrect * @throws UnknownOptionException if there exist unknown options * @throws BadArgumentException if any arguments to an option are illegal * @throws RequiredOptionException if a required option is not present */ public OptionSet parse(final String... options) throws ArgumentCountException, UnknownOptionException, BadArgumentException, RequiredOptionException { StateParser parser = new StateParser(positionalValidators, optionValidators, options); while (!parser.isDone()) { parser.execute(); } // check parser results for sanity if (!parser.badArguments.isEmpty()) { final String optionName = parser.badArguments.keySet().stream() .findFirst().get(); throw new BadArgumentException(optionName, parser.badArguments.get(optionName).get(0)); } if (!parser.unrecognisedOptions.isEmpty()) { throw new UnknownOptionException(parser.unrecognisedOptions .iterator().next()); } List<String> badArgumentCounts = parser.namesToArgumentCounts.entrySet() .stream().filter(x -> validators.get(x.getKey()) .minimumArgumentCount() > x.getValue()) .map(Map.Entry::getKey).collect(Collectors.toList()); // check for bad arguments if (!badArgumentCounts.isEmpty()) { // look up via validators so positional names resolve as well as option names final Validator v = validators.get(badArgumentCounts.get(0)); throw new ArgumentCountException(badArgumentCounts.get(0), v.minimumArgumentCount(), parser.namesToArgumentCounts.get(badArgumentCounts.get(0))); } // check required options if (optionValidators.values().stream() .filter(OptionValidator::isHelper).map(Validator::getName) .filter(parser.optionSet::contains).count() == 0) { List<Validator> missing = Stream.concat( optionValidators.values().stream() .filter(OptionValidator::isRequired), positionalValidators.stream() .filter(x -> x.minimumArgumentCount() > 0)) .filter(x -> !parser.optionSet.contains(x.getName())) .collect(Collectors.toList()); if (!missing.isEmpty()) { throw new RequiredOptionException(missing.get(0).getName()); } } return parser.optionSet; } private void registerValidator(Validator validator) { if (validator.getNames().stream().anyMatch(validators::containsKey)) { throw new IllegalArgumentException(String.format( "name %s already registered with this parser", validator.getName())); } validator.getNames().stream() .forEach(x -> validators.put(x, validator)); } /** * Represents the state of the StateParser */ private enum State { READ_OPTION,
READ_ARGUMENT, DONE } /** * Implementation of an FSM to parse the given arguments */ private class StateParser { // public data for reporting parser state public final OptionSet optionSet = new OptionSet(); public final Set<String> unrecognisedOptions = new HashSet<>(); public final Map<String, List<String>> badArguments = new HashMap<>(); // data passed to parser private final Map<String, Validator> namesToValidators; // state variables private final Iterator<PositionalValidator> positionalIterator; private final Map<String, Integer> namesToArgumentCounts = new HashMap<>(); private List<String> arguments; private List<String> parsedArguments = new ArrayList<>(); private String optionName; private State currentState = State.READ_OPTION; private Validator validator; public StateParser(final List<PositionalValidator> positionalValidators, final Map<String, OptionValidator> namesToValidators, final String... arguments) { this.namesToValidators = Collections.unmodifiableMap(namesToValidators); this.positionalIterator = positionalValidators.iterator(); this.arguments = new LinkedList<>(Arrays.asList(arguments)); } public boolean isDone() { return currentState == State.DONE; } public void execute() { switch (currentState) { case READ_OPTION: readOption(); break; case READ_ARGUMENT: readArgument(); break; case DONE: break; } } private void readOption() { if (arguments.isEmpty()) { currentState = State.DONE; return; } if (namesToValidators.containsKey(arguments.get(0))) { validator = namesToValidators.get(arguments.get(0)); optionName = arguments.get(0); arguments.remove(0); } else { if (!positionalIterator.hasNext()) { unrecognisedOptions.add(arguments.get(0)); arguments.remove(0); return; } validator = positionalIterator.next(); optionName = validator.getName(); } currentState = State.READ_ARGUMENT; parsedArguments = new ArrayList<>(); } private void readArgument() { assert (validator != null); if (arguments.isEmpty() || namesToValidators.containsKey(arguments.get(0)) || parsedArguments.size() >= validator.maximumArgumentCount()) { namesToArgumentCounts.put(optionName, parsedArguments.size()); optionSet.addOption(validator, parsedArguments); currentState = State.READ_OPTION; } else { if (!validator.isArgumentLegal(arguments.get(0))) { if (!badArguments.containsKey(optionName)) { badArguments.put(optionName, new ArrayList<>()); } badArguments.get(optionName).add(arguments.get(0)); arguments.remove(0); return; } parsedArguments.add(arguments.get(0)); arguments.remove(0); } } } }
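// Illustrative usage sketch (not part of the original source). How Option and
// Positional instances are constructed is not shown in this file, so the add()
// calls are left as a placeholder; the sketch focuses on the parse/help flow.
class OptionParserExample {
    public static void main(String[] args) {
        OptionParser parser = new OptionParser("myapp");
        // parser.add(option) and parser.add(positional) calls would go here
        try {
            OptionSet options = parser.parse(args);
            // query the returned OptionSet for parsed values here
        } catch (Exception e) {
            // on any parse failure, report the problem and the generated help text
            System.err.println(e.getMessage());
            System.err.println(parser.getHelpText());
        }
    }
}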
/* * Copyright 2014. Vladislav Dolgikh * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.elusive_code.newsboy; import java.util.*; import java.util.concurrent.ForkJoinPool; import java.util.concurrent.RecursiveAction; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; import java.util.logging.Level; import java.util.logging.Logger; /** * <p>Implementation of {@link com.elusive_code.newsboy.EventService} that uses</p> * <ul> * <li>weak references to store subscribers</li> * <li>the Fork-Join framework to schedule notifications asynchronously</li> * </ul> * * @see com.elusive_code.newsboy.EventService * @see com.elusive_code.newsboy.Subscribe * @author Vladislav Dolgikh */ public class AsyncEventService implements EventService { private static final Logger LOG = Logger.getLogger(AsyncEventService.class.getName()); /** * <p>Subscribed listeners. Used for faster unsubscribing</p> * <p>Key - listener object, Value - set of event handlers</p> */ private WeakHashMap<Object, Collection<WeakEventHandler>> listeners = new WeakHashMap<>(); /** * <p>Listeners by event class, used for faster publishing</p> * <p>Key - class of event to handle, Value - set of event handlers from all listeners</p> */ private Map<Class, Set<WeakEventHandler>> listenersByClass = new HashMap<>(); /** * Lock for synchronizing listeners' collections */ private Lock listenersLock = new ReentrantLock(); private PublishAction lastOrderedEvent = null; private ForkJoinPool notificatorPool; private boolean saveEventStackTrace; public AsyncEventService() { this(Runtime.getRuntime().availableProcessors()); } public AsyncEventService(int parallelism) { this(parallelism, true); } public AsyncEventService(int parallelism, boolean saveEventStackTrace) { this.notificatorPool = new ForkJoinPool(parallelism); this.saveEventStackTrace = saveEventStackTrace; } /** * <p>Whether event publishing stack trace is stored</p> * * @see #setSaveEventStackTrace(boolean) * @return true if it is stored */ public boolean isSaveEventStackTrace() { return saveEventStackTrace; } /** * <p> * Defines whether to store event publishing stack trace.
* </p> * <p> * If true, then if an error occurs during event handling it will attempt to add that stack trace * to the one of the exception that occurred.<br> * Even if it fails, that stack trace will be available through {@link EventNotifierTask#getEventStackTrace()} * </p> * <p> * By default it is true * </p> * @param saveEventStackTrace flag whether to store event stack trace */ public void setSaveEventStackTrace(boolean saveEventStackTrace) { this.saveEventStackTrace = saveEventStackTrace; } @Override public void subscribe(Object object) { if (object == null) return; LinkedList<WeakEventHandler> eventHandlers = EventServiceHelper.createObjectEventHandlers(object); listenersLock.lock(); try { listeners.put(object, eventHandlers); for (WeakEventHandler handler : eventHandlers) { addListenerByClass(handler.getEventType(), handler); } } finally { listenersLock.unlock(); } } @Override public void unsubscribe(Object object) { if (object == null) return; listenersLock.lock(); try { Collection<WeakEventHandler> handlers = listeners.remove(object); if (handlers == null || handlers.size() <= 0) return; for (WeakEventHandler handler : handlers) { removeListenerByClass(handler.getEventType(), handler); } } finally { listenersLock.unlock(); } } /** * <p>Publish event to this EventService.</p> * <p>No delivery order guaranteed.</p> * <p>When using returned futures keep in mind they may fail with * {@link com.elusive_code.newsboy.WeakReferenceCollectedException}; * in that case nothing should be done: the listener was claimed by GC before event handling (but after event scheduling)</p> * @param event event to notify of * @return list of {@link com.elusive_code.newsboy.NotificationFuture} that represent scheduled notifications */ @Override public List<NotificationFuture> publish ( Object event ) { if ( event == null ) return Collections.EMPTY_LIST; EventStackTrace stackTrace = null; if (saveEventStackTrace){ stackTrace = new EventStackTrace(event); } PublishAction task = new PublishAction ( event, stackTrace ); notificatorPool.execute ( task ); return new ArrayList<NotificationFuture>(task.getNotifiers()); } /** * <p>Publish event to this EventService.</p> * <p>Guaranteed to deliver in the same order that was published * relative to other <b>ordered</b> events</p> * <p>When using returned futures keep in mind they may fail with * {@link com.elusive_code.newsboy.WeakReferenceCollectedException}; * in that case nothing should be done:
* the listener was claimed by GC before event handling (but after event scheduling)</p> * @param event event to notify of * @return list of {@link com.elusive_code.newsboy.NotificationFuture} that represent scheduled notifications */ @Override public List<NotificationFuture> publishOrdered ( Object event ) { if ( event == null ) return Collections.EMPTY_LIST; EventStackTrace stackTrace = null; if (saveEventStackTrace){ stackTrace = new EventStackTrace(event); } lastOrderedEvent = new PublishAction ( event, lastOrderedEvent, true, stackTrace ); notificatorPool.execute ( lastOrderedEvent ); return new ArrayList<NotificationFuture>(lastOrderedEvent.getNotifiers()); } private void addListenerByClass (Class clazz, WeakEventHandler handler) { Set<WeakEventHandler> handlers = listenersByClass.get(clazz); if (handlers == null) { handlers = new HashSet<>(); listenersByClass.put(clazz, handlers); } handlers.add(handler); } private void removeListenerByClass(Class clazz, WeakEventHandler handler) { Set<WeakEventHandler> handlers = listenersByClass.get(clazz); if (handlers == null) return; handlers.remove(handler); } /** * Task that initiates event notifications and handles ordering */ protected class PublishAction extends RecursiveAction { private Object event; private List<EventNotifierTask> notifiers; private PublishAction previousEvent; private boolean ordered; private EventStackTrace stackTrace; public PublishAction(Object event, EventStackTrace stackTrace) { this(event, null, false, stackTrace); } public PublishAction(Object event, PublishAction previousEvent, boolean ordered, EventStackTrace stackTrace) { this.event = event; this.ordered = ordered; this.previousEvent = previousEvent; this.stackTrace = stackTrace; this.notifiers = Collections.unmodifiableList(collectNotifiers()); } public List<EventNotifierTask> getNotifiers() { return notifiers; } /** * Collects all notifiers for current event * @return list of notification tasks */ private LinkedList<EventNotifierTask> collectNotifiers() { LinkedList<EventNotifierTask> notifiers = new LinkedList<>(); listenersLock.lock(); try { Set<Class> classes = EventServiceHelper.collectClassHierarchy(event.getClass()); for (Class clazz : classes) { Set<WeakEventHandler> handlers = listenersByClass.get(clazz); if (handlers != null) { Iterator<WeakEventHandler> i = handlers.iterator(); while (i.hasNext()) { WeakEventHandler eventHandler = i.next(); Object listener = eventHandler.getTarget(); if (listener == null) { // listener collected by GC i.remove(); } else { EventNotifierTask task = new EventNotifierTask(eventHandler, event, AsyncEventService.this, stackTrace); notifiers.add(task); } } } } return notifiers; } finally { listenersLock.unlock(); } } @Override protected void compute() { try { // if the event is ordered and it's not the first one, wait for its turn if (previousEvent != null) { previousEvent.quietlyJoin(); } // schedule the notifications for (EventNotifierTask task : getNotifiers()) { task.fork(); } // if the event is ordered we should wait for notifications to complete // so that the next event won't fire until we notify of this one if (ordered) { for (EventNotifierTask task : getNotifiers()) { task.quietlyJoin(); } } } finally { // for processed events we need to set previous to null to prevent a memory leak // (chaining events with hard references like current event->prev->prev->.....->first event) previousEvent = null; } } } }
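// Illustrative usage sketch (not part of the original source), assuming a handler
// is a public method annotated with @Subscribe whose single parameter selects the
// event type, as suggested by EventServiceHelper.createObjectEventHandlers.
class AsyncEventServiceExample {
    static class QuoteListener {
        @Subscribe
        public void onQuote(String quoteEvent) {
            System.out.println("received: " + quoteEvent);
        }
    }

    public static void main(String[] args) throws InterruptedException {
        AsyncEventService service = new AsyncEventService();
        // keep a strong reference: the service only holds subscribers weakly
        QuoteListener listener = new QuoteListener();
        service.subscribe(listener);
        // publishOrdered preserves ordering relative to other ordered events
        service.publishOrdered("ACME=12.5");
        service.publishOrdered("GLOBEX=98.1");
        Thread.sleep(100); // crude wait for the asynchronous notifications in this sketch
    }
}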
/* * Copyright 2000-2017 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.ide.bookmarks; import com.intellij.ide.IdeBundle; import com.intellij.ide.ui.UISettings; import com.intellij.ide.ui.UISettingsListener; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.components.*; import com.intellij.openapi.editor.Document; import com.intellij.openapi.editor.Editor; import com.intellij.openapi.editor.EditorFactory; import com.intellij.openapi.editor.colors.EditorColorsListener; import com.intellij.openapi.editor.colors.EditorColorsManager; import com.intellij.openapi.editor.colors.EditorColorsScheme; import com.intellij.openapi.editor.event.*; import com.intellij.openapi.editor.ex.MarkupModelEx; import com.intellij.openapi.editor.impl.DocumentMarkupModel; import com.intellij.openapi.fileEditor.FileDocumentManager; import com.intellij.openapi.project.DumbAwareRunnable; import com.intellij.openapi.project.Project; import com.intellij.openapi.startup.StartupManager; import com.intellij.openapi.ui.InputValidator; import com.intellij.openapi.ui.Messages; import com.intellij.openapi.util.*; import com.intellij.openapi.util.text.StringUtil; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.openapi.vfs.VirtualFileManager; import com.intellij.psi.PsiDocumentManager; import com.intellij.psi.PsiFile; import com.intellij.util.messages.MessageBus; import com.intellij.util.messages.MessageBusConnection; import com.intellij.util.ui.UIUtil; import org.jdom.Element; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import javax.swing.*; import java.awt.*; import java.awt.event.InputEvent; import java.util.*; import java.util.List; import java.util.concurrent.CopyOnWriteArrayList; @State( name = "BookmarkManager", storages = { @Storage(StoragePathMacros.WORKSPACE_FILE) } ) public class BookmarkManager implements PersistentStateComponent<Element> { private static final int MAX_AUTO_DESCRIPTION_SIZE = 50; private static final Key<List<Bookmark>> BOOKMARKS_KEY = Key.create("bookmarks"); private final List<Bookmark> myBookmarks = new CopyOnWriteArrayList<>(); private final Map<Trinity<VirtualFile, Integer, String>, Bookmark> myDeletedDocumentBookmarks = new HashMap<>(); private final Map<Document, List<Trinity<Bookmark, Integer, String>>> myBeforeChangeData = new HashMap<>(); private final MessageBus myBus; private final Project myProject; private boolean mySortedState; public static BookmarkManager getInstance(Project project) { return ServiceManager.getService(project, BookmarkManager.class); } public BookmarkManager(Project project, PsiDocumentManager documentManager, EditorColorsManager colorsManager, EditorFactory editorFactory) { myProject = project; myBus = project.getMessageBus(); MessageBusConnection connection = project.getMessageBus().connect(); connection.subscribe(EditorColorsManager.TOPIC, new EditorColorsListener() { @Override public void globalSchemeChange(EditorColorsScheme scheme) { 
colorsChanged(); } }); EditorEventMulticaster multicaster = editorFactory.getEventMulticaster(); multicaster.addDocumentListener(new MyDocumentListener(), myProject); multicaster.addEditorMouseListener(new MyEditorMouseListener(), myProject); documentManager.addListener(new PsiDocumentManager.Listener() { @Override public void documentCreated(@NotNull final Document document, PsiFile psiFile) { final VirtualFile file = FileDocumentManager.getInstance().getFile(document); if (file == null) return; for (final Bookmark bookmark : myBookmarks) { if (Comparing.equal(bookmark.getFile(), file)) { UIUtil.invokeLaterIfNeeded(() -> { if (myProject.isDisposed()) return; bookmark.createHighlighter((MarkupModelEx)DocumentMarkupModel.forDocument(document, myProject, true)); map(document, bookmark); }); } } } @Override public void fileCreated(@NotNull PsiFile file, @NotNull Document document) { } }); mySortedState = UISettings.getInstance().getSortBookmarks(); connection.subscribe(UISettingsListener.TOPIC, uiSettings -> { if (mySortedState != uiSettings.getSortBookmarks()) { mySortedState = uiSettings.getSortBookmarks(); EventQueue.invokeLater(() -> myBus.syncPublisher(BookmarksListener.TOPIC).bookmarksOrderChanged()); } }); } private static void map(Document document, Bookmark bookmark) { if (document == null || bookmark == null) return; ApplicationManager.getApplication().assertIsDispatchThread(); List<Bookmark> list = document.getUserData(BOOKMARKS_KEY); if (list == null) { document.putUserData(BOOKMARKS_KEY, list = new ArrayList<>()); } list.add(bookmark); } private static void unmap(Document document, Bookmark bookmark) { if (document == null || bookmark == null) return; List<Bookmark> list = document.getUserData(BOOKMARKS_KEY); if (list != null && list.remove(bookmark) && list.isEmpty()) { document.putUserData(BOOKMARKS_KEY, null); } } public void editDescription(@NotNull Bookmark bookmark, JComponent popup) { String description = Messages .showInputDialog(popup, IdeBundle.message("action.bookmark.edit.description.dialog.message"), IdeBundle.message("action.bookmark.edit.description.dialog.title"), Messages.getQuestionIcon(), bookmark.getDescription(), new InputValidator() { @Override public boolean checkInput(String inputString) { return true; } @Override public boolean canClose(String inputString) { return true; } }); if (description != null) { setDescription(bookmark, description); } } public void addEditorBookmark(@NotNull Editor editor, int lineIndex) { Document document = editor.getDocument(); PsiFile psiFile = PsiDocumentManager.getInstance(myProject).getPsiFile(document); if (psiFile == null) return; final VirtualFile virtualFile = psiFile.getVirtualFile(); if (virtualFile == null) return; addTextBookmark(virtualFile, lineIndex, getAutoDescription(editor, lineIndex)); } @NotNull public Bookmark addTextBookmark(@NotNull VirtualFile file, int lineIndex, @NotNull String description) { Bookmark b = new Bookmark(myProject, file, lineIndex, description); myBookmarks.add(0, b); map(b.getDocument(), b); myBus.syncPublisher(BookmarksListener.TOPIC).bookmarkAdded(b); return b; } @NotNull public static String getAutoDescription(@NotNull final Editor editor, final int lineIndex) { String autoDescription = editor.getSelectionModel().getSelectedText(); if (autoDescription == null) { Document document = editor.getDocument(); autoDescription = document.getCharsSequence() .subSequence(document.getLineStartOffset(lineIndex), document.getLineEndOffset(lineIndex)).toString().trim(); } if 
(autoDescription.length() > MAX_AUTO_DESCRIPTION_SIZE) { return autoDescription.substring(0, MAX_AUTO_DESCRIPTION_SIZE) + "..."; } return autoDescription; } @Nullable public Bookmark addFileBookmark(@Nullable VirtualFile file, @NotNull String description) { if (file == null) return null; if (findFileBookmark(file) != null) return null; Bookmark b = new Bookmark(myProject, file, -1, description); myBookmarks.add(0, b); myBus.syncPublisher(BookmarksListener.TOPIC).bookmarkAdded(b); return b; } @NotNull public List<Bookmark> getValidBookmarks() { List<Bookmark> answer = new ArrayList<>(); for (Bookmark bookmark : myBookmarks) { if (bookmark.isValid()) answer.add(bookmark); } if (UISettings.getInstance().getSortBookmarks()) { Collections.sort(answer); } return answer; } @Nullable public Bookmark findEditorBookmark(@NotNull Document document, int line) { List<Bookmark> bookmarks = document.getUserData(BOOKMARKS_KEY); if (bookmarks != null) { for (Bookmark bookmark : bookmarks) { if (bookmark.getLine() == line) { return bookmark; } } } return null; } @Nullable public Bookmark findFileBookmark(@NotNull VirtualFile file) { for (Bookmark bookmark : myBookmarks) { if (Comparing.equal(bookmark.getFile(), file) && bookmark.getLine() == -1) return bookmark; } return null; } @Nullable public Bookmark findBookmarkForMnemonic(char m) { final char mm = Character.toUpperCase(m); for (Bookmark bookmark : myBookmarks) { if (mm == bookmark.getMnemonic()) return bookmark; } return null; } public boolean hasBookmarksWithMnemonics() { for (Bookmark bookmark : myBookmarks) { if (bookmark.getMnemonic() != 0) return true; } return false; } public void removeBookmark(@NotNull Bookmark bookmark) { if (myBookmarks.remove(bookmark)) { unmap(bookmark.getDocument(), bookmark); bookmark.release(); myBus.syncPublisher(BookmarksListener.TOPIC).bookmarkRemoved(bookmark); } } @Override public Element getState() { Element container = new Element("BookmarkManager"); writeExternal(container); return container; } @Override public void loadState(final Element state) { StartupManager.getInstance(myProject).runWhenProjectIsInitialized((DumbAwareRunnable)() -> { for (Bookmark bookmark : myBookmarks) { bookmark.release(); unmap(bookmark.getDocument(), bookmark); } myBookmarks.clear(); readExternal(state); }); } private void readExternal(Element element) { for (final Object o : element.getChildren()) { Element bookmarkElement = (Element)o; if ("bookmark".equals(bookmarkElement.getName())) { String url = bookmarkElement.getAttributeValue("url"); String line = bookmarkElement.getAttributeValue("line"); String description = StringUtil.notNullize(bookmarkElement.getAttributeValue("description")); String mnemonic = bookmarkElement.getAttributeValue("mnemonic"); Bookmark b = null; VirtualFile file = VirtualFileManager.getInstance().findFileByUrl(url); if (file != null) { if (line != null) { try { int lineIndex = Integer.parseInt(line); b = addTextBookmark(file, lineIndex, description); } catch (NumberFormatException e) { // Ignore. 
Will miss bookmark if line number cannot be parsed } } else { b = addFileBookmark(file, description); } } if (b != null && mnemonic != null && mnemonic.length() == 1) { setMnemonic(b, mnemonic.charAt(0)); } } } } private void writeExternal(Element element) { List<Bookmark> reversed = new ArrayList<>(myBookmarks); Collections.reverse(reversed); for (Bookmark bookmark : reversed) { if (!bookmark.isValid()) continue; Element bookmarkElement = new Element("bookmark"); bookmarkElement.setAttribute("url", bookmark.getFile().getUrl()); String description = bookmark.getNotEmptyDescription(); if (description != null) { bookmarkElement.setAttribute("description", description); } int line = bookmark.getLine(); if (line >= 0) { bookmarkElement.setAttribute("line", String.valueOf(line)); } char mnemonic = bookmark.getMnemonic(); if (mnemonic != 0) { bookmarkElement.setAttribute("mnemonic", String.valueOf(mnemonic)); } element.addContent(bookmarkElement); } } /** * Try to move bookmark one position up in the list * * @return bookmark list after moving */ @NotNull public List<Bookmark> moveBookmarkUp(@NotNull Bookmark bookmark) { final int index = myBookmarks.indexOf(bookmark); if (index > 0) { Collections.swap(myBookmarks, index, index - 1); EventQueue.invokeLater(() -> { myBus.syncPublisher(BookmarksListener.TOPIC).bookmarkChanged(myBookmarks.get(index)); myBus.syncPublisher(BookmarksListener.TOPIC).bookmarkChanged(myBookmarks.get(index - 1)); }); } return myBookmarks; } /** * Try to move bookmark one position down in the list * * @return bookmark list after moving */ @NotNull public List<Bookmark> moveBookmarkDown(@NotNull Bookmark bookmark) { final int index = myBookmarks.indexOf(bookmark); if (index < myBookmarks.size() - 1) { Collections.swap(myBookmarks, index, index + 1); EventQueue.invokeLater(() -> { myBus.syncPublisher(BookmarksListener.TOPIC).bookmarkChanged(myBookmarks.get(index)); myBus.syncPublisher(BookmarksListener.TOPIC).bookmarkChanged(myBookmarks.get(index + 1)); }); } return myBookmarks; } @Nullable public Bookmark findLineBookmark(@NotNull Editor editor, boolean isWrapped, boolean next) { List<Bookmark> bookmarksForDocument = editor.getDocument().getUserData(BOOKMARKS_KEY); if (bookmarksForDocument == null) return null; int sign = next ? 1 : -1; Collections.sort(bookmarksForDocument, (o1, o2) -> sign * (o1.getLine() - o2.getLine())); int caretLine = editor.getCaretModel().getLogicalPosition().line; for (Bookmark bookmark : bookmarksForDocument) { if (next && bookmark.getLine() > caretLine) return bookmark; if (!next && bookmark.getLine() < caretLine) return bookmark; } return isWrapped && !bookmarksForDocument.isEmpty() ? 
bookmarksForDocument.get(0) : null; } public void setMnemonic(@NotNull Bookmark bookmark, char c) { final Bookmark old = findBookmarkForMnemonic(c); if (old != null) removeBookmark(old); bookmark.setMnemonic(c); myBus.syncPublisher(BookmarksListener.TOPIC).bookmarkChanged(bookmark); } public void setDescription(@NotNull Bookmark bookmark, String description) { bookmark.setDescription(description); myBus.syncPublisher(BookmarksListener.TOPIC).bookmarkChanged(bookmark); } public void colorsChanged() { for (Bookmark bookmark : myBookmarks) { bookmark.updateHighlighter(); } } private class MyEditorMouseListener extends EditorMouseAdapter { @Override public void mouseClicked(final EditorMouseEvent e) { if (e.getArea() != EditorMouseEventArea.LINE_MARKERS_AREA) return; if (e.getMouseEvent().isPopupTrigger()) return; if ((e.getMouseEvent().getModifiers() & (SystemInfo.isMac ? InputEvent.META_MASK : InputEvent.CTRL_MASK)) == 0) return; Editor editor = e.getEditor(); int line = editor.xyToLogicalPosition(new Point(e.getMouseEvent().getX(), e.getMouseEvent().getY())).line; if (line < 0) return; Document document = editor.getDocument(); Bookmark bookmark = findEditorBookmark(document, line); if (bookmark == null) { addEditorBookmark(editor, line); } else { removeBookmark(bookmark); } e.consume(); } } private class MyDocumentListener implements DocumentListener { @Override public void beforeDocumentChange(DocumentEvent e) { for (Bookmark bookmark : myBookmarks) { Document doc = bookmark.getDocument(); if (doc == null || doc != e.getDocument()) continue; if (bookmark.getLine() == -1) continue; List<Trinity<Bookmark, Integer, String>> list = myBeforeChangeData.get(doc); if (list == null) { myBeforeChangeData.put(doc, list = new ArrayList<>()); } list.add(new Trinity<>(bookmark, bookmark.getLine(), doc.getText(new TextRange(doc.getLineStartOffset(bookmark.getLine()), doc.getLineEndOffset(bookmark.getLine()))))); } } private boolean isDuplicate(Bookmark bookmark, @Nullable List<Bookmark> toRemove) { for (Bookmark b : myBookmarks) { if (b == bookmark) continue; if (!b.isValid()) continue; if (Comparing.equal(b.getFile(), bookmark.getFile()) && b.getLine() == bookmark.getLine()) { if (toRemove == null || !toRemove.contains(b)) { return true; } } } return false; } private void moveToDeleted(Bookmark bookmark) { List<Trinity<Bookmark, Integer, String>> list = myBeforeChangeData.get(bookmark.getDocument()); if (list != null) { for (Trinity<Bookmark, Integer, String> trinity : list) { if (trinity.first == bookmark) { removeBookmark(bookmark); myDeletedDocumentBookmarks.put(new Trinity<>(bookmark.getFile(), trinity.second, trinity.third), bookmark); break; } } } } @Override public void documentChanged(DocumentEvent e) { List<Bookmark> bookmarksToRemove = null; for (Bookmark bookmark : myBookmarks) { if (!bookmark.isValid() || isDuplicate(bookmark, bookmarksToRemove)) { if (bookmarksToRemove == null) { bookmarksToRemove = new ArrayList<>(); } bookmarksToRemove.add(bookmark); } } if (bookmarksToRemove != null) { for (Bookmark bookmark : bookmarksToRemove) { if (bookmark.getDocument() == e.getDocument()) { moveToDeleted(bookmark); } else { removeBookmark(bookmark); } } } myBeforeChangeData.remove(e.getDocument()); for (Iterator<Map.Entry<Trinity<VirtualFile, Integer, String>, Bookmark>> iterator = myDeletedDocumentBookmarks.entrySet().iterator(); iterator.hasNext(); ) { Map.Entry<Trinity<VirtualFile, Integer, String>, Bookmark> entry = iterator.next(); if (!entry.getKey().first.isValid()) { iterator.remove(); 
continue; } Bookmark bookmark = entry.getValue(); Document document = bookmark.getDocument(); if (document == null || !bookmark.getFile().equals(entry.getKey().first)) { continue; } Integer line = entry.getKey().second; if (document.getLineCount() <= line) { continue; } String lineContent = getLineContent(document, line); String bookmarkedText = entry.getKey().third; //'move statement up' action kills line bookmark: fix for single line movement up/down if (!bookmarkedText.equals(lineContent) && line > 1 && (bookmarkedText.equals(StringUtil.trimEnd(e.getNewFragment().toString(), "\n")) || bookmarkedText.equals(StringUtil.trimEnd(e.getOldFragment().toString(), "\n")))) { line -= 2; lineContent = getLineContent(document, line); } if (bookmarkedText.equals(lineContent) && findEditorBookmark(document, line) == null) { Bookmark restored = addTextBookmark(bookmark.getFile(), line, bookmark.getDescription()); if (bookmark.getMnemonic() != 0) { setMnemonic(restored, bookmark.getMnemonic()); } iterator.remove(); } } } private String getLineContent(Document document, int line) { int start = document.getLineStartOffset(line); int end = document.getLineEndOffset(line); return document.getText(new TextRange(start, end)); } } }
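/*
 * A minimal usage sketch for the reordering API above, assuming an instance of
 * the surrounding bookmark manager class is already in hand (in the IntelliJ
 * Platform it is typically obtained per project). The helper class and method
 * names below are illustrative, not part of the platform API.
 */
final class BookmarkReorderSketch {
    // Moves a bookmark to the top of the list by repeatedly swapping it upward.
    // moveBookmarkUp returns the (possibly reordered) backing list, so the loop
    // terminates once the bookmark reaches index 0 (or is not in the list).
    static void moveToTop(BookmarkManager manager, Bookmark bookmark) {
        java.util.List<Bookmark> order = manager.moveBookmarkUp(bookmark);
        while (order.indexOf(bookmark) > 0) {
            order = manager.moveBookmarkUp(bookmark);
        }
    }
}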
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.stratos.integration.tests.application; import com.google.gson.Gson; import com.google.gson.GsonBuilder; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.stratos.common.beans.application.ApplicationBean; import org.apache.stratos.common.beans.policy.deployment.ApplicationPolicyBean; import org.apache.stratos.integration.common.RestConstants; import org.apache.stratos.integration.common.TopologyHandler; import org.apache.stratos.integration.tests.StratosIntegrationTest; import org.apache.stratos.messaging.domain.topology.Member; import org.apache.stratos.metadata.client.beans.PropertyBean; import org.apache.stratos.mock.iaas.domain.MockInstanceMetadata; import org.testng.Assert; import org.testng.annotations.Test; import java.util.*; import static org.testng.Assert.*; import static org.testng.AssertJUnit.assertFalse; import static org.testng.AssertJUnit.assertTrue; /** * Deploy a sample application on mock IaaS and assert whether application instance, cluster instance, member instances * are getting activated. 
Kill the mock instances and check whether the application becomes inactive and then recovers by spinning up replacement members. */ @Test(groups = { "application", "smoke" }) public class SampleApplicationStartupTestCase extends StratosIntegrationTest { private static final Log log = LogFactory.getLog(SampleApplicationStartupTestCase.class); private static final String RESOURCES_PATH = "/sample-application-startup-test"; private TopologyHandler topologyHandler = TopologyHandler.getInstance(); private static final String PAYLOAD_PARAMETER_SEPARATOR = ","; private static final String PAYLOAD_PARAMETER_NAME_VALUE_SEPARATOR = "="; private static final String PAYLOAD_PARAMETER_TOKEN_KEY = "TOKEN"; private static final String PAYLOAD_PARAMETER_APPLICATION_ID_KEY = "APPLICATION_ID"; private GsonBuilder gsonBuilder = new GsonBuilder(); private Gson gson = gsonBuilder.create(); @Test(timeOut = DEFAULT_APPLICATION_TEST_TIMEOUT, description = "Application startup, activation and faulty member detection") public void testApplication() throws Exception { log.info("Running SampleApplicationStartupTestCase.testApplication test method..."); long startTime = System.currentTimeMillis(); String autoscalingPolicyId = "autoscaling-policy-sample-application-startup-test"; log.info("Adding autoscaling policy [autoscale policy id] " + autoscalingPolicyId); boolean addedScalingPolicy = restClient.addEntity( RESOURCES_PATH + RestConstants.AUTOSCALING_POLICIES_PATH + "/" + autoscalingPolicyId + ".json", RestConstants.AUTOSCALING_POLICIES, RestConstants.AUTOSCALING_POLICIES_NAME); assertTrue(addedScalingPolicy); log.info("Adding cartridge [cartridge type] c1-sample-application-startup-test"); boolean addedC1 = restClient.addEntity( RESOURCES_PATH + RestConstants.CARTRIDGES_PATH + "/" + "c1-sample-application-startup-test.json", RestConstants.CARTRIDGES, RestConstants.CARTRIDGES_NAME); assertTrue(addedC1); log.info("Adding network partition [network partition id] sample-application-startup-test"); boolean addedN1 = restClient.addEntity(RESOURCES_PATH + RestConstants.NETWORK_PARTITIONS_PATH + "/" + "network-partition-sample-application-startup-test.json", RestConstants.NETWORK_PARTITIONS, RestConstants.NETWORK_PARTITIONS_NAME); assertTrue(addedN1); log.info("Adding deployment policy [deployment policy id] deployment-policy-sample-application-startup-test"); boolean addedDep = restClient.addEntity(RESOURCES_PATH + RestConstants.DEPLOYMENT_POLICIES_PATH + "/" + "deployment-policy-sample-application-startup-test.json", RestConstants.DEPLOYMENT_POLICIES, RestConstants.DEPLOYMENT_POLICIES_NAME); assertTrue(addedDep); log.info("Adding application [application id] sample-application-startup-test"); boolean addedApp = restClient.addEntity(RESOURCES_PATH + RestConstants.APPLICATIONS_PATH + "/" + "sample-application-startup-test.json", RestConstants.APPLICATIONS, RestConstants.APPLICATIONS_NAME); Assert.assertTrue(addedApp); ApplicationBean bean = (ApplicationBean) restClient .getEntity(RestConstants.APPLICATIONS, "sample-application-startup-test", ApplicationBean.class, RestConstants.APPLICATIONS_NAME); assertEquals(bean.getApplicationId(), "sample-application-startup-test"); log.info( "Adding application policy [application policy id] application-policy-sample-application-startup-test"); boolean addAppPolicy = restClient.addEntity(RESOURCES_PATH + RestConstants.APPLICATION_POLICIES_PATH + "/" + "application-policy-sample-application-startup-test.json", RestConstants.APPLICATION_POLICIES, RestConstants.APPLICATION_POLICIES_NAME); assertTrue(addAppPolicy); ApplicationPolicyBean policyBean = (ApplicationPolicyBean)
restClient .getEntity(RestConstants.APPLICATION_POLICIES, "application-policy-sample-application-startup-test", ApplicationPolicyBean.class, RestConstants.APPLICATION_POLICIES_NAME); assertEquals(policyBean.getId(), "application-policy-sample-application-startup-test"); // Used policies/cartridges should not be removed; asserting validations when removing policies log.info("Trying to remove used autoscaling policy..."); boolean removedUsedAuto = restClient.removeEntity(RestConstants.AUTOSCALING_POLICIES, autoscalingPolicyId, RestConstants.AUTOSCALING_POLICIES_NAME); assertFalse(removedUsedAuto); log.info("Trying to remove used network partition..."); boolean removedUsedNet = restClient .removeEntity(RestConstants.NETWORK_PARTITIONS, "network-partition-sample-application-startup-test", RestConstants.NETWORK_PARTITIONS_NAME); assertFalse(removedUsedNet); log.info("Trying to remove used deployment policy..."); boolean removedUsedDep = restClient .removeEntity(RestConstants.DEPLOYMENT_POLICIES, "deployment-policy-sample-application-startup-test", RestConstants.DEPLOYMENT_POLICIES_NAME); assertFalse(removedUsedDep); log.info("Deploying application [application id] sample-application-startup-test using [application policy id] " + "application-policy-sample-application-startup-test"); String resourcePath = RestConstants.APPLICATIONS + "/sample-application-startup-test" + RestConstants.APPLICATIONS_DEPLOY + "/application-policy-sample-application-startup-test"; boolean deployed = restClient.deployEntity(resourcePath, RestConstants.APPLICATIONS_NAME); assertTrue(deployed); log.info("Trying to remove the used application policy"); boolean removedUsedAppPolicy = restClient .removeEntity(RestConstants.APPLICATION_POLICIES, "application-policy-sample-application-startup-test", RestConstants.APPLICATION_POLICIES_NAME); assertFalse(removedUsedAppPolicy); log.info("Trying to remove the deployed application without undeploying first"); boolean removed = restClient.removeEntity(RestConstants.APPLICATIONS, "sample-application-startup-test", RestConstants.APPLICATIONS_NAME); assertFalse(removed); log.info("Waiting for application status to become ACTIVE..."); //topologyHandler.assertApplicationStatus(bean.getApplicationId(), ApplicationStatus.Active); TopologyHandler.getInstance().assertApplicationActiveStatus(bean.getApplicationId()); log.info("Waiting for cluster status to become ACTIVE..."); topologyHandler.assertClusterActivation(bean.getApplicationId()); List<Member> memberList = topologyHandler.getMembersForApplication(bean.getApplicationId()); Assert.assertTrue(memberList.size() > 1, String.format("Application %s should have more than one active member", bean.getApplicationId())); MockInstanceMetadata mockInstanceMetadata = mockIaasApiClient.getInstance(memberList.get(0).getMemberId()); String payloadString = mockInstanceMetadata.getPayload(); log.info("Mock instance payload properties: " + payloadString); Properties payloadProperties = new Properties(); String[] parameterArray = payloadString.split(PAYLOAD_PARAMETER_SEPARATOR); for (String parameter : parameterArray) { if (parameter != null) { String[] nameValueArray = parameter.split(PAYLOAD_PARAMETER_NAME_VALUE_SEPARATOR, 2); if ((nameValueArray.length == 2)) { payloadProperties.put(nameValueArray[0], nameValueArray[1]); } } } String key = "mykey"; String val1 = "myval1"; String val2 = "myval2"; String accessToken = payloadProperties.getProperty(PAYLOAD_PARAMETER_TOKEN_KEY); String appId = payloadProperties.getProperty(PAYLOAD_PARAMETER_APPLICATION_ID_KEY);
assertNotNull(accessToken, "Access token is null in member payload"); log.info("Trying to add metadata for application:" + appId + ", with accessToken: " + accessToken); boolean hasProperty1Added = restClient.addPropertyToApplication(appId, key, val1, accessToken); Assert.assertTrue(hasProperty1Added, "Could not add metadata property1 to application: " + appId); boolean hasProperty2Added = restClient.addPropertyToApplication(appId, key, val2, accessToken); Assert.assertTrue(hasProperty2Added, "Could not add metadata property2 to application: " + appId); PropertyBean propertyBean = restClient.getApplicationProperty(appId, key, accessToken); log.info("Retrieved metadata property: " + gson.toJson(propertyBean)); Assert.assertTrue(propertyBean != null && propertyBean.getValues().size() > 0, "Empty property list"); List<String> addedValues = new ArrayList<>(Arrays.asList(val1, val2)); boolean hasPropertiesAdded = propertyBean.getValues().containsAll(addedValues); Assert.assertTrue(hasPropertiesAdded, "Metadata properties retrieved are not correct"); log.info("Terminating members in [cluster id] c1-sample-application-startup-test in mock IaaS directly to " + "simulate faulty members..."); Map<String, Member> memberMap = TopologyHandler.getInstance() .getMembersForCluster("c1-sample-application-startup-test", bean.getApplicationId()); for (Map.Entry<String, Member> entry : memberMap.entrySet()) { String memberId = entry.getValue().getMemberId(); TopologyHandler.getInstance().terminateMemberInMockIaas(memberId, mockIaasApiClient); TopologyHandler.getInstance().assertMemberTermination(memberId); } // application status should be marked as inactive since some members are faulty log.info("Waiting for application status to become INACTIVE"); TopologyHandler.getInstance().assertApplicationActiveStatus(bean.getApplicationId()); // application should recover itself and become active after spinning more instances log.info("Waiting for application status to become ACTIVE..."); //topologyHandler.assertApplicationStatus(bean.getApplicationId(), ApplicationStatus.Active); TopologyHandler.getInstance().assertApplicationActiveStatus(bean.getApplicationId()); log.info("Waiting for cluster status to become ACTIVE..."); topologyHandler.assertClusterActivation(bean.getApplicationId()); log.info("Un-deploying the application [application id] sample-application-startup-test"); String resourcePathUndeploy = RestConstants.APPLICATIONS + "/sample-application-startup-test" + RestConstants.APPLICATIONS_UNDEPLOY; boolean unDeployed = restClient.undeployEntity(resourcePathUndeploy, RestConstants.APPLICATIONS_NAME); assertTrue(unDeployed); boolean undeploy = topologyHandler.assertApplicationUndeploy("sample-application-startup-test"); if (!undeploy) { //Need to forcefully undeploy the application log.info("Force undeployment is going to start for the [application] sample-application-startup-test"); restClient.undeployEntity(RestConstants.APPLICATIONS + "/sample-application-startup-test" + RestConstants.APPLICATIONS_UNDEPLOY + "?force=true", RestConstants.APPLICATIONS); boolean forceUndeployed = topologyHandler.assertApplicationUndeploy("sample-application-startup-test"); assertTrue(String.format("Forceful undeployment failed for the application %s", "sample-application-startup-test"), forceUndeployed); } log.info("Removing application [application id] sample-application-startup-test"); boolean removedApp = restClient.removeEntity(RestConstants.APPLICATIONS, "sample-application-startup-test", RestConstants.APPLICATIONS_NAME); 
assertTrue(removedApp); ApplicationBean beanRemoved = (ApplicationBean) restClient .getEntity(RestConstants.APPLICATIONS, "sample-application-startup-test", ApplicationBean.class, RestConstants.APPLICATIONS_NAME); assertNull(beanRemoved); log.info("Removing application policy [application policy id] " + "application-policy-sample-application-startup-test"); boolean removeAppPolicy = restClient .removeEntity(RestConstants.APPLICATION_POLICIES, "application-policy-sample-application-startup-test", RestConstants.APPLICATION_POLICIES_NAME); assertTrue(removeAppPolicy); log.info("Removing cartridge [cartridge type] c1-sample-application-startup-test"); boolean removedC1 = restClient.removeEntity(RestConstants.CARTRIDGES, "c1-sample-application-startup-test", RestConstants.CARTRIDGES_NAME); assertTrue(removedC1); log.info("Removing autoscaling policy [autoscaling policy id] " + autoscalingPolicyId); boolean removedAuto = restClient.removeEntity(RestConstants.AUTOSCALING_POLICIES, autoscalingPolicyId, RestConstants.AUTOSCALING_POLICIES_NAME); assertTrue(removedAuto); log.info("Removing deployment policy [deployment policy id] " + "deployment-policy-sample-application-startup-test"); boolean removedDep = restClient .removeEntity(RestConstants.DEPLOYMENT_POLICIES, "deployment-policy-sample-application-startup-test", RestConstants.DEPLOYMENT_POLICIES_NAME); assertTrue(removedDep); log.info("Removing network partition [network partition id] network-partition-sample-application-startup-test"); boolean removedNet = restClient .removeEntity(RestConstants.NETWORK_PARTITIONS, "network-partition-sample-application-startup-test", RestConstants.NETWORK_PARTITIONS_NAME); assertTrue(removedNet); long duration = System.currentTimeMillis() - startTime; log.info(String.format("SampleApplicationStartupTestCase completed in [duration] %s ms", duration)); } }
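/*
 * The test above parses the mock instance payload, a "name=value,name=value"
 * string, into java.util.Properties. A standalone sketch of the same parsing
 * logic using plain JDK calls; the class name is illustrative and the literal
 * separators stand in for the test's PAYLOAD_PARAMETER_* constants.
 */
final class PayloadParseSketch {
    static java.util.Properties parse(String payload) {
        java.util.Properties props = new java.util.Properties();
        for (String parameter : payload.split(",")) {
            // Limit of 2 keeps any '=' characters inside the value intact.
            String[] nameValue = parameter.split("=", 2);
            if (nameValue.length == 2) {
                props.put(nameValue[0], nameValue[1]);
            }
        }
        return props;
    }

    public static void main(String[] args) {
        java.util.Properties p = parse("APPLICATION_ID=sample-app,TOKEN=abc=123");
        System.out.println(p.getProperty("TOKEN")); // prints "abc=123"
    }
}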
/* * Copyright 2012 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.gradle.api.publish.maven.internal.publication; import org.gradle.api.Action; import org.gradle.api.XmlProvider; import org.gradle.api.internal.UserCodeAction; import org.gradle.api.model.ObjectFactory; import org.gradle.api.provider.MapProperty; import org.gradle.api.provider.Property; import org.gradle.api.publish.internal.versionmapping.VersionMappingStrategyInternal; import org.gradle.api.publish.maven.MavenDependency; import org.gradle.api.publish.maven.MavenPomCiManagement; import org.gradle.api.publish.maven.MavenPomContributor; import org.gradle.api.publish.maven.MavenPomContributorSpec; import org.gradle.api.publish.maven.MavenPomDeveloper; import org.gradle.api.publish.maven.MavenPomDeveloperSpec; import org.gradle.api.publish.maven.MavenPomDistributionManagement; import org.gradle.api.publish.maven.MavenPomIssueManagement; import org.gradle.api.publish.maven.MavenPomLicense; import org.gradle.api.publish.maven.MavenPomLicenseSpec; import org.gradle.api.publish.maven.MavenPomMailingList; import org.gradle.api.publish.maven.MavenPomMailingListSpec; import org.gradle.api.publish.maven.MavenPomOrganization; import org.gradle.api.publish.maven.MavenPomScm; import org.gradle.api.publish.maven.internal.dependencies.MavenDependencyInternal; import org.gradle.api.publish.maven.internal.publisher.MavenProjectIdentity; import org.gradle.internal.MutableActionSet; import org.gradle.internal.reflect.Instantiator; import java.util.ArrayList; import java.util.List; import java.util.Set; public class DefaultMavenPom implements MavenPomInternal, MavenPomLicenseSpec, MavenPomDeveloperSpec, MavenPomContributorSpec, MavenPomMailingListSpec { private final MutableActionSet<XmlProvider> xmlAction = new MutableActionSet<XmlProvider>(); private final MavenPublicationInternal mavenPublication; private final Instantiator instantiator; private final ObjectFactory objectFactory; private String packaging; private Property<String> name; private Property<String> description; private Property<String> url; private Property<String> inceptionYear; private final List<MavenPomLicense> licenses = new ArrayList<MavenPomLicense>(); private MavenPomOrganization organization; private final List<MavenPomDeveloper> developers = new ArrayList<MavenPomDeveloper>(); private final List<MavenPomContributor> contributors = new ArrayList<MavenPomContributor>(); private MavenPomScm scm; private MavenPomIssueManagement issueManagement; private MavenPomCiManagement ciManagement; private MavenPomDistributionManagementInternal distributionManagement; private final List<MavenPomMailingList> mailingLists = new ArrayList<MavenPomMailingList>(); private final MapProperty<String, String> properties; public DefaultMavenPom(MavenPublicationInternal mavenPublication, Instantiator instantiator, ObjectFactory objectFactory) { this.mavenPublication = mavenPublication; this.instantiator = instantiator; this.objectFactory = objectFactory; this.name = 
objectFactory.property(String.class); this.description = objectFactory.property(String.class); this.url = objectFactory.property(String.class); this.inceptionYear = objectFactory.property(String.class); this.properties = objectFactory.mapProperty(String.class, String.class); } @Override public void withXml(Action<? super XmlProvider> action) { xmlAction.add(new UserCodeAction<XmlProvider>("Could not apply withXml() to generated POM", action)); } @Override public Action<XmlProvider> getXmlAction() { return xmlAction; } @Override public VersionMappingStrategyInternal getVersionMappingStrategy() { return mavenPublication.getVersionMappingStrategy(); } @Override public boolean writeGradleMetadataMarker() { return mavenPublication.writeGradleMetadataMarker(); } @Override public String getPackaging() { if (packaging == null) { return mavenPublication.determinePackagingFromArtifacts(); } return packaging; } @Override public void setPackaging(String packaging) { this.packaging = packaging; } @Override public Property<String> getName() { return name; } @Override public Property<String> getDescription() { return description; } @Override public Property<String> getUrl() { return url; } @Override public Property<String> getInceptionYear() { return inceptionYear; } @Override public void licenses(Action<? super MavenPomLicenseSpec> action) { action.execute(this); } @Override public void license(Action<? super MavenPomLicense> action) { configureAndAdd(DefaultMavenPomLicense.class, action, licenses); } @Override public List<MavenPomLicense> getLicenses() { return licenses; } @Override public void organization(Action<? super MavenPomOrganization> action) { if (organization == null) { organization = instantiator.newInstance(DefaultMavenPomOrganization.class, objectFactory); } action.execute(organization); } @Override public MavenPomOrganization getOrganization() { return organization; } @Override public void developers(Action<? super MavenPomDeveloperSpec> action) { action.execute(this); } @Override public void developer(Action<? super MavenPomDeveloper> action) { configureAndAdd(DefaultMavenPomDeveloper.class, action, developers); } @Override public List<MavenPomDeveloper> getDevelopers() { return developers; } @Override public void contributors(Action<? super MavenPomContributorSpec> action) { action.execute(this); } @Override public void contributor(Action<? super MavenPomContributor> action) { configureAndAdd(DefaultMavenPomDeveloper.class, action, contributors); } @Override public List<MavenPomContributor> getContributors() { return contributors; } @Override public MavenPomScm getScm() { return scm; } @Override public void scm(Action<? super MavenPomScm> action) { if (scm == null) { scm = instantiator.newInstance(DefaultMavenPomScm.class, objectFactory); } action.execute(scm); } @Override public void issueManagement(Action<? super MavenPomIssueManagement> action) { if (issueManagement == null) { issueManagement = instantiator.newInstance(DefaultMavenPomProjectManagement.class, objectFactory); } action.execute(issueManagement); } @Override public MavenPomIssueManagement getIssueManagement() { return issueManagement; } @Override public void ciManagement(Action<? super MavenPomCiManagement> action) { if (ciManagement == null) { ciManagement = instantiator.newInstance(DefaultMavenPomProjectManagement.class, objectFactory); } action.execute(ciManagement); } @Override public MavenPomCiManagement getCiManagement() { return ciManagement; } @Override public void distributionManagement(Action<? 
super MavenPomDistributionManagement> action) { if (distributionManagement == null) { distributionManagement = instantiator.newInstance(DefaultMavenPomDistributionManagement.class, instantiator, objectFactory); } action.execute(distributionManagement); } @Override public MavenPomDistributionManagementInternal getDistributionManagement() { return distributionManagement; } @Override public void mailingLists(Action<? super MavenPomMailingListSpec> action) { action.execute(this); } @Override public void mailingList(Action<? super MavenPomMailingList> action) { configureAndAdd(DefaultMavenPomMailingList.class, action, mailingLists); } @Override public List<MavenPomMailingList> getMailingLists() { return mailingLists; } @Override public MapProperty<String, String> getProperties() { return properties; } @Override public MavenProjectIdentity getProjectIdentity() { return mavenPublication.getMavenProjectIdentity(); } @Override public Set<MavenDependencyInternal> getApiDependencies() { return mavenPublication.getApiDependencies(); } @Override public Set<MavenDependencyInternal> getOptionalDependencies() { return mavenPublication.getOptionalDependencies(); } @Override public Set<MavenDependencyInternal> getRuntimeDependencies() { return mavenPublication.getRuntimeDependencies(); } @Override public Set<MavenDependency> getApiDependencyManagement() { return mavenPublication.getApiDependencyConstraints(); } @Override public Set<MavenDependency> getRuntimeDependencyManagement() { return mavenPublication.getRuntimeDependencyConstraints(); } @Override public Set<MavenDependency> getImportDependencyManagement() { return mavenPublication.getImportDependencyConstraints(); } private <T> void configureAndAdd(Class<? extends T> clazz, Action<? super T> action, List<T> items) { T item = instantiator.newInstance(clazz, objectFactory); action.execute(item); items.add(item); } }
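/*
 * A minimal sketch of driving the Action-based configuration API implemented
 * above, e.g. from plugin code that already holds a DefaultMavenPom instance.
 * The helper class is illustrative; the accessors used are the Property and
 * MapProperty getters declared on the public MavenPom interfaces.
 */
final class PomConfigSketch {
    static void configure(DefaultMavenPom pom) {
        pom.getName().set("my-library");
        pom.getDescription().set("An example library");
        // licenses(...) passes the pom itself as the MavenPomLicenseSpec, and
        // license(...) instantiates and registers one DefaultMavenPomLicense.
        pom.licenses(licenses -> licenses.license(license -> {
            license.getName().set("The Apache License, Version 2.0");
            license.getUrl().set("http://www.apache.org/licenses/LICENSE-2.0.txt");
        }));
        pom.getProperties().put("project.build.sourceEncoding", "UTF-8");
    }
}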
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.nifi.controller; import java.io.ByteArrayInputStream; import java.io.DataInputStream; import java.io.IOException; import java.io.InputStream; import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.Map; import java.util.Set; import javax.xml.bind.JAXBContext; import javax.xml.bind.Unmarshaller; import javax.xml.transform.dom.DOMSource; import org.apache.nifi.persistence.TemplateDeserializer; import org.apache.nifi.stream.io.StreamUtils; import org.apache.nifi.web.api.dto.ConnectableDTO; import org.apache.nifi.web.api.dto.ConnectionDTO; import org.apache.nifi.web.api.dto.ControllerServiceDTO; import org.apache.nifi.web.api.dto.FlowSnippetDTO; import org.apache.nifi.web.api.dto.ProcessGroupDTO; import org.apache.nifi.web.api.dto.ProcessorConfigDTO; import org.apache.nifi.web.api.dto.ProcessorDTO; import org.apache.nifi.web.api.dto.PropertyDescriptorDTO; import org.apache.nifi.web.api.dto.RelationshipDTO; import org.apache.nifi.web.api.dto.RemoteProcessGroupDTO; import org.apache.nifi.web.api.dto.TemplateDTO; import org.w3c.dom.Element; public class TemplateUtils { public static TemplateDTO parseDto(final Element templateElement) { try { JAXBContext context = JAXBContext.newInstance(TemplateDTO.class); Unmarshaller unmarshaller = context.createUnmarshaller(); return unmarshaller.unmarshal(new DOMSource(templateElement), TemplateDTO.class).getValue(); } catch (final Exception e) { throw new RuntimeException("Could not parse XML as a valid template", e); } } public static TemplateDTO parseDto(final byte[] bytes) { try (final InputStream in = new ByteArrayInputStream(bytes)) { return TemplateDeserializer.deserialize(in); } catch (final IOException ioe) { throw new RuntimeException("Could not parse bytes as template", ioe); // won't happen because of the types of streams being used } } public static List<Template> parseTemplateStream(final byte[] bytes) { final List<Template> templates = new ArrayList<>(); try (final InputStream rawIn = new ByteArrayInputStream(bytes); final DataInputStream in = new DataInputStream(rawIn)) { while (isMoreData(in)) { final int length = in.readInt(); final byte[] buffer = new byte[length]; StreamUtils.fillBuffer(in, buffer, true); final TemplateDTO dto = TemplateDeserializer.deserialize(new ByteArrayInputStream(buffer)); templates.add(new Template(dto)); } } catch (final IOException e) { throw new RuntimeException("Could not parse bytes", e); // won't happen because of the types of streams being used } return templates; } private static boolean isMoreData(final InputStream in) throws IOException { in.mark(1); final int nextByte = in.read(); if (nextByte == -1) { return false; } in.reset(); return true; } /** * Scrubs the template prior to 
persisting in order to remove fields that shouldn't be included or are unnecessary. * * @param templateDto template */ public static void scrubTemplate(final TemplateDTO templateDto) { scrubSnippet(templateDto.getSnippet()); } private static void scrubSnippet(final FlowSnippetDTO snippet) { // ensure that contents have been specified if (snippet != null) { // go through each processor if specified if (snippet.getProcessors() != null) { scrubProcessors(snippet.getProcessors()); } // go through each connection if specified if (snippet.getConnections() != null) { scrubConnections(snippet.getConnections()); } // go through each remote process group if specified if (snippet.getRemoteProcessGroups() != null) { scrubRemoteProcessGroups(snippet.getRemoteProcessGroups()); } // go through each process group if specified if (snippet.getProcessGroups() != null) { scrubProcessGroups(snippet.getProcessGroups()); } // go through each controller service if specified if (snippet.getControllerServices() != null) { scrubControllerServices(snippet.getControllerServices()); } } } /** * Scrubs process groups prior to saving. * * @param processGroups groups */ private static void scrubProcessGroups(final Set<ProcessGroupDTO> processGroups) { // go through each process group for (final ProcessGroupDTO processGroupDTO : processGroups) { processGroupDTO.setActiveRemotePortCount(null); processGroupDTO.setDisabledCount(null); processGroupDTO.setInactiveRemotePortCount(null); processGroupDTO.setInputPortCount(null); processGroupDTO.setInvalidCount(null); processGroupDTO.setOutputPortCount(null); processGroupDTO.setRunningCount(null); processGroupDTO.setStoppedCount(null); scrubSnippet(processGroupDTO.getContents()); } } /** * Scrubs processors prior to saving. This includes removing sensitive properties, validation errors, property descriptors, etc. 
* * @param processors procs */ private static void scrubProcessors(final Set<ProcessorDTO> processors) { // go through each processor for (final ProcessorDTO processorDTO : processors) { final ProcessorConfigDTO processorConfig = processorDTO.getConfig(); // ensure that some property configuration has been specified if (processorConfig != null) { // if properties have been specified, remove sensitive ones if (processorConfig.getProperties() != null) { Map<String, String> processorProperties = processorConfig.getProperties(); // look for sensitive properties and remove them if (processorConfig.getDescriptors() != null) { final Collection<PropertyDescriptorDTO> descriptors = processorConfig.getDescriptors().values(); for (PropertyDescriptorDTO descriptor : descriptors) { if (Boolean.TRUE.equals(descriptor.isSensitive())) { processorProperties.put(descriptor.getName(), null); } scrubPropertyDescriptor(descriptor); } } } processorConfig.setCustomUiUrl(null); processorConfig.setDefaultConcurrentTasks(null); processorConfig.setDefaultSchedulingPeriod(null); processorConfig.setAutoTerminatedRelationships(null); } if (processorDTO.getRelationships() != null) { for (final RelationshipDTO relationship : processorDTO.getRelationships()) { relationship.setDescription(null); } } processorDTO.setValidationErrors(null); processorDTO.setInputRequirement(null); processorDTO.setDescription(null); processorDTO.setPersistsState(null); processorDTO.setState(null); processorDTO.setSupportsBatching(null); processorDTO.setSupportsEventDriven(null); processorDTO.setSupportsParallelProcessing(null); } } /** * The only thing that we really need from the Property Descriptors in the templates is the * flag that indicates whether or not the property identifies a controller service. * Everything else is unneeded and makes templates very verbose and more importantly makes it * so that if one of these things changes, the template itself changes, which makes it hard to * use a CM tool for versioning. So we remove all that we don't need. * * @param descriptor the PropertyDescriptor to scrub */ private static void scrubPropertyDescriptor(final PropertyDescriptorDTO descriptor) { descriptor.setAllowableValues(null); descriptor.setDefaultValue(null); descriptor.setDescription(null); descriptor.setDisplayName(null); descriptor.setDynamic(null); descriptor.setRequired(null); descriptor.setSensitive(null); descriptor.setSupportsEl(null); } private static void scrubControllerServices(final Set<ControllerServiceDTO> controllerServices) { for (final ControllerServiceDTO serviceDTO : controllerServices) { final Map<String, String> properties = serviceDTO.getProperties(); final Map<String, PropertyDescriptorDTO> descriptors = serviceDTO.getDescriptors(); if (properties != null && descriptors != null) { for (final PropertyDescriptorDTO descriptor : descriptors.values()) { if (Boolean.TRUE.equals(descriptor.isSensitive())) { properties.put(descriptor.getName(), null); } scrubPropertyDescriptor(descriptor); } } serviceDTO.setCustomUiUrl(null); serviceDTO.setValidationErrors(null); } } /** * Scrubs connections prior to saving. This includes removing available relationships.
* * @param connections conns */ private static void scrubConnections(final Set<ConnectionDTO> connections) { // go through each connection for (final ConnectionDTO connectionDTO : connections) { connectionDTO.setAvailableRelationships(null); scrubConnectable(connectionDTO.getSource()); scrubConnectable(connectionDTO.getDestination()); } } /** * Remove unnecessary fields in connectables prior to saving. * * @param connectable connectable */ private static void scrubConnectable(final ConnectableDTO connectable) { if (connectable != null) { connectable.setComments(null); connectable.setExists(null); connectable.setRunning(null); connectable.setTransmitting(null); connectable.setName(null); } } /** * Remove unnecessary fields in remote groups prior to saving. * * @param remoteGroups groups */ private static void scrubRemoteProcessGroups(final Set<RemoteProcessGroupDTO> remoteGroups) { // go through each remote process group for (final RemoteProcessGroupDTO remoteProcessGroupDTO : remoteGroups) { remoteProcessGroupDTO.setFlowRefreshed(null); remoteProcessGroupDTO.setInputPortCount(null); remoteProcessGroupDTO.setOutputPortCount(null); remoteProcessGroupDTO.setTransmitting(null); remoteProcessGroupDTO.setProxyPassword(null); remoteProcessGroupDTO.setActiveRemoteInputPortCount(null); remoteProcessGroupDTO.setInactiveRemoteInputPortCount(null); remoteProcessGroupDTO.setActiveRemoteOutputPortCount(null); remoteProcessGroupDTO.setInactiveRemoteOutputPortCount(null); remoteProcessGroupDTO.setAuthorizationIssues(null); remoteProcessGroupDTO.setName(null); remoteProcessGroupDTO.setTargetSecure(null); } } }
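/*
 * A minimal sketch of the intended call sequence for the utilities above:
 * deserialize a template and scrub it before persisting. It only calls the
 * two public methods shown in this file; the byte array is assumed to hold
 * a serialized TemplateDTO, and the class name is illustrative.
 */
final class TemplateScrubSketch {
    static TemplateDTO importAndScrub(final byte[] templateBytes) {
        final TemplateDTO dto = TemplateUtils.parseDto(templateBytes); // deserialize
        TemplateUtils.scrubTemplate(dto); // strip sensitive values and transient state
        return dto;
    }
}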
/* * Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ package com.amazonaws.services.ecs.model; import java.io.Serializable; /** * <p> * Details on the network bindings between a container and its host * container instance. * </p> */ public class NetworkBinding implements Serializable, Cloneable { /** * The IP address that the container is bound to on the container * instance. */ private String bindIP; /** * The port number on the container that is used with the network * binding. */ private Integer containerPort; /** * The port number on the host that is used with the network binding. */ private Integer hostPort; /** * The protocol used for the network binding. * <p> * <b>Constraints:</b><br/> * <b>Allowed Values: </b>tcp, udp */ private String protocol; /** * The IP address that the container is bound to on the container * instance. * * @return The IP address that the container is bound to on the container * instance. */ public String getBindIP() { return bindIP; } /** * The IP address that the container is bound to on the container * instance. * * @param bindIP The IP address that the container is bound to on the container * instance. */ public void setBindIP(String bindIP) { this.bindIP = bindIP; } /** * The IP address that the container is bound to on the container * instance. * <p> * Returns a reference to this object so that method calls can be chained together. * * @param bindIP The IP address that the container is bound to on the container * instance. * * @return A reference to this updated object so that method calls can be chained * together. */ public NetworkBinding withBindIP(String bindIP) { this.bindIP = bindIP; return this; } /** * The port number on the container that is used with the network * binding. * * @return The port number on the container that is used with the network * binding. */ public Integer getContainerPort() { return containerPort; } /** * The port number on the container that is used with the network * binding. * * @param containerPort The port number on the container that is used with the network * binding. */ public void setContainerPort(Integer containerPort) { this.containerPort = containerPort; } /** * The port number on the container that is used with the network * binding. * <p> * Returns a reference to this object so that method calls can be chained together. * * @param containerPort The port number on the container that is used with the network * binding. * * @return A reference to this updated object so that method calls can be chained * together. */ public NetworkBinding withContainerPort(Integer containerPort) { this.containerPort = containerPort; return this; } /** * The port number on the host that is used with the network binding. * * @return The port number on the host that is used with the network binding. */ public Integer getHostPort() { return hostPort; } /** * The port number on the host that is used with the network binding.
* * @param hostPort The port number on the host that is used with the network binding. */ public void setHostPort(Integer hostPort) { this.hostPort = hostPort; } /** * The port number on the host that is used with the network binding. * <p> * Returns a reference to this object so that method calls can be chained together. * * @param hostPort The port number on the host that is used with the network binding. * * @return A reference to this updated object so that method calls can be chained * together. */ public NetworkBinding withHostPort(Integer hostPort) { this.hostPort = hostPort; return this; } /** * The protocol used for the network binding. * <p> * <b>Constraints:</b><br/> * <b>Allowed Values: </b>tcp, udp * * @return The protocol used for the network binding. * * @see TransportProtocol */ public String getProtocol() { return protocol; } /** * The protocol used for the network binding. * <p> * <b>Constraints:</b><br/> * <b>Allowed Values: </b>tcp, udp * * @param protocol The protocol used for the network binding. * * @see TransportProtocol */ public void setProtocol(String protocol) { this.protocol = protocol; } /** * The protocol used for the network binding. * <p> * Returns a reference to this object so that method calls can be chained together. * <p> * <b>Constraints:</b><br/> * <b>Allowed Values: </b>tcp, udp * * @param protocol The protocol used for the network binding. * * @return A reference to this updated object so that method calls can be chained * together. * * @see TransportProtocol */ public NetworkBinding withProtocol(String protocol) { this.protocol = protocol; return this; } /** * The protocol used for the network binding. * <p> * <b>Constraints:</b><br/> * <b>Allowed Values: </b>tcp, udp * * @param protocol The protocol used for the network binding. * * @see TransportProtocol */ public void setProtocol(TransportProtocol protocol) { this.protocol = protocol.toString(); } /** * The protocol used for the network binding. * <p> * Returns a reference to this object so that method calls can be chained together. * <p> * <b>Constraints:</b><br/> * <b>Allowed Values: </b>tcp, udp * * @param protocol The protocol used for the network binding. * * @return A reference to this updated object so that method calls can be chained * together. * * @see TransportProtocol */ public NetworkBinding withProtocol(TransportProtocol protocol) { this.protocol = protocol.toString(); return this; } /** * Returns a string representation of this object; useful for testing and * debugging. * * @return A string representation of this object. * * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getBindIP() != null) sb.append("BindIP: " + getBindIP() + ","); if (getContainerPort() != null) sb.append("ContainerPort: " + getContainerPort() + ","); if (getHostPort() != null) sb.append("HostPort: " + getHostPort() + ","); if (getProtocol() != null) sb.append("Protocol: " + getProtocol() ); sb.append("}"); return sb.toString(); } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getBindIP() == null) ? 0 : getBindIP().hashCode()); hashCode = prime * hashCode + ((getContainerPort() == null) ? 0 : getContainerPort().hashCode()); hashCode = prime * hashCode + ((getHostPort() == null) ? 0 : getHostPort().hashCode()); hashCode = prime * hashCode + ((getProtocol() == null) ? 
0 : getProtocol().hashCode()); return hashCode; } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof NetworkBinding == false) return false; NetworkBinding other = (NetworkBinding)obj; if (other.getBindIP() == null ^ this.getBindIP() == null) return false; if (other.getBindIP() != null && other.getBindIP().equals(this.getBindIP()) == false) return false; if (other.getContainerPort() == null ^ this.getContainerPort() == null) return false; if (other.getContainerPort() != null && other.getContainerPort().equals(this.getContainerPort()) == false) return false; if (other.getHostPort() == null ^ this.getHostPort() == null) return false; if (other.getHostPort() != null && other.getHostPort().equals(this.getHostPort()) == false) return false; if (other.getProtocol() == null ^ this.getProtocol() == null) return false; if (other.getProtocol() != null && other.getProtocol().equals(this.getProtocol()) == false) return false; return true; } @Override public NetworkBinding clone() { try { return (NetworkBinding) super.clone(); } catch (CloneNotSupportedException e) { throw new IllegalStateException( "Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e); } } }
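/*
 * Because every with* method above returns this, a NetworkBinding can be
 * populated fluently; a small runnable sketch (the printed form follows the
 * toString() implementation above). TransportProtocol is the enum from the
 * same package; the example values are illustrative.
 */
import com.amazonaws.services.ecs.model.NetworkBinding;
import com.amazonaws.services.ecs.model.TransportProtocol;

public class NetworkBindingExample {
    public static void main(String[] args) {
        NetworkBinding binding = new NetworkBinding()
                .withBindIP("0.0.0.0")
                .withContainerPort(8080)
                .withHostPort(80)
                .withProtocol(TransportProtocol.TCP); // stored as its string form, "tcp"
        System.out.println(binding);
    }
}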
package edu.wpi.rail.jrosbridge; import javax.json.Json; import javax.json.JsonObject; import edu.wpi.rail.jrosbridge.callback.ServiceCallback; import edu.wpi.rail.jrosbridge.callback.CallServiceCallback; import edu.wpi.rail.jrosbridge.services.ServiceRequest; import edu.wpi.rail.jrosbridge.services.ServiceResponse; /** * The Service object is responsible for calling or advertising a service in ROS. * * @author Russell Toris - rctoris@wpi.edu * @version November 26, 2014 */ public class Service { private final Ros ros; private final String name; private final String type; private boolean isAdvertised; /** * Create a ROS service with the given information. * * @param ros * A handle to the ROS connection. * @param name * The name of the service (e.g., "/add_two_ints"). * @param type * The service type (e.g., "rospy_tutorials/AddTwoInts"). */ public Service(Ros ros, String name, String type) { this.ros = ros; this.name = name; this.type = type; this.isAdvertised = false; } /** * Get the ROS connection handle for this service. * * @return The ROS connection handle for this service. */ public Ros getRos() { return this.ros; } /** * Get the name of this service. * * @return The name of this service. */ public String getName() { return this.name; } /** * Return the service type of this service. * * @return The service type of this service. */ public String getType() { return this.type; } /** * Check if the current service is advertising to ROS. * * @return If the current service is advertising to ROS. */ public boolean isAdvertised() { return this.isAdvertised; } /** * Call this service. The callback function will be called with the * associated service response. * * @param request * The service request to send. * @param cb * The callback used when the associated response comes back. */ public void callService(ServiceRequest request, ServiceCallback cb) { // construct the unique ID String callServiceId = "call_service:" + this.name + ":" + this.ros.nextId(); // register the callback function this.ros.registerServiceCallback(callServiceId, cb); // build and send the rosbridge call JsonObject call = Json.createObjectBuilder() .add(JRosbridge.FIELD_OP, JRosbridge.OP_CODE_CALL_SERVICE) .add(JRosbridge.FIELD_ID, callServiceId) .add(JRosbridge.FIELD_TYPE, this.type) .add(JRosbridge.FIELD_SERVICE, this.name) .add(JRosbridge.FIELD_ARGS, request.toJsonObject()).build(); this.ros.send(call); } /** * Send a service response. * * @param response * The service response to send. * @param id * The ID of the response (matching that of the service call). */ public void sendResponse(ServiceResponse response, String id) { // build and send the rosbridge call JsonObject call = Json.createObjectBuilder() .add(JRosbridge.FIELD_OP, JRosbridge.OP_CODE_SERVICE_RESPONSE) .add(JRosbridge.FIELD_ID, id) .add(JRosbridge.FIELD_SERVICE, this.name) .add(JRosbridge.FIELD_VALUES, response.toJsonObject()) .add(JRosbridge.FIELD_RESULT, response.getResult()).build(); this.ros.send(call); } /** * Register as a service advertiser. * * @param cb * The callback used to handle incoming service calls. */ public void advertiseService(CallServiceCallback cb) { // register the callback this.ros.registerCallServiceCallback(this.name, cb); // build and send the rosbridge call JsonObject call = Json.createObjectBuilder() .add(JRosbridge.FIELD_OP, JRosbridge.OP_CODE_ADVERTISE_SERVICE) .add(JRosbridge.FIELD_TYPE, this.type) .add(JRosbridge.FIELD_SERVICE, this.name).build(); this.ros.send(call); // set the flag indicating we are registered this.isAdvertised = true; } /** * Unregister as a service advertiser.
*/ public void unadvertiseService() { this.ros.deregisterCallServiceCallback(this.name); // build and send the rosbridge call JsonObject call = Json.createObjectBuilder() .add(JRosbridge.FIELD_OP, JRosbridge.OP_CODE_UNADVERTISE_SERVICE) .add(JRosbridge.FIELD_SERVICE, this.name).build(); this.ros.send(call); // clear the flag to indicate we are no longer registered this.isAdvertised = false; } /** * Call the service and wait for a response. This is a blocking call and * will only return once rosbridge returns the service response. For an * asynchronous version of this call, see the * {@link #callService(ServiceRequest request, ServiceCallback cb) * callService} method. * * @param request * The service request to send. * @return The corresponding service response from ROS. */ public synchronized ServiceResponse callServiceAndWait( ServiceRequest request) { // private inner class to use as a callback BlockingCallback cb = new BlockingCallback(this); // use the asynchronous version and block on the result this.callService(request, cb); // wait for a response while (cb.getResponse() == null) { try { this.wait(); } catch (InterruptedException e) { // continue on } } return cb.getResponse(); } /** * A private {@link edu.wpi.rail.jrosbridge.callback.ServiceCallback * ServiceCallback} used to block and wait for a response from rosbridge. * * @author Russell Toris - rctoris@wpi.edu * @version April 1, 2014 */ private class BlockingCallback implements ServiceCallback { private ServiceResponse response; private Service service; /** * Create a new callback function which will notify the given * {@link edu.wpi.rail.jrosbridge.Service Service} once a response * has been received. * * @param service * The {@link edu.wpi.rail.jrosbridge.Service Service} * to notify once a response has been received. */ public BlockingCallback(Service service) { this.response = null; this.service = service; } /** * Store the response internally and notify the corresponding * {@link edu.wpi.rail.jrosbridge.Service Service}. * * @param response * The incoming service response from ROS. */ @Override public void handleServiceResponse(ServiceResponse response) { this.response = response; synchronized (this.service) { this.service.notifyAll(); } } /** * Get the response stored in this callback, if one exists. Otherwise, * null is returned. * * @return The resulting service response from ROS, or null if one does * not exist yet. */ public ServiceResponse getResponse() { return this.response; } } }
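/*
 * A minimal sketch of a blocking service call through the class above,
 * modeled on the rosbridge add_two_ints tutorial. It assumes a rosbridge
 * server reachable on localhost and a ServiceRequest constructor that
 * accepts a JSON string; the class name and values are illustrative.
 */
import edu.wpi.rail.jrosbridge.services.ServiceRequest;
import edu.wpi.rail.jrosbridge.services.ServiceResponse;

public class AddTwoIntsExample {
    public static void main(String[] args) {
        Ros ros = new Ros("localhost");
        ros.connect();
        Service addTwoInts = new Service(ros, "/add_two_ints", "rospy_tutorials/AddTwoInts");
        ServiceRequest request = new ServiceRequest("{\"a\": 10, \"b\": 20}");
        // Blocks until rosbridge returns the response (see callServiceAndWait above).
        ServiceResponse response = addTwoInts.callServiceAndWait(request);
        System.out.println(response.toString());
        ros.disconnect();
    }
}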
/* * Copyright 2017 dmfs GmbH * * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.dmfs.android.colorpicker.palettes; import android.content.Context; import android.content.res.Resources; import android.os.Parcel; import android.os.Parcelable; /** * A palette that gets colors and names from arrays. If no column number is specified, this class uses the largest integer not exceeding the square root of the number of * colors. * * @author Marten Gajda */ public final class ArrayPalette implements Palette { public static final Parcelable.Creator<ArrayPalette> CREATOR = new Parcelable.Creator<ArrayPalette>() { @Override public ArrayPalette createFromParcel(Parcel in) { return new ArrayPalette(in.readString(), in.readString(), in.createIntArray(), in.readInt(), in.createStringArray()); } @Override public ArrayPalette[] newArray(int size) { return new ArrayPalette[size]; } }; /** * The name of this palette. */ private final String mPaletteName; /** * The Id of this palette. */ private final String mPaletteId; /** * The colors in this palette. */ private final int[] mColors; /** * The names of the colors. */ private final String[] mColorNames; /** * The number of columns to use for the layout of this palette. */ private final int mColumns; /** * Build a new palette from arrays. * * @param id * An identifier for this palette. * @param paletteName * The name of the palette. * @param colors * An array of colors. * @param columns * The number of columns to show when determining the layout for this palette. * @param names * The names of the colors, or <code>null</code> if the colors don't have names. */ public ArrayPalette(String id, String paletteName, int[] colors, int columns, String[] names) { mPaletteId = id; mPaletteName = paletteName; mColors = colors; mColorNames = names; mColumns = columns; } /** * Build a new palette from arrays using a square layout if possible. * * @param id * An identifier for this palette. * @param paletteName * The name of the palette. * @param colors * An array of colors. * @param names * The names of the colors, or <code>null</code> if the colors don't have names. */ public ArrayPalette(String id, String paletteName, int[] colors, String[] names) { this(id, paletteName, colors, (int) Math.floor(Math.sqrt(colors.length)), names); } /** * Build a new palette from arrays. * * @param id * An identifier for this palette. * @param paletteName * The name of the palette. * @param colors * An array of colors. * @param columns * The number of columns to show when determining the layout for this palette. */ public ArrayPalette(String id, String paletteName, int[] colors, int columns) { this(id, paletteName, colors, columns, null); } /** * Build a new palette from arrays using a square layout if possible. * * @param id * An identifier for this palette. * @param paletteName * The name of the palette. * @param colors * An array of colors.
*/ public ArrayPalette(String id, String paletteName, int[] colors) { this(id, paletteName, colors, (int) Math.floor(Math.sqrt(colors.length)), null); } /** * Get an {@link ArrayPalette} from {@link Resources}. * * @param resources * The {@link Resources}. * @param id * An identifier for this palette. * @param paletteName * A string resource id for the palette name. * @param colorArray * The integer array resource id for the colors. * @param columns * The number of columns to use for the layout. * @param colorNameArray * A string array resource for the color names. * * @return An {@link ArrayPalette} instance. */ public static ArrayPalette fromResources(Resources resources, String id, int paletteName, int colorArray, int columns, int colorNameArray) { return new ArrayPalette(id, resources.getString(paletteName), resources.getIntArray(colorArray), columns, resources.getStringArray(colorNameArray)); } /** * Get an {@link ArrayPalette} from {@link Resources}. * * @param resources * The {@link Resources}. * @param id * An identifier for this palette. * @param paletteName * A string resource id for the palette name. * @param colorArray * The integer array resource id for the colors. * @param colorNameArray * A string array resource for the color names. * * @return An {@link ArrayPalette} instance. */ public static ArrayPalette fromResources(Resources resources, String id, int paletteName, int colorArray, int colorNameArray) { return new ArrayPalette(id, resources.getString(paletteName), resources.getIntArray(colorArray), resources.getStringArray(colorNameArray)); } /** * Get an {@link ArrayPalette} from {@link Resources}. * * @param resources * The {@link Resources}. * @param id * An identifier for this palette. * @param paletteName * A string resource id for the palette name. * @param colorArray * The integer array resource id for the colors. * * @return An {@link ArrayPalette} instance. */ public static ArrayPalette fromResources(Resources resources, String id, int paletteName, int colorArray) { return new ArrayPalette(id, resources.getString(paletteName), resources.getIntArray(colorArray)); } /** * Get an {@link ArrayPalette} from the resources. * * @param context * A {@link Context}. * @param id * An identifier for this palette. * @param paletteName * A string resource id for the palette name. * @param colorArray * The integer array resource id for the colors. * @param columns * The number of columns to use for the layout. * @param colorNameArray * A string array resource for the color names. * * @return An {@link ArrayPalette} instance. */ public static ArrayPalette fromResources(Context context, String id, int paletteName, int colorArray, int columns, int colorNameArray) { return fromResources(context.getResources(), id, paletteName, colorArray, columns, colorNameArray); } /** * Get an {@link ArrayPalette} from the resources. * * @param context * A {@link Context}. * @param id * An identifier for this palette. * @param paletteName * A string resource id for the palette name. * @param colorArray * The integer array resource id for the colors. * @param colorNameArray * A string array resource for the color names. * * @return An {@link ArrayPalette} instance. */ public static ArrayPalette fromResources(Context context, String id, int paletteName, int colorArray, int colorNameArray) { return fromResources(context.getResources(), id, paletteName, colorArray, colorNameArray); } /** * Get an {@link ArrayPalette} from the resources. * * @param context * A {@link Context}.
* @param id * An identifier for this palette. * @param paletteName * A string resource id for the palette name. * @param colorArray * The integer array resource id for the colors. * * @return An {@link ArrayPalette} instance. */ public static ArrayPalette fromResources(Context context, String id, int paletteName, int colorArray) { return fromResources(context.getResources(), id, paletteName, colorArray); } @Override public String name() { return mPaletteName; } @Override public String id() { return mPaletteId; } @Override public int numberOfColors() { return mColors.length; } @Override public int colorAt(int index) { return mColors[index]; } @Override public String nameOfColorAt(int index) { if (mColorNames != null) { return mColorNames[index]; } return null; } @Override public int numberOfColumns() { return mColumns; } @Override public int describeContents() { return 0; } @Override public void writeToParcel(Parcel dest, int flags) { dest.writeString(mPaletteId); dest.writeString(mPaletteName); dest.writeIntArray(mColors); dest.writeInt(mColumns); dest.writeStringArray(mColorNames); } }
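/*
 * A minimal construction sketch for the palette class above. With six colors
 * and no explicit column count, the two-array constructor derives
 * (int) Math.floor(Math.sqrt(6)) == 2 columns. The class name, color values,
 * and color names are illustrative.
 */
final class PaletteSketch {
    static ArrayPalette rainbow() {
        int[] colors = {0xFFFF0000, 0xFFFF8800, 0xFFFFFF00, 0xFF00FF00, 0xFF0000FF, 0xFF8800FF};
        String[] names = {"Red", "Orange", "Yellow", "Green", "Blue", "Purple"};
        return new ArrayPalette("rainbow", "Rainbow", colors, names);
    }
}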
// Copyright (c) 2014 Jamison Hope // This is free software; for terms and warranty disclaimer see ./COPYING. package gnu.math; import java.io.*; /** * A quaternion number using plain double values. * @author Jamison Hope */ public class DQuaternion extends Quaternion implements Externalizable { double real; double imag; double jmag; double kmag; public DQuaternion() { } public DQuaternion(double real, double imag, double jmag, double kmag) { this.real = real; this.imag = imag; this.jmag = jmag; this.kmag = kmag; } public RealNum re () { return new DFloNum (real); } public double doubleValue() { return real; } public RealNum im () { return new DFloNum (imag); } public double doubleImagValue () { return imag; } public RealNum jm () { return new DFloNum (jmag); } public double doubleJmagValue () { return jmag; } public RealNum km () { return new DFloNum (kmag); } public double doubleKmagValue () { return kmag; } public boolean isExact () { return false; } public Quaternion toExact() { return new CQuaternion(DFloNum.toExact(real), DFloNum.toExact(imag), DFloNum.toExact(jmag), DFloNum.toExact(kmag)); } public boolean equals(Object obj) { if (obj == null || !(obj instanceof Quaternion)) return false; Quaternion y = (Quaternion)obj; return y.unit() == Unit.Empty && (Double.doubleToLongBits(real) == Double.doubleToLongBits(y.reValue())) && (Double.doubleToLongBits(imag) == Double.doubleToLongBits(y.imValue())) && (Double.doubleToLongBits(jmag) == Double.doubleToLongBits(y.jmValue())) && (Double.doubleToLongBits(kmag) == Double.doubleToLongBits(y.kmValue())); } public String toString() { String reString = DFloNum.toString(real); if ((Double.doubleToLongBits(imag) == 0) && (Double.doubleToLongBits(jmag) == 0) && (Double.doubleToLongBits(kmag) == 0)) return reString; StringBuilder sbuf = new StringBuilder(); if (!reString.equals("0.0")) sbuf.append(reString); if (Double.doubleToLongBits(imag) != 0) { String imString = DFloNum.toString(imag); char ch0 = imString.charAt(0); if (ch0 != '-' && ch0 != '+') sbuf.append('+'); sbuf.append(imString); sbuf.append('i'); } if (Double.doubleToLongBits(jmag) != 0) { String jmString = DFloNum.toString(jmag); char ch0 = jmString.charAt(0); if (ch0 != '-' && ch0 != '+') sbuf.append('+'); sbuf.append(jmString); sbuf.append('j'); } if (Double.doubleToLongBits(kmag) != 0) { String kmString = DFloNum.toString(kmag); char ch0 = kmString.charAt(0); if (ch0 != '-' && ch0 != '+') sbuf.append('+'); sbuf.append(kmString); sbuf.append('k'); } return sbuf.toString(); } public String toString(int radix) { if (radix == 10) return toString(); return "#d" + toString(); } public final Numeric neg() { return new DQuaternion(-real, -imag, -jmag, -kmag); } public Numeric add (Object y, int k) { if (y instanceof Quaternion) { Quaternion yq = (Quaternion)y; if (yq.dimensions() != Dimensions.Empty) throw new ArithmeticException ("units mis-match"); return Quaternion.make(real + k * yq.reValue(), imag + k * yq.imValue(), jmag + k * yq.jmValue(), kmag + k * yq.kmValue()); } return ((Numeric)y).addReversed(this, k); } public Numeric mul (Object y) { if (y instanceof Quaternion) { Quaternion yq = (Quaternion)y; if (yq.unit() == Unit.Empty) { double y_re = yq.reValue(); double y_im = yq.imValue(); double y_jm = yq.jmValue(); double y_km = yq.kmValue(); return Quaternion.make (real * y_re - imag * y_im - jmag * y_jm - kmag * y_km, real * y_im + imag * y_re + jmag * y_km - kmag * y_jm, real * y_jm - imag * y_km + jmag * y_re + kmag * y_im, real * y_km + imag * y_jm - jmag * y_im + kmag * y_re); 
} return Quaternion.times(this, yq); } return ((Numeric)y).mulReversed(this); } public Numeric div(Object y) { if (y instanceof Quaternion) { Quaternion yq = (Quaternion) y; return DQuaternion.div(real, imag, jmag, kmag, yq.doubleValue(), yq.doubleImagValue(), yq.doubleJmagValue(), yq.doubleKmagValue()); } return ((Numeric)y).divReversed(this); } public static double hypot4(double w, double x, double y, double z) { return Math.hypot(Math.hypot(w,x),Math.hypot(y,z)); } public static double hypot3(double x, double y, double z) { return Math.hypot(Math.hypot(x,y),z); } public static Quaternion power(double x_re, double x_im, double x_jm, double x_km, double y_re, double y_im, double y_jm, double y_km) { if (x_jm == 0.0 && x_km == 0.0 && y_jm == 0.0 && y_km == 0.0) return DComplex.power(x_re, x_im, y_re, y_im); if (x_re == 0.0 && x_im == 0.0 && x_jm == 0.0 && x_km == 0.0) if (y_re > 0.0) return DFloNum.valueOf(0.0); else if (y_re == 0.0 && y_im == 0.0 && y_jm == 0.0 && y_km == 0.0) return DFloNum.valueOf(1.0); // ln(x) double qmag = hypot4(x_re, x_im, x_jm, x_km); double vmag = hypot3(x_im, x_jm, x_km); double atv = Math.atan2(vmag, x_re) / vmag; double ln_r = Math.log(qmag); double ln_i = atv * x_im; double ln_j = atv * x_jm; double ln_k = atv * x_km; // ln(x)*y double p_r = ln_r * y_re - ln_i * y_im - ln_j * y_jm - ln_k * y_km; double p_i = ln_r * y_im + ln_i * y_re + ln_j * y_km - ln_k * y_jm; double p_j = ln_r * y_jm - ln_i * y_km + ln_j * y_re + ln_k * y_im; double p_k = ln_r * y_km + ln_i * y_jm - ln_j * y_im + ln_k * y_re; // exp(ln(x)*y) double pvmag = hypot3(p_i,p_j,p_k); double sinpvmag = Math.sin(pvmag); double expr = Math.exp(p_r); if (pvmag == 0.0 || sinpvmag == 0.0) return DFloNum.valueOf(expr * Math.cos(pvmag)); return Quaternion.make(expr * Math.cos(pvmag), expr * sinpvmag * p_i / pvmag, expr * sinpvmag * p_j / pvmag, expr * sinpvmag * p_k / pvmag); } public static Quaternion exp(double x_re, double x_im, double x_jm, double x_km) { if (x_jm == 0.0 && x_km == 0.0) return Complex.polar(Math.exp(x_re), x_im); double vmag = hypot3(x_im,x_jm,x_km); double sinvmag = Math.sin(vmag); double expr = Math.exp(x_re); return Quaternion.make(expr * Math.cos(vmag), expr * sinvmag * x_im / vmag, expr * sinvmag * x_jm / vmag, expr * sinvmag * x_km / vmag); } public static Quaternion log(double x_re, double x_im, double x_jm, double x_km) { if (x_jm == 0.0 && x_km == 0.0) return DComplex.log(x_re, x_im); double qmag = hypot4(x_re,x_im,x_jm,x_km); double vmag = hypot3(x_im,x_jm,x_km); double atv = Math.atan2(vmag, x_re) / vmag; double r = Math.log(qmag); double i = atv * x_im; double j = atv * x_jm; double k = atv * x_km; return Quaternion.make(r, i, j, k); } public static Quaternion div(double x_re, double x_im, double x_jm, double x_km, double y_re, double y_im, double y_jm, double y_km) { if (x_jm == 0.0 && x_km == 0.0 && y_jm == 0.0 && y_km == 0.0) return DComplex.div(x_re, x_im, y_re, y_im); double y_norm = y_re*y_re + y_im*y_im + y_jm*y_jm + y_km*y_km; // This computes (y^-1 * x), which is different from (x * y^-1). 
double r = x_re*y_re + x_im*y_im + x_jm*y_jm + x_km*y_km; double i = x_im*y_re - x_re*y_im + x_km*y_jm - x_jm*y_km; double j = x_jm*y_re - x_re*y_jm + x_im*y_km - x_km*y_im; double k = x_km*y_re - x_re*y_km + x_jm*y_im - x_im*y_jm; return Quaternion.make(r/y_norm, i/y_norm, j/y_norm, k/y_norm); } public static Quaternion sqrt(double x_re, double x_im, double x_jm, double x_km) { if (x_jm == 0.0 && x_km == 0.0) return DComplex.sqrt(x_re, x_im); double qmag = hypot4(x_re,x_im,x_jm,x_km); double vmag = hypot3(x_im,x_jm,x_km); double t = Math.acos(x_re/qmag); double y_mag = Math.sqrt(qmag); double s = Math.sin(t/2); return Quaternion.make(y_mag * Math.cos(t/2), y_mag * s * x_im / vmag, y_mag * s * x_jm / vmag, y_mag * s * x_km / vmag); } public static Quaternion sin(double x_re, double x_im, double x_jm, double x_km) { if (x_jm == 0.0 && x_km == 0.0) return DComplex.sin(x_re, x_im); double vmag = hypot3(x_im,x_jm,x_km); double r = Math.sin(x_re) * Math.cosh(vmag); double v = Math.cos(x_re) * Math.sinh(vmag); return Quaternion.make(r, v * x_im/vmag, v * x_jm/vmag, v * x_km/vmag); } public static Quaternion cos(double x_re, double x_im, double x_jm, double x_km) { if (x_jm == 0.0 && x_km == 0.0) return DComplex.cos(x_re, x_im); double vmag = hypot3(x_im,x_jm,x_km); double r = Math.cos(x_re) * Math.cosh(vmag); double v = -Math.sin(x_re) * Math.sinh(vmag); return Quaternion.make(r, v * x_im/vmag, v * x_jm/vmag, v * x_km/vmag); } public static Quaternion tan(double x_re, double x_im, double x_jm, double x_km) { if (x_jm == 0.0 && x_km == 0.0) return DComplex.tan(x_re, x_im); double vmag = hypot3(x_im,x_jm,x_km); double sin_re = Math.sin(x_re); double cos_re = Math.cos(x_re); double sinh_v = Math.sinh(vmag); double cosh_v = Math.cosh(vmag); // tan = sin/cos return DQuaternion.div(sin_re*cosh_v, cos_re*sinh_v*x_im/vmag, cos_re*sinh_v*x_jm/vmag, cos_re*sinh_v*x_km/vmag, cos_re*cosh_v, -sin_re*sinh_v*x_im/vmag, -sin_re*sinh_v*x_jm/vmag, -sin_re*sinh_v*x_km/vmag); } /** * @serialData Writes the real part, followed by the imaginary parts. * All are written as doubles (using writeDouble). */ public void writeExternal(ObjectOutput out) throws IOException { out.writeDouble(real); out.writeDouble(imag); out.writeDouble(jmag); out.writeDouble(kmag); } public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException { real = in.readDouble(); imag = in.readDouble(); jmag = in.readDouble(); kmag = in.readDouble(); } }
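// A quick check of the Hamilton product implemented in mul() above: i*j = k while
// j*i = -k, so quaternion multiplication is non-commutative. DemoHamiltonProduct is an
// illustrative class assumed to live alongside the gnu.math classes.
class DemoHamiltonProduct {
  public static void main(String[] args) {
    Quaternion i = new DQuaternion(0, 1, 0, 0); // the basis element i
    Quaternion j = new DQuaternion(0, 0, 1, 0); // the basis element j
    System.out.println(i.mul(j)); // expected "+1.0k"  (i * j =  k)
    System.out.println(j.mul(i)); // expected "-1.0k"  (j * i = -k)
  }
}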
/* * Copyright (C) 2015 Orange * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.orange.ngsi.server; import com.orange.ngsi.exception.MismatchIdException; import com.orange.ngsi.exception.MissingRequestParameterException; import com.orange.ngsi.model.*; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.stereotype.Component; import java.net.URI; import java.util.List; /** * Created by pborscia on 07/08/2015. */ @Component public class NgsiValidation { private static Logger logger = LoggerFactory.getLogger(NgsiValidation.class); public void checkUpdateContext(UpdateContext updateContext) throws MissingRequestParameterException { if (updateContext.getUpdateAction() == null) { throw new MissingRequestParameterException("updateAction", "string"); } if (nullOrEmpty(updateContext.getContextElements())) { throw new MissingRequestParameterException("contextElements", "List<ContextElement>"); } for (ContextElement contextElement : updateContext.getContextElements()) { checkContextElement(contextElement); } } public void checkNotifyContext(NotifyContext notifyContext) throws MissingRequestParameterException { if (nullOrEmpty(notifyContext.getSubscriptionId())) { throw new MissingRequestParameterException("subscriptionId", "string"); } if (nullOrEmpty(notifyContext.getOriginator())){ throw new MissingRequestParameterException("originator", "URI"); } if (nullOrEmpty(notifyContext.getContextElementResponseList())) { throw new MissingRequestParameterException("contextElementResponse", "List<ContextElementResponse>"); } for (ContextElementResponse contextElementResponse : notifyContext.getContextElementResponseList()) { checkContextElementResponse(contextElementResponse); } } public void checkRegisterContext(RegisterContext registerContext) throws MissingRequestParameterException { if (nullOrEmpty(registerContext.getContextRegistrationList())) { throw new MissingRequestParameterException("contextRegistrations", "List<ContextRegistration>"); } for (ContextRegistration contextRegistration : registerContext.getContextRegistrationList()) { checkContextRegistration(contextRegistration); } } public void checkSubscribeContext(SubscribeContext subscribeContext) throws MissingRequestParameterException { if (nullOrEmpty(subscribeContext.getEntityIdList())) { throw new MissingRequestParameterException("entities", "List<EntityId>"); } for(EntityId entityId: subscribeContext.getEntityIdList()) { checkEntityId(entityId); } if (nullOrEmpty(subscribeContext.getReference())){ throw new MissingRequestParameterException("reference", "URI"); } if (subscribeContext.getRestriction() != null) { if (nullOrEmpty(subscribeContext.getRestriction().getAttributeExpression()) && nullOrEmpty(subscribeContext.getRestriction().getScopes())) { throw new MissingRequestParameterException("attributeExpression or scopes", "string"); } } } public void checkUpdateContextSubscription(UpdateContextSubscription updateContextSubscription) throws MissingRequestParameterException { if 
(nullOrEmpty(updateContextSubscription.getSubscriptionId())){ throw new MissingRequestParameterException("subscriptionId", "String"); } if (updateContextSubscription.getRestriction() != null) { if (nullOrEmpty(updateContextSubscription.getRestriction().getAttributeExpression()) && nullOrEmpty(updateContextSubscription.getRestriction().getScopes())) { throw new MissingRequestParameterException("attributeExpression or scopes", "string"); } } } public void checkUnsubscribeContext(UnsubscribeContext unsubscribeContext) throws MissingRequestParameterException { if (nullOrEmpty(unsubscribeContext.getSubscriptionId())){ throw new MissingRequestParameterException("subscriptionId", "String"); } } public void checkQueryContext(QueryContext queryContext) throws MissingRequestParameterException { if (nullOrEmpty(queryContext.getEntityIdList())) { throw new MissingRequestParameterException("entities", "List<EntityId>"); } for(EntityId entityId : queryContext.getEntityIdList()) { checkEntityId(entityId); } if (queryContext.getRestriction() != null) { if (nullOrEmpty(queryContext.getRestriction().getAttributeExpression())) { throw new MissingRequestParameterException("attributeExpression", "string"); } } } public void checkAppendContextElement(AppendContextElement appendContextElement) throws MissingRequestParameterException { if (nullOrEmpty(appendContextElement.getAttributeList())) { throw new MissingRequestParameterException("contextAttributes", "List<ContextAttribute>"); } } public void checkUpdateContextElement(UpdateContextElement updateContextElement) throws MissingRequestParameterException { if (nullOrEmpty(updateContextElement.getContextAttributes())) { throw new MissingRequestParameterException("contextAttributes", "List<ContextAttribute>"); } } public void checkUpdateContextAttribute(String entityID, String attributeName, String valueID, UpdateContextAttribute updateContextAttribute) throws MissingRequestParameterException, MismatchIdException { if (nullOrEmpty(entityID)) { throw new MissingRequestParameterException("entityID", "string"); } if (nullOrEmpty(attributeName)) { throw new MissingRequestParameterException("attributeName", "string"); } if (updateContextAttribute == null || updateContextAttribute.getAttribute() == null) { throw new MissingRequestParameterException("attribute", "ContextAttribute"); } // Check attribute name matching if (!attributeName.equals(updateContextAttribute.getAttribute().getName())) { throw new MismatchIdException(attributeName, updateContextAttribute.getAttribute().getName()); } // Check optional valueID matching if (valueID != null) { if (nullOrEmpty(valueID)) { // tests just emptiness throw new MissingRequestParameterException("valueID", "string"); } if (updateContextAttribute.getAttribute().getMetadata() == null) { throw new MissingRequestParameterException("metadata", "Metadata"); } // Check Metadata ID exists and equals valueID for (ContextMetadata metadata : updateContextAttribute.getAttribute().getMetadata()) { if ("ID".equals(metadata.getName())) { if (valueID.equals(metadata.getValue())) { return; // ! \\ Early return ! 
} throw new MismatchIdException(valueID, String.valueOf(metadata.getValue())); } } throw new MissingRequestParameterException("ID", "Metadata ID"); } } public void checkUpdateSubscription(String subscriptionID, UpdateContextSubscription updateContextSubscription) throws MissingRequestParameterException, MismatchIdException { checkUpdateContextSubscription(updateContextSubscription); // Check that subscriptionID parameter is equal to the one given in the message body if (!subscriptionID.equals(updateContextSubscription.getSubscriptionId())) { throw new MismatchIdException(subscriptionID, updateContextSubscription.getSubscriptionId()); } } private void checkContextElementResponse(ContextElementResponse contextElementResponse) throws MissingRequestParameterException { if (contextElementResponse.getStatusCode() == null) { throw new MissingRequestParameterException("statusCode", "StatusCode"); } if (contextElementResponse.getContextElement() == null) { throw new MissingRequestParameterException("contextElement", "ContextElement"); } checkContextElement(contextElementResponse.getContextElement()); } private void checkContextElement(ContextElement contextElement) throws MissingRequestParameterException { if (contextElement.getEntityId() == null) { throw new MissingRequestParameterException("entityId", "EntityId"); } checkEntityId(contextElement.getEntityId()); if (nullOrEmpty(contextElement.getContextAttributeList())) { throw new MissingRequestParameterException("contextAttributes", "List<ContextAttribute>"); } } private void checkEntityId(EntityId entityId) throws MissingRequestParameterException { if (nullOrEmpty(entityId.getId())) { throw new MissingRequestParameterException("id", "string"); } if (nullOrEmpty(entityId.getType())) { throw new MissingRequestParameterException("type", "string"); } if (entityId.getIsPattern() == null) { entityId.setIsPattern(false); } } private void checkContextRegistration(ContextRegistration contextRegistration) throws MissingRequestParameterException { if (nullOrEmpty(contextRegistration.getProvidingApplication())){ throw new MissingRequestParameterException("providingApplication", "URI"); } if (contextRegistration.getEntityIdList() != null) { for(EntityId entityId: contextRegistration.getEntityIdList()) { checkEntityId(entityId); } } if (contextRegistration.getContextRegistrationAttributeList() != null) { for(ContextRegistrationAttribute attribute: contextRegistration.getContextRegistrationAttributeList()) { checkContextRegistrationAttribute(attribute); } } } private void checkContextRegistrationAttribute(ContextRegistrationAttribute attribute) throws MissingRequestParameterException { if ((attribute.getName() == null) || (attribute.getName().isEmpty())) { throw new MissingRequestParameterException("name", "string"); } if (attribute.getIsDomain() == null) { throw new MissingRequestParameterException("isDomain", "boolean"); } } private static boolean nullOrEmpty(URI e) { return e == null || e.toString().isEmpty(); } private static boolean nullOrEmpty(String e) { return e == null || e.isEmpty(); } private static boolean nullOrEmpty(List e) { return e == null || e.isEmpty(); } }
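// A minimal usage sketch: run the validator before dispatching a request.
// UpdateContextHandlerSketch is illustrative and not part of this module; it relies only
// on checkUpdateContext() and MissingRequestParameterException shown above (in a Spring
// application the validator would normally be injected rather than instantiated).
class UpdateContextHandlerSketch {
  private final NgsiValidation ngsiValidation = new NgsiValidation();

  void handle(UpdateContext request) {
    try {
      ngsiValidation.checkUpdateContext(request);
      // the request is structurally valid: hand it to the business layer
    } catch (MissingRequestParameterException e) {
      // a mandatory field (updateAction, contextElements, ...) was null or empty;
      // a real server would map this to an NGSI error status code
    }
  }
}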
package org.apache.lucene.spatial.composite; /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import java.io.IOException; import java.util.Map; import com.spatial4j.core.shape.Shape; import com.spatial4j.core.shape.SpatialRelation; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.queries.function.FunctionValues; import org.apache.lucene.queries.function.ValueSource; import org.apache.lucene.search.ConstantScoreScorer; import org.apache.lucene.search.ConstantScoreWeight; import org.apache.lucene.search.DocIdSet; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; import org.apache.lucene.spatial.prefix.AbstractVisitingPrefixTreeFilter; import org.apache.lucene.spatial.prefix.tree.Cell; import org.apache.lucene.spatial.prefix.tree.SpatialPrefixTree; import org.apache.lucene.util.Bits; import org.apache.lucene.util.DocIdSetBuilder; /** * A spatial Intersects predicate that distinguishes an approximated match from an exact match based on which cells * are within the query shape. It exposes a {@link TwoPhaseIterator} that will verify a match with a provided * predicate in the form of a {@link ValueSource} by calling {@link FunctionValues#boolVal(int)}. 
* * @lucene.internal */ public class IntersectsRPTVerifyQuery extends Query { private final IntersectsDifferentiatingFilter intersectsDiffFilter; private final ValueSource predicateValueSource; // we call FunctionValues.boolVal(doc) public IntersectsRPTVerifyQuery(Shape queryShape, String fieldName, SpatialPrefixTree grid, int detailLevel, int prefixGridScanLevel, ValueSource predicateValueSource) { this.predicateValueSource = predicateValueSource; this.intersectsDiffFilter = new IntersectsDifferentiatingFilter(queryShape, fieldName, grid, detailLevel, prefixGridScanLevel); } @Override public String toString(String field) { return "IntersectsVerified(fieldName=" + field + ")"; } @Override public boolean equals(Object o) { if (this == o) return true; if (!super.equals(o)) return false; IntersectsRPTVerifyQuery that = (IntersectsRPTVerifyQuery) o; if (!intersectsDiffFilter.equals(that.intersectsDiffFilter)) return false; return predicateValueSource.equals(that.predicateValueSource); } @Override public int hashCode() { int result = super.hashCode(); result = 31 * result + intersectsDiffFilter.hashCode(); result = 31 * result + predicateValueSource.hashCode(); return result; } @Override public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException { final Map valueSourceContext = ValueSource.newContext(searcher); return new ConstantScoreWeight(this) { @Override public Scorer scorer(LeafReaderContext context) throws IOException { // Compute approx & exact final IntersectsDifferentiatingFilter.IntersectsDifferentiatingVisitor result = intersectsDiffFilter.compute(context, null); if (result.approxDocIdSet == null) { return null; } final DocIdSetIterator approxDISI = result.approxDocIdSet.iterator(); if (approxDISI == null) { return null; } final DocIdSetIterator exactIterator; if (result.exactDocIdSet != null) { // If both sets are the same, there's nothing to verify; we needn't return a TwoPhaseIterator if (result.approxDocIdSet == result.exactDocIdSet) { return new ConstantScoreScorer(this, score(), approxDISI); } exactIterator = result.exactDocIdSet.iterator(); assert exactIterator != null; } else { exactIterator = null; } final FunctionValues predFuncValues = predicateValueSource.getValues(valueSourceContext, context); final TwoPhaseIterator twoPhaseIterator = new TwoPhaseIterator(approxDISI) { @Override public boolean matches() throws IOException { final int doc = approxDISI.docID(); if (exactIterator != null) { if (exactIterator.docID() < doc) { exactIterator.advance(doc); } if (exactIterator.docID() == doc) { return true; } } return predFuncValues.boolVal(doc); } }; return new ConstantScoreScorer(this, score(), twoPhaseIterator); } }; } //This is a "Filter" but we don't use it as-such; the caller calls the constructor and then compute() and examines // the results which consists of two parts -- the approximated results, and a subset of exact matches. The // difference needs to be verified. // TODO refactor AVPTF to not be a Query/Filter? 
private static class IntersectsDifferentiatingFilter extends AbstractVisitingPrefixTreeFilter { public IntersectsDifferentiatingFilter(Shape queryShape, String fieldName, SpatialPrefixTree grid, int detailLevel, int prefixGridScanLevel) { super(queryShape, fieldName, grid, detailLevel, prefixGridScanLevel); } IntersectsDifferentiatingFilter.IntersectsDifferentiatingVisitor compute(LeafReaderContext context, Bits acceptDocs) throws IOException { final IntersectsDifferentiatingFilter.IntersectsDifferentiatingVisitor result = new IntersectsDifferentiatingFilter.IntersectsDifferentiatingVisitor(context, acceptDocs); result.getDocIdSet();//computes return result; } // TODO consider if IntersectsPrefixTreeFilter should simply do this and provide both sets class IntersectsDifferentiatingVisitor extends VisitorTemplate { DocIdSetBuilder approxBuilder = new DocIdSetBuilder(maxDoc); DocIdSetBuilder exactBuilder = new DocIdSetBuilder(maxDoc); boolean approxIsEmpty = true; boolean exactIsEmpty = true; DocIdSet exactDocIdSet; DocIdSet approxDocIdSet; public IntersectsDifferentiatingVisitor(LeafReaderContext context, Bits acceptDocs) throws IOException { super(context, acceptDocs); } @Override protected void start() throws IOException { } @Override protected DocIdSet finish() throws IOException { if (exactIsEmpty) { exactDocIdSet = null; } else { exactDocIdSet = exactBuilder.build(); } if (approxIsEmpty) { approxDocIdSet = exactDocIdSet;//optimization } else { if (exactDocIdSet != null) { approxBuilder.add(exactDocIdSet.iterator()); } approxDocIdSet = approxBuilder.build(); } return null;//unused in this weird re-use of AVPTF } @Override protected boolean visitPrefix(Cell cell) throws IOException { if (cell.getShapeRel() == SpatialRelation.WITHIN) { exactIsEmpty = false; collectDocs(exactBuilder);//note: we'll add exact to approx on finish() return false; } else if (cell.getLevel() == detailLevel) { approxIsEmpty = false; collectDocs(approxBuilder); return false; } return true; } @Override protected void visitLeaf(Cell cell) throws IOException { if (cell.getShapeRel() == SpatialRelation.WITHIN) { exactIsEmpty = false; collectDocs(exactBuilder);//note: we'll add exact to approx on finish() } else { approxIsEmpty = false; collectDocs(approxBuilder); } } } @Override public DocIdSet getDocIdSet(LeafReaderContext context, Bits acceptDocs) throws IOException { throw new IllegalStateException(); } @Override public String toString(String field) { throw new IllegalStateException(); } } }
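// A minimal construction sketch. The shape, grid and per-document predicate are assumed
// to come from a RecursivePrefixTreeStrategy-style setup; IntersectsVerifySketch and its
// arguments are illustrative.
class IntersectsVerifySketch {
  static Query build(Shape queryShape, String fieldName, SpatialPrefixTree grid,
                     ValueSource exactPredicate) {
    int detailLevel = grid.getMaxLevels();     // match at full index precision
    int prefixGridScanLevel = detailLevel - 4; // common scan-threshold heuristic
    // Cells WITHIN the shape are accepted outright; remaining approximate matches are
    // verified per document via exactPredicate.boolVal(doc) in the TwoPhaseIterator.
    return new IntersectsRPTVerifyQuery(queryShape, fieldName, grid, detailLevel,
        prefixGridScanLevel, exactPredicate);
  }
}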
/******************************************************************************* * * Pentaho Big Data * * Copyright (C) 2002-2015 by Pentaho : http://www.pentaho.com * ******************************************************************************* * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ******************************************************************************/ package org.pentaho.di.trans.steps.hbaseoutput; import java.util.ArrayList; import java.util.List; import java.util.Set; import org.apache.commons.lang.StringUtils; import org.eclipse.jface.dialogs.MessageDialog; import org.eclipse.swt.SWT; import org.eclipse.swt.custom.CCombo; import org.eclipse.swt.custom.CTabFolder; import org.eclipse.swt.custom.CTabItem; import org.eclipse.swt.events.ModifyEvent; import org.eclipse.swt.events.ModifyListener; import org.eclipse.swt.events.SelectionAdapter; import org.eclipse.swt.events.SelectionEvent; import org.eclipse.swt.events.ShellAdapter; import org.eclipse.swt.events.ShellEvent; import org.eclipse.swt.layout.FormAttachment; import org.eclipse.swt.layout.FormData; import org.eclipse.swt.layout.FormLayout; import org.eclipse.swt.widgets.Button; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Display; import org.eclipse.swt.widgets.Event; import org.eclipse.swt.widgets.FileDialog; import org.eclipse.swt.widgets.Label; import org.eclipse.swt.widgets.Listener; import org.eclipse.swt.widgets.MessageBox; import org.eclipse.swt.widgets.Shell; import org.eclipse.swt.widgets.Text; import org.pentaho.di.core.Const; import org.pentaho.di.core.Props; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.namedcluster.model.NamedCluster; import org.pentaho.di.core.row.RowMetaInterface; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.trans.TransMeta; import org.pentaho.di.trans.step.BaseStepMeta; import org.pentaho.di.trans.step.StepDialogInterface; import org.pentaho.di.trans.step.StepMeta; import org.pentaho.di.trans.steps.hbaseinput.Messages; import org.pentaho.di.ui.core.dialog.ErrorDialog; import org.pentaho.di.ui.core.gui.GUIResource; import org.pentaho.di.ui.core.namedcluster.NamedClusterUIHelper; import org.pentaho.di.ui.core.namedcluster.NamedClusterWidget; import org.pentaho.di.ui.core.widget.TextVar; import org.pentaho.di.ui.trans.step.BaseStepDialog; import org.pentaho.hbase.mapping.ConfigurationProducer; import org.pentaho.hbase.mapping.FieldProducer; import org.pentaho.hbase.mapping.MappingAdmin; import org.pentaho.hbase.mapping.MappingEditor; import org.pentaho.hbase.shim.api.Mapping; import org.pentaho.hbase.shim.spi.HBaseConnection; /** * Dialog class for HBaseOutput * * @author Mark Hall (mhall{[at]}pentaho{[dot]}com) */ public class HBaseOutputDialog extends BaseStepDialog implements StepDialogInterface, ConfigurationProducer, FieldProducer { private final HBaseOutputMeta m_currentMeta; private final HBaseOutputMeta m_originalMeta; private final HBaseOutputMeta m_configurationMeta; /** various UI bits and pieces for the 
dialog */ private Label m_stepnameLabel; private Text m_stepnameText; // The tabs of the dialog private CTabFolder m_wTabFolder; private CTabItem m_wConfigTab; private CTabItem m_editorTab; NamedClusterWidget namedClusterWidget; // Core config line private Button m_coreConfigBut; private TextVar m_coreConfigText; // Default config line private Button m_defaultConfigBut; private TextVar m_defaultConfigText; // Table name line private Button m_mappedTableNamesBut; private CCombo m_mappedTableNamesCombo; // Mapping name line private Button m_mappingNamesBut; private CCombo m_mappingNamesCombo; /** Store the mapping information in the step's meta data */ private Button m_storeMappingInStepMetaData; // Disable write to WAL check box private Button m_disableWriteToWALBut; // Write buffer size line private TextVar m_writeBufferSizeText; // mapping editor composite private MappingEditor m_mappingEditor; public HBaseOutputDialog( Shell parent, Object in, TransMeta tr, String name ) { super( parent, (BaseStepMeta) in, tr, name ); m_currentMeta = (HBaseOutputMeta) in; m_originalMeta = (HBaseOutputMeta) m_currentMeta.clone(); m_configurationMeta = (HBaseOutputMeta) m_currentMeta.clone(); } public String open() { Shell parent = getParent(); Display display = parent.getDisplay(); shell = new Shell( parent, SWT.DIALOG_TRIM | SWT.RESIZE | SWT.MIN | SWT.MAX ); props.setLook( shell ); setShellImage( shell, m_currentMeta ); // used to listen to a text field (m_wStepname) ModifyListener lsMod = new ModifyListener() { public void modifyText( ModifyEvent e ) { m_currentMeta.setChanged(); } }; changed = m_currentMeta.hasChanged(); FormLayout formLayout = new FormLayout(); formLayout.marginWidth = Const.FORM_MARGIN; formLayout.marginHeight = Const.FORM_MARGIN; shell.setLayout( formLayout ); shell.setText( BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutputDialog.Shell.Title" ) ); int middle = props.getMiddlePct(); int margin = Const.MARGIN; // Stepname line m_stepnameLabel = new Label( shell, SWT.RIGHT ); m_stepnameLabel.setText( BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutputDialog.StepName.Label" ) ); props.setLook( m_stepnameLabel ); FormData fd = new FormData(); fd.left = new FormAttachment( 0, 0 ); fd.right = new FormAttachment( middle, -margin ); fd.top = new FormAttachment( 0, margin ); m_stepnameLabel.setLayoutData( fd ); m_stepnameText = new Text( shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER ); m_stepnameText.setText( stepname ); props.setLook( m_stepnameText ); m_stepnameText.addModifyListener( lsMod ); // format the text field fd = new FormData(); fd.left = new FormAttachment( middle, 0 ); fd.top = new FormAttachment( 0, margin ); fd.right = new FormAttachment( 100, 0 ); m_stepnameText.setLayoutData( fd ); m_wTabFolder = new CTabFolder( shell, SWT.BORDER ); props.setLook( m_wTabFolder, Props.WIDGET_STYLE_TAB ); m_wTabFolder.setSimple( false ); // Start of the config tab m_wConfigTab = new CTabItem( m_wTabFolder, SWT.NONE ); m_wConfigTab.setText( BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutputDialog.ConfigTab.TabTitle" ) ); Composite wConfigComp = new Composite( m_wTabFolder, SWT.NONE ); props.setLook( wConfigComp ); FormLayout configLayout = new FormLayout(); configLayout.marginWidth = 3; configLayout.marginHeight = 3; wConfigComp.setLayout( configLayout ); Label namedClusterLab = new Label( wConfigComp, SWT.RIGHT ); namedClusterLab.setText( BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutputDialog.NamedCluster.Label" ) ); namedClusterLab.setToolTipText( 
BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutputDialog.NamedCluster.TipText" ) ); props.setLook( namedClusterLab ); fd = new FormData(); fd.left = new FormAttachment( 0, 0 ); fd.top = new FormAttachment( 0, 10 ); fd.right = new FormAttachment( middle, -margin ); namedClusterLab.setLayoutData( fd ); namedClusterWidget = NamedClusterUIHelper.getNamedClusterUIFactory().createNamedClusterWidget( wConfigComp, false ); namedClusterWidget.initiate(); props.setLook( namedClusterWidget.getComposite() ); fd = new FormData(); fd.right = new FormAttachment( 100, 0 ); fd.top = new FormAttachment( 0, 0 ); fd.left = new FormAttachment( middle, 0 ); namedClusterWidget.getComposite().setLayoutData( fd ); // core config line Label coreConfigLab = new Label( wConfigComp, SWT.RIGHT ); coreConfigLab.setText( BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutputDialog.CoreConfig.Label" ) ); coreConfigLab .setToolTipText( BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutputDialog.CoreConfig.TipText" ) ); props.setLook( coreConfigLab ); fd = new FormData(); fd.left = new FormAttachment( 0, 0 ); fd.top = new FormAttachment( namedClusterWidget.getComposite(), margin ); fd.right = new FormAttachment( middle, -margin ); coreConfigLab.setLayoutData( fd ); m_coreConfigBut = new Button( wConfigComp, SWT.PUSH | SWT.CENTER ); props.setLook( m_coreConfigBut ); m_coreConfigBut.setText( BaseMessages.getString( HBaseOutputMeta.PKG, "System.Button.Browse" ) ); fd = new FormData(); fd.right = new FormAttachment( 100, 0 ); fd.top = new FormAttachment( namedClusterWidget.getComposite(), 0 ); m_coreConfigBut.setLayoutData( fd ); m_coreConfigBut.addSelectionListener( new SelectionAdapter() { @Override public void widgetSelected( SelectionEvent e ) { FileDialog dialog = new FileDialog( shell, SWT.OPEN ); String[] extensions = null; String[] filterNames = null; extensions = new String[2]; filterNames = new String[2]; extensions[0] = "*.xml"; filterNames[0] = BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutputDialog.FileType.XML" ); extensions[1] = "*"; filterNames[1] = BaseMessages.getString( HBaseOutputMeta.PKG, "System.FileType.AllFiles" ); dialog.setFilterExtensions( extensions ); if ( dialog.open() != null ) { m_coreConfigText.setText( dialog.getFilterPath() + System.getProperty( "file.separator" ) + dialog.getFileName() ); } } } ); m_coreConfigText = new TextVar( transMeta, wConfigComp, SWT.SINGLE | SWT.LEFT | SWT.BORDER ); props.setLook( m_coreConfigText ); m_coreConfigText.addModifyListener( lsMod ); // set the tool tip to the contents with any env variables expanded m_coreConfigText.addModifyListener( new ModifyListener() { public void modifyText( ModifyEvent e ) { m_coreConfigText.setToolTipText( transMeta.environmentSubstitute( m_coreConfigText.getText() ) ); } } ); fd = new FormData(); fd.left = new FormAttachment( middle, 0 ); fd.top = new FormAttachment( namedClusterWidget.getComposite(), margin ); fd.right = new FormAttachment( m_coreConfigBut, -margin ); m_coreConfigText.setLayoutData( fd ); // default config line Label defaultConfigLab = new Label( wConfigComp, SWT.RIGHT ); defaultConfigLab.setText( BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutputDialog.DefaultConfig.Label" ) ); defaultConfigLab.setToolTipText( BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutputDialog.DefaultConfig.TipText" ) ); props.setLook( defaultConfigLab ); fd = new FormData(); fd.left = new FormAttachment( 0, 0 ); fd.top = new FormAttachment( m_coreConfigText, margin ); fd.right = new FormAttachment( 
middle, -margin ); defaultConfigLab.setLayoutData( fd ); m_defaultConfigBut = new Button( wConfigComp, SWT.PUSH | SWT.CENTER ); props.setLook( m_defaultConfigBut ); m_defaultConfigBut.setText( BaseMessages.getString( HBaseOutputMeta.PKG, "System.Button.Browse" ) ); fd = new FormData(); fd.right = new FormAttachment( 100, 0 ); fd.top = new FormAttachment( m_coreConfigText, 0 ); m_defaultConfigBut.setLayoutData( fd ); m_defaultConfigBut.addSelectionListener( new SelectionAdapter() { @Override public void widgetSelected( SelectionEvent e ) { FileDialog dialog = new FileDialog( shell, SWT.OPEN ); String[] extensions = null; String[] filterNames = null; extensions = new String[2]; filterNames = new String[2]; extensions[0] = "*.xml"; filterNames[0] = BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseInputDialog.FileType.XML" ); extensions[1] = "*"; filterNames[1] = BaseMessages.getString( HBaseOutputMeta.PKG, "System.FileType.AllFiles" ); dialog.setFilterExtensions( extensions ); if ( dialog.open() != null ) { m_defaultConfigText.setText( dialog.getFilterPath() + System.getProperty( "file.separator" ) + dialog.getFileName() ); } } } ); m_defaultConfigText = new TextVar( transMeta, wConfigComp, SWT.SINGLE | SWT.LEFT | SWT.BORDER ); props.setLook( m_defaultConfigText ); m_defaultConfigText.addModifyListener( lsMod ); // set the tool tip to the contents with any env variables expanded m_defaultConfigText.addModifyListener( new ModifyListener() { public void modifyText( ModifyEvent e ) { m_defaultConfigText.setToolTipText( transMeta.environmentSubstitute( m_defaultConfigText.getText() ) ); } } ); fd = new FormData(); fd.left = new FormAttachment( middle, 0 ); fd.top = new FormAttachment( m_coreConfigText, margin ); fd.right = new FormAttachment( m_defaultConfigBut, -margin ); m_defaultConfigText.setLayoutData( fd ); // table name Label tableNameLab = new Label( wConfigComp, SWT.RIGHT ); tableNameLab.setText( BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutputDialog.TableName.Label" ) ); tableNameLab.setToolTipText( BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutputDialog.TableName.TipText" ) ); props.setLook( tableNameLab ); fd = new FormData(); fd.left = new FormAttachment( 0, 0 ); fd.top = new FormAttachment( m_defaultConfigText, margin ); fd.right = new FormAttachment( middle, -margin ); tableNameLab.setLayoutData( fd ); m_mappedTableNamesBut = new Button( wConfigComp, SWT.PUSH | SWT.CENTER ); props.setLook( m_mappedTableNamesBut ); m_mappedTableNamesBut.setText( BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutputDialog.TableName.Button" ) ); fd = new FormData(); fd.right = new FormAttachment( 100, 0 ); fd.top = new FormAttachment( m_defaultConfigText, 0 ); m_mappedTableNamesBut.setLayoutData( fd ); m_mappedTableNamesCombo = new CCombo( wConfigComp, SWT.BORDER ); props.setLook( m_mappedTableNamesCombo ); m_mappedTableNamesCombo.addModifyListener( new ModifyListener() { public void modifyText( ModifyEvent e ) { m_currentMeta.setChanged(); m_mappedTableNamesCombo.setToolTipText( transMeta.environmentSubstitute( m_mappedTableNamesCombo.getText() ) ); } } ); fd = new FormData(); fd.left = new FormAttachment( middle, 0 ); fd.top = new FormAttachment( m_defaultConfigText, margin ); fd.right = new FormAttachment( m_mappedTableNamesBut, -margin ); m_mappedTableNamesCombo.setLayoutData( fd ); m_mappedTableNamesBut.addSelectionListener( new SelectionAdapter() { @Override public void widgetSelected( SelectionEvent e ) { setupMappedTableNames(); } } ); // mapping name Label 
mappingNameLab = new Label( wConfigComp, SWT.RIGHT ); mappingNameLab.setText( BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutputDialog.MappingName.Label" ) ); mappingNameLab.setToolTipText( BaseMessages .getString( HBaseOutputMeta.PKG, "HBaseOutputDialog.MappingName.TipText" ) ); props.setLook( mappingNameLab ); fd = new FormData(); fd.left = new FormAttachment( 0, 0 ); fd.top = new FormAttachment( m_mappedTableNamesCombo, margin ); fd.right = new FormAttachment( middle, -margin ); mappingNameLab.setLayoutData( fd ); m_mappingNamesBut = new Button( wConfigComp, SWT.PUSH | SWT.CENTER ); props.setLook( m_mappingNamesBut ); m_mappingNamesBut.setText( BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutputDialog.MappingName.Button" ) ); fd = new FormData(); fd.right = new FormAttachment( 100, 0 ); fd.top = new FormAttachment( m_mappedTableNamesCombo, 0 ); m_mappingNamesBut.setLayoutData( fd ); m_mappingNamesBut.addSelectionListener( new SelectionAdapter() { @Override public void widgetSelected( SelectionEvent e ) { setupMappingNamesForTable( false ); } } ); m_mappingNamesCombo = new CCombo( wConfigComp, SWT.BORDER ); props.setLook( m_mappingNamesCombo ); m_mappingNamesCombo.addModifyListener( new ModifyListener() { public void modifyText( ModifyEvent e ) { m_currentMeta.setChanged(); m_mappingNamesCombo.setToolTipText( transMeta.environmentSubstitute( m_mappingNamesCombo.getText() ) ); m_storeMappingInStepMetaData.setSelection( false ); } } ); fd = new FormData(); fd.left = new FormAttachment( middle, 0 ); fd.top = new FormAttachment( m_mappedTableNamesCombo, margin ); fd.right = new FormAttachment( m_mappingNamesBut, -margin ); m_mappingNamesCombo.setLayoutData( fd ); // store mapping in meta data Label storeMapping = new Label( wConfigComp, SWT.RIGHT ); storeMapping.setText( BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutputDialog.StoreMapping.Label" ) ); storeMapping .setToolTipText( BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutputDialog.StoreMapping.TipText" ) ); props.setLook( storeMapping ); fd = new FormData(); fd.left = new FormAttachment( 0, 0 ); fd.top = new FormAttachment( m_mappingNamesCombo, margin ); fd.right = new FormAttachment( middle, -margin ); storeMapping.setLayoutData( fd ); m_storeMappingInStepMetaData = new Button( wConfigComp, SWT.CHECK ); props.setLook( m_storeMappingInStepMetaData ); fd = new FormData(); fd.right = new FormAttachment( 100, 0 ); fd.left = new FormAttachment( middle, 0 ); fd.top = new FormAttachment( m_mappingNamesCombo, margin ); m_storeMappingInStepMetaData.setLayoutData( fd ); // disable write to WAL Label disableWALLab = new Label( wConfigComp, SWT.RIGHT ); disableWALLab.setText( BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutputDialog.DisableWAL.Label" ) ); disableWALLab .setToolTipText( BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutputDialog.DisableWAL.TipText" ) ); props.setLook( disableWALLab ); fd = new FormData(); fd.left = new FormAttachment( 0, 0 ); fd.top = new FormAttachment( m_storeMappingInStepMetaData, margin ); fd.right = new FormAttachment( middle, -margin ); disableWALLab.setLayoutData( fd ); m_disableWriteToWALBut = new Button( wConfigComp, SWT.CHECK | SWT.CENTER ); m_disableWriteToWALBut.setToolTipText( BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutputDialog.DisableWAL.TipText" ) ); props.setLook( m_disableWriteToWALBut ); fd = new FormData(); fd.left = new FormAttachment( middle, 0 ); fd.top = new FormAttachment( m_storeMappingInStepMetaData, margin ); // fd.right = new 
FormAttachment(middle, -margin); m_disableWriteToWALBut.setLayoutData( fd ); // write buffer size line Label writeBufferLab = new Label( wConfigComp, SWT.RIGHT ); writeBufferLab.setText( BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutputDialog.WriteBufferSize.Label" ) ); writeBufferLab.setToolTipText( BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutputDialog.WriteBufferSize.TipText" ) ); props.setLook( writeBufferLab ); fd = new FormData(); fd.left = new FormAttachment( 0, 0 ); fd.top = new FormAttachment( m_disableWriteToWALBut, margin ); fd.right = new FormAttachment( middle, -margin ); writeBufferLab.setLayoutData( fd ); m_writeBufferSizeText = new TextVar( transMeta, wConfigComp, SWT.SINGLE | SWT.LEFT | SWT.BORDER ); props.setLook( m_writeBufferSizeText ); m_writeBufferSizeText.addModifyListener( new ModifyListener() { public void modifyText( ModifyEvent e ) { m_writeBufferSizeText.setToolTipText( transMeta.environmentSubstitute( m_writeBufferSizeText.getText() ) ); } } ); fd = new FormData(); fd.left = new FormAttachment( middle, 0 ); fd.top = new FormAttachment( m_disableWriteToWALBut, margin ); fd.right = new FormAttachment( 100, 0 ); m_writeBufferSizeText.setLayoutData( fd ); fd = new FormData(); fd.left = new FormAttachment( 0, 0 ); fd.top = new FormAttachment( 0, 0 ); fd.right = new FormAttachment( 100, 0 ); fd.bottom = new FormAttachment( 100, 0 ); wConfigComp.setLayoutData( fd ); wConfigComp.layout(); m_wConfigTab.setControl( wConfigComp ); // mapping editor tab m_editorTab = new CTabItem( m_wTabFolder, SWT.NONE ); m_editorTab.setText( BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutputDialog.MappingEditorTab.TabTitle" ) ); m_mappingEditor = new MappingEditor( shell, m_wTabFolder, this, this, SWT.FULL_SELECTION | SWT.MULTI, true, props, transMeta ); fd = new FormData(); fd.top = new FormAttachment( 0, 0 ); fd.left = new FormAttachment( 0, 0 ); m_mappingEditor.setLayoutData( fd ); fd = new FormData(); fd.left = new FormAttachment( 0, 0 ); fd.top = new FormAttachment( 0, 0 ); fd.bottom = new FormAttachment( 100, -margin * 2 ); fd.right = new FormAttachment( 100, 0 ); m_mappingEditor.setLayoutData( fd ); m_mappingEditor.layout(); m_editorTab.setControl( m_mappingEditor ); // ----------------- fd = new FormData(); fd.left = new FormAttachment( 0, 0 ); fd.top = new FormAttachment( m_stepnameText, margin ); fd.right = new FormAttachment( 100, 0 ); fd.bottom = new FormAttachment( 100, -50 ); m_wTabFolder.setLayoutData( fd ); // Buttons inherited from BaseStepDialog wOK = new Button( shell, SWT.PUSH ); wOK.setText( BaseMessages.getString( HBaseOutputMeta.PKG, "System.Button.OK" ) ); wCancel = new Button( shell, SWT.PUSH ); wCancel.setText( BaseMessages.getString( HBaseOutputMeta.PKG, "System.Button.Cancel" ) ); setButtonPositions( new Button[] { wOK, wCancel }, margin, m_wTabFolder ); // Add listeners lsCancel = new Listener() { public void handleEvent( Event e ) { cancel(); } }; lsOK = new Listener() { public void handleEvent( Event e ) { ok(); } }; wCancel.addListener( SWT.Selection, lsCancel ); wOK.addListener( SWT.Selection, lsOK ); lsDef = new SelectionAdapter() { @Override public void widgetDefaultSelected( SelectionEvent e ) { ok(); } }; m_stepnameText.addSelectionListener( lsDef ); // Detect X or ALT-F4 or something that kills this window... 
shell.addShellListener( new ShellAdapter() { @Override public void shellClosed( ShellEvent e ) { cancel(); } } ); m_wTabFolder.setSelection( 0 ); setSize(); getData(); shell.open(); while ( !shell.isDisposed() ) { if ( !display.readAndDispatch() ) { display.sleep(); } } return stepname; } protected void cancel() { stepname = null; m_currentMeta.setChanged( changed ); dispose(); } protected void ok() { if ( Const.isEmpty( m_stepnameText.getText() ) ) { MessageBox mb = new MessageBox( shell, SWT.OK | SWT.ICON_ERROR ); mb.setText( BaseMessages.getString( HBaseOutputMeta.PKG, "System.StepJobEntryNameMissing.Title" ) ); mb.setMessage( BaseMessages.getString( HBaseOutputMeta.PKG, "System.JobEntryNameMissing.Msg" ) ); mb.open(); return; } if ( namedClusterWidget.getSelectedNamedCluster() == null ) { MessageBox mb = new MessageBox( shell, SWT.OK | SWT.ICON_ERROR ); mb.setText( BaseMessages.getString( HBaseOutputMeta.PKG, "Dialog.Error" ) ); mb.setMessage( BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutputDialog.NamedClusterNotSelected.Msg" ) ); mb.open(); return; } else { NamedCluster nc = namedClusterWidget.getSelectedNamedCluster(); if ( StringUtils.isEmpty( nc.getZooKeeperHost() ) ) { MessageBox mb = new MessageBox( shell, SWT.OK | SWT.ICON_ERROR ); mb.setText( BaseMessages.getString( HBaseOutputMeta.PKG, "Dialog.Error" ) ); mb.setMessage( BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutputDialog.NamedClusterMissingValues.Msg" ) ); mb.open(); return; } } stepname = m_stepnameText.getText(); updateMetaConnectionDetails( m_currentMeta ); if ( m_storeMappingInStepMetaData.getSelection() ) { if ( Const.isEmpty( m_mappingNamesCombo.getText() ) ) { List<String> problems = new ArrayList<String>(); Mapping toSet = m_mappingEditor.getMapping( false, problems ); if ( problems.size() > 0 ) { StringBuffer p = new StringBuffer(); for ( String s : problems ) { p.append( s ).append( "\n" ); } MessageDialog md = new MessageDialog( shell, BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutputDialog.Error.IssuesWithMapping.Title" ), null, BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutputDialog.Error.IssuesWithMapping" ) + ":\n\n" + p.toString(), MessageDialog.WARNING, new String[] { BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutputDialog.Error.IssuesWithMapping.ButtonOK" ), BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutputDialog.Error.IssuesWithMapping.ButtonCancel" ) }, 0 ); MessageDialog.setDefaultImage( GUIResource.getInstance().getImageSpoon() ); int idx = md.open() & 0xFF; if ( idx == 1 || idx == 255 /* 255 = escape pressed */ ) { return; // Cancel } } m_currentMeta.setMapping( toSet ); } else { HBaseConnection connection = null; try { connection = getHBaseConnection(); MappingAdmin admin = new MappingAdmin( connection ); Mapping current = null; current = admin.getMapping( transMeta.environmentSubstitute( m_mappedTableNamesCombo.getText() ), transMeta .environmentSubstitute( m_mappingNamesCombo.getText() ) ); m_currentMeta.setMapping( current ); m_currentMeta.setTargetMappingName( "" ); } catch ( Exception e ) { logError( Messages.getString( "HBaseOutputDialog.ErrorMessage.UnableToGetMapping" ) + " \"" + transMeta.environmentSubstitute( m_mappedTableNamesCombo.getText() + "," + transMeta.environmentSubstitute( m_mappingNamesCombo.getText() ) + "\"" ), e ); new ErrorDialog( shell, Messages.getString( "HBaseOutputDialog.ErrorMessage.UnableToGetMapping" ), Messages .getString( "HBaseOutputDialog.ErrorMessage.UnableToGetMapping" ) + " \"" + 
transMeta.environmentSubstitute( m_mappedTableNamesCombo.getText() + "," + transMeta.environmentSubstitute( m_mappingNamesCombo.getText() ) + "\"" ), e ); } finally { try { if ( connection != null ) { connection.close(); } } catch ( Exception e ) { String msg = Messages.getString( "HBaseInputDialog.ErrorMessage.FailedClosingHBaseConnection" ); logError( msg, e ); new ErrorDialog( shell, msg, msg, e ); } } } } else { // we're going to use a mapping stored in HBase - null out any stored // mapping m_currentMeta.setMapping( null ); } if ( !m_originalMeta.equals( m_currentMeta ) ) { m_currentMeta.setChanged(); changed = m_currentMeta.hasChanged(); } dispose(); } protected void updateMetaConnectionDetails( HBaseOutputMeta meta ) { if ( Const.isEmpty( m_stepnameText.getText() ) ) { return; } NamedCluster nc = namedClusterWidget.getSelectedNamedCluster(); if ( nc != null ) { meta.setClusterName( nc.getName() ); meta.setZookeeperHosts( nc.getZooKeeperHost() ); meta.setZookeeperPort( nc.getZooKeeperPort() ); } meta.setCoreConfigURL( m_coreConfigText.getText() ); meta.setDefaulConfigURL( m_defaultConfigText.getText() ); meta.setTargetTableName( m_mappedTableNamesCombo.getText() ); meta.setTargetMappingName( m_mappingNamesCombo.getText() ); meta.setDisableWriteToWAL( m_disableWriteToWALBut.getSelection() ); meta.setWriteBufferSize( m_writeBufferSizeText.getText() ); } private void getData() { namedClusterWidget.setSelectedNamedCluster( m_currentMeta.getClusterName() ); if ( !Const.isEmpty( m_currentMeta.getCoreConfigURL() ) ) { m_coreConfigText.setText( m_currentMeta.getCoreConfigURL() ); } if ( !Const.isEmpty( m_currentMeta.getDefaultConfigURL() ) ) { m_defaultConfigText.setText( m_currentMeta.getDefaultConfigURL() ); } if ( !Const.isEmpty( m_currentMeta.getTargetTableName() ) ) { m_mappedTableNamesCombo.setText( m_currentMeta.getTargetTableName() ); } if ( !Const.isEmpty( m_currentMeta.getTargetMappingName() ) ) { m_mappingNamesCombo.setText( m_currentMeta.getTargetMappingName() ); } m_disableWriteToWALBut.setSelection( m_currentMeta.getDisableWriteToWAL() ); if ( !Const.isEmpty( m_currentMeta.getWriteBufferSize() ) ) { m_writeBufferSizeText.setText( m_currentMeta.getWriteBufferSize() ); } if ( Const.isEmpty( m_currentMeta.getTargetMappingName() ) && m_currentMeta.getMapping() != null ) { m_mappingEditor.setMapping( m_currentMeta.getMapping() ); m_storeMappingInStepMetaData.setSelection( true ); } } public HBaseConnection getHBaseConnection() throws Exception { /* Configuration conf = null; */ HBaseConnection conf = null; /* * URL coreConf = null; URL defaultConf = null; */ String coreConf = ""; String defaultConf = ""; String zookeeperHosts = ""; String zookeeperPort = ""; if ( !Const.isEmpty( m_coreConfigText.getText() ) ) { coreConf = transMeta.environmentSubstitute( m_coreConfigText.getText() ); } if ( !Const.isEmpty( m_defaultConfigText.getText() ) ) { defaultConf = transMeta.environmentSubstitute( m_defaultConfigText.getText() ); } NamedCluster nc = namedClusterWidget.getSelectedNamedCluster(); if ( nc != null ) { zookeeperHosts = transMeta.environmentSubstitute( nc.getZooKeeperHost() ); zookeeperPort = transMeta.environmentSubstitute( nc.getZooKeeperPort() ); } if ( Const.isEmpty( zookeeperHosts ) && Const.isEmpty( coreConf ) && Const.isEmpty( defaultConf ) ) { throw new Exception( BaseMessages.getString( HBaseOutputMeta.PKG, "MappingDialog.Error.Message.CantConnectNoConnectionDetailsProvided" ) ); } conf = HBaseOutputData.getHBaseConnection( zookeeperHosts, zookeeperPort, coreConf, 
defaultConf, null ); return conf; } private void setupMappedTableNames() { m_mappedTableNamesCombo.removeAll(); HBaseConnection connection = null; try { connection = getHBaseConnection(); MappingAdmin admin = new MappingAdmin( connection ); admin.setConnection( connection ); Set<String> tableNames = admin.getMappedTables(); for ( String s : tableNames ) { m_mappedTableNamesCombo.add( s ); } } catch ( Exception ex ) { logError( BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutputDialog.ErrorMessage.UnableToConnect" ), ex ); new ErrorDialog( shell, BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutputDialog.ErrorMessage." + "UnableToConnect" ), BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutputDialog.ErrorMessage.UnableToConnect" ), ex ); } finally { try { if ( connection != null ) { connection.close(); } } catch ( Exception e ) { String msg = BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseInputDialog.ErrorMessage.FailedClosingHBaseConnection" ); logError( msg, e ); new ErrorDialog( shell, msg, msg, e ); } } } private void setupMappingNamesForTable( boolean quiet ) { m_mappingNamesCombo.removeAll(); if ( !Const.isEmpty( m_mappedTableNamesCombo.getText() ) ) { HBaseConnection connection = null; try { connection = getHBaseConnection(); MappingAdmin admin = new MappingAdmin( connection ); List<String> mappingNames = admin.getMappingNames( m_mappedTableNamesCombo.getText().trim() ); for ( String n : mappingNames ) { m_mappingNamesCombo.add( n ); } } catch ( Exception ex ) { if ( !quiet ) { logError( BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseInputDialog.ErrorMessage.UnableToConnect" ), ex ); new ErrorDialog( shell, BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseInputDialog.ErrorMessage." + "UnableToConnect" ), BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseInputDialog.ErrorMessage.UnableToConnect" ), ex ); } } finally { try { if ( connection != null ) { connection.close(); } } catch ( Exception e ) { if ( !quiet ) { String msg = BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseInputDialog.ErrorMessage.FailedClosingHBaseConnection" ); logError( msg, e ); new ErrorDialog( shell, msg, msg, e ); } } } } } public RowMetaInterface getIncomingFields() { StepMeta stepMeta = transMeta.findStep( stepname ); RowMetaInterface result = null; try { if ( stepMeta != null ) { result = transMeta.getPrevStepFields( stepMeta ); } } catch ( KettleException ex ) { // quietly ignore } return result; } public String getCurrentConfiguration() { updateMetaConnectionDetails( m_configurationMeta ); return m_configurationMeta.getXML(); } }
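// A minimal usage sketch: Spoon normally instantiates step dialogs reflectively, but the
// dialog can also be opened directly. HBaseOutputDialogSketch and its arguments are
// illustrative; the meta and transformation objects are assumed to come from the caller.
class HBaseOutputDialogSketch {
  static String edit( Shell parent, HBaseOutputMeta meta, TransMeta transMeta, String stepName ) {
    HBaseOutputDialog dialog = new HBaseOutputDialog( parent, meta, transMeta, stepName );
    // open() blocks in the SWT event loop; it returns the (possibly edited) step name,
    // or null when the user cancelled.
    return dialog.open();
  }
}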
// Copyright (C) 2009 The Android Open Source Project // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.gerrit.server.project; import com.google.common.collect.Lists; import com.google.gerrit.common.PageLinks; import com.google.gerrit.common.data.AccessSection; import com.google.gerrit.common.data.Capable; import com.google.gerrit.common.data.ContributorAgreement; import com.google.gerrit.common.data.GroupReference; import com.google.gerrit.common.data.Permission; import com.google.gerrit.common.data.PermissionRule; import com.google.gerrit.common.data.PermissionRule.Action; import com.google.gerrit.reviewdb.client.AccountGroup; import com.google.gerrit.reviewdb.client.Branch; import com.google.gerrit.reviewdb.client.Change; import com.google.gerrit.reviewdb.client.Project; import com.google.gerrit.server.CurrentUser; import com.google.gerrit.server.IdentifiedUser; import com.google.gerrit.server.InternalUser; import com.google.gerrit.server.config.CanonicalWebUrl; import com.google.gerrit.server.config.GitReceivePackGroups; import com.google.gerrit.server.config.GitUploadPackGroups; import com.google.inject.Inject; import com.google.inject.Provider; import com.google.inject.assistedinject.Assisted; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import javax.annotation.Nullable; /** Access control management for a user accessing a project's data. 
*/ public class ProjectControl { public static final int VISIBLE = 1 << 0; public static final int OWNER = 1 << 1; public static class GenericFactory { private final ProjectCache projectCache; @Inject GenericFactory(final ProjectCache pc) { projectCache = pc; } public ProjectControl controlFor(Project.NameKey nameKey, CurrentUser user) throws NoSuchProjectException { final ProjectState p = projectCache.get(nameKey); if (p == null) { throw new NoSuchProjectException(nameKey); } return p.controlFor(user); } } public static class Factory { private final Provider<PerRequestProjectControlCache> userCache; @Inject Factory(Provider<PerRequestProjectControlCache> uc) { userCache = uc; } public ProjectControl controlFor(final Project.NameKey nameKey) throws NoSuchProjectException { return userCache.get().get(nameKey); } public ProjectControl validateFor(final Project.NameKey nameKey) throws NoSuchProjectException { return validateFor(nameKey, VISIBLE); } public ProjectControl ownerFor(final Project.NameKey nameKey) throws NoSuchProjectException { return validateFor(nameKey, OWNER); } public ProjectControl validateFor(final Project.NameKey nameKey, final int need) throws NoSuchProjectException { final ProjectControl c = controlFor(nameKey); if ((need & VISIBLE) == VISIBLE && c.isVisible()) { return c; } if ((need & OWNER) == OWNER && c.isOwner()) { return c; } throw new NoSuchProjectException(nameKey); } } interface AssistedFactory { ProjectControl create(CurrentUser who, ProjectState ps); } private final Set<AccountGroup.UUID> uploadGroups; private final Set<AccountGroup.UUID> receiveGroups; private final String canonicalWebUrl; private final CurrentUser user; private final ProjectState state; private final PermissionCollection.Factory permissionFilter; private final Collection<ContributorAgreement> contributorAgreements; private List<SectionMatcher> allSections; private Map<String, RefControl> refControls; private Boolean declaredOwner; @Inject ProjectControl(@GitUploadPackGroups Set<AccountGroup.UUID> uploadGroups, @GitReceivePackGroups Set<AccountGroup.UUID> receiveGroups, final ProjectCache pc, final PermissionCollection.Factory permissionFilter, @CanonicalWebUrl @Nullable final String canonicalWebUrl, @Assisted CurrentUser who, @Assisted ProjectState ps) { this.uploadGroups = uploadGroups; this.receiveGroups = receiveGroups; this.permissionFilter = permissionFilter; this.contributorAgreements = pc.getAllProjects().getConfig().getContributorAgreements(); this.canonicalWebUrl = canonicalWebUrl; user = who; state = ps; } public ProjectControl forUser(CurrentUser who) { ProjectControl r = state.controlFor(who); // Not per-user, and reusing saves lookup time. 
r.allSections = allSections; return r; } public ChangeControl controlFor(final Change change) { return new ChangeControl(controlForRef(change.getDest()), change); } public RefControl controlForRef(Branch.NameKey ref) { return controlForRef(ref.get()); } public RefControl controlForRef(String refName) { if (refControls == null) { refControls = new HashMap<String, RefControl>(); } RefControl ctl = refControls.get(refName); if (ctl == null) { PermissionCollection relevant = permissionFilter.filter(access(), refName, user.getUserName()); ctl = new RefControl(this, refName, relevant); refControls.put(refName, ctl); } return ctl; } public CurrentUser getCurrentUser() { return user; } public ProjectState getProjectState() { return state; } public Project getProject() { return state.getProject(); } private boolean isHidden() { return getProject().getState().equals(Project.State.HIDDEN); } /** Can this user see this project exists? */ public boolean isVisible() { return (user instanceof InternalUser || canPerformOnAnyRef(Permission.READ)) && !isHidden(); } public boolean canAddRefs() { return (canPerformOnAnyRef(Permission.CREATE) || isOwnerAnyRef()); } /** Can this user see all the refs in this projects? */ public boolean allRefsAreVisible() { return allRefsAreVisibleExcept(Collections.<String> emptySet()); } public boolean allRefsAreVisibleExcept(Set<String> except) { return user instanceof InternalUser || canPerformOnAllRefs(Permission.READ, except); } /** Is this user a project owner? Ownership does not imply {@link #isVisible()} */ public boolean isOwner() { return isDeclaredOwner() || user.getCapabilities().canAdministrateServer(); } private boolean isDeclaredOwner() { if (declaredOwner == null) { declaredOwner = state.isOwner(user.getEffectiveGroups()); } return declaredOwner; } /** Does this user have ownership on at least one reference name? */ public boolean isOwnerAnyRef() { return canPerformOnAnyRef(Permission.OWNER) || user.getCapabilities().canAdministrateServer(); } /** @return true if the user can upload to at least one reference */ public Capable canPushToAtLeastOneRef() { if (! canPerformOnAnyRef(Permission.PUSH) && ! canPerformOnAnyRef(Permission.PUSH_TAG)) { String pName = state.getProject().getName(); return new Capable("Upload denied for project '" + pName + "'"); } Project project = state.getProject(); if (project.isUseContributorAgreements()) { return verifyActiveContributorAgreement(); } return Capable.OK; } public Set<GroupReference> getAllGroups() { final Set<GroupReference> all = new HashSet<GroupReference>(); for (final SectionMatcher matcher : access()) { final AccessSection section = matcher.section; for (final Permission permission : section.getPermissions()) { for (final PermissionRule rule : permission.getRules()) { all.add(rule.getGroup()); } } } return all; } private Capable verifyActiveContributorAgreement() { if (! 
(user instanceof IdentifiedUser)) { return new Capable("Must be logged in to verify Contributor Agreement"); } final IdentifiedUser iUser = (IdentifiedUser) user; boolean hasContactInfo = !missing(iUser.getAccount().getFullName()) && !missing(iUser.getAccount().getPreferredEmail()) && iUser.getAccount().isContactFiled(); List<AccountGroup.UUID> okGroupIds = Lists.newArrayList(); List<AccountGroup.UUID> missingInfoGroupIds = Lists.newArrayList(); for (ContributorAgreement ca : contributorAgreements) { List<AccountGroup.UUID> groupIds; if (hasContactInfo || !ca.isRequireContactInformation()) { groupIds = okGroupIds; } else { groupIds = missingInfoGroupIds; } for (PermissionRule rule : ca.getAccepted()) { if ((rule.getAction() == Action.ALLOW) && (rule.getGroup() != null) && (rule.getGroup().getUUID() != null)) { groupIds.add(new AccountGroup.UUID(rule.getGroup().getUUID().get())); } } } if (iUser.getEffectiveGroups().containsAnyOf(okGroupIds)) { return Capable.OK; } if (iUser.getEffectiveGroups().containsAnyOf(missingInfoGroupIds)) { final StringBuilder msg = new StringBuilder(); for (ContributorAgreement ca : contributorAgreements) { if (ca.isRequireContactInformation()) { msg.append(ca.getName()); break; } } msg.append(" contributor agreement requires"); msg.append(" current contact information.\n"); if (canonicalWebUrl != null) { msg.append("\nPlease review your contact information"); msg.append(":\n\n "); msg.append(canonicalWebUrl); msg.append("#"); msg.append(PageLinks.SETTINGS_CONTACT); msg.append("\n"); } msg.append("\n"); return new Capable(msg.toString()); } final StringBuilder msg = new StringBuilder(); msg.append(" A Contributor Agreement must be completed before uploading"); if (canonicalWebUrl != null) { msg.append(":\n\n "); msg.append(canonicalWebUrl); msg.append("#"); msg.append(PageLinks.SETTINGS_AGREEMENTS); msg.append("\n"); } else { msg.append("."); } msg.append("\n"); return new Capable(msg.toString()); } private static boolean missing(final String value) { return value == null || value.trim().equals(""); } private boolean canPerformOnAnyRef(String permissionName) { for (SectionMatcher matcher : access()) { AccessSection section = matcher.section; Permission permission = section.getPermission(permissionName); if (permission == null) { continue; } for (PermissionRule rule : permission.getRules()) { if (rule.isBlock() || rule.isDeny() || !match(rule)) { continue; } // Being in a group that was granted this permission is only an // approximation. There might be overrides and doNotInherit // that would render this to be false. // if (controlForRef(section.getName()).canPerform(permissionName)) { return true; } else { break; } } } return false; } private boolean canPerformOnAllRefs(String permission, Set<String> except) { boolean canPerform = false; Set<String> patterns = allRefPatterns(permission); if (patterns.contains(AccessSection.ALL)) { // Only possible if granted on the pattern that // matches every possible reference. Check all // patterns also have the permission. 
// for (final String pattern : patterns) { if (controlForRef(pattern).canPerform(permission)) { canPerform = true; } else if (except.contains(pattern)) { continue; } else { return false; } } } return canPerform; } private Set<String> allRefPatterns(String permissionName) { Set<String> all = new HashSet<String>(); for (SectionMatcher matcher : access()) { AccessSection section = matcher.section; Permission permission = section.getPermission(permissionName); if (permission != null) { all.add(section.getName()); } } return all; } private List<SectionMatcher> access() { if (allSections == null) { allSections = state.getAllSections(); } return allSections; } boolean match(PermissionRule rule) { return match(rule.getGroup().getUUID()); } boolean match(AccountGroup.UUID uuid) { if (AccountGroup.PROJECT_OWNERS.equals(uuid)) { return isDeclaredOwner(); } else { return user.getEffectiveGroups().contains(uuid); } } public boolean canRunUploadPack() { for (AccountGroup.UUID group : uploadGroups) { if (match(group)) { return true; } } return false; } public boolean canRunReceivePack() { for (AccountGroup.UUID group : receiveGroups) { if (match(group)) { return true; } } return false; } }
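/*
 * A standalone sketch of the bit-flag validation pattern used by ProjectControl.Factory above:
 * callers OR together the access bits they need (VISIBLE, OWNER), and the control is returned as
 * soon as any requested bit is satisfied. The Control interface below is hypothetical and exists
 * only to show the pattern outside of Gerrit's injection wiring.
 */
class AccessFlagsSketch {
  static final int VISIBLE = 1 << 0;
  static final int OWNER = 1 << 1;

  interface Control {
    boolean isVisible();
    boolean isOwner();
  }

  // Returns the control if any requested capability is granted; otherwise signals
  // "no such project" so the project's existence is not leaked to the caller.
  static Control validateFor(Control c, int need) throws Exception {
    if ((need & VISIBLE) == VISIBLE && c.isVisible()) {
      return c;
    }
    if ((need & OWNER) == OWNER && c.isOwner()) {
      return c;
    }
    throw new Exception("no such project"); // stands in for NoSuchProjectException
  }

  public static void main(String[] args) throws Exception {
    Control ownerOnly = new Control() {
      public boolean isVisible() {
        return false;
      }

      public boolean isOwner() {
        return true;
      }
    };
    // Requesting OWNER succeeds even though the project is not VISIBLE,
    // matching the documented "ownership does not imply isVisible()" behavior.
    System.out.println(validateFor(ownerOnly, OWNER).isOwner());
  }
}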
/* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ package org.elasticsearch.xpack.watcher.actions.pagerduty; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.watcher.actions.Action; import org.elasticsearch.xpack.core.watcher.execution.WatchExecutionContext; import org.elasticsearch.xpack.core.watcher.execution.Wid; import org.elasticsearch.xpack.core.watcher.watch.Payload; import org.elasticsearch.xpack.watcher.common.http.HttpProxy; import org.elasticsearch.xpack.watcher.common.http.HttpRequest; import org.elasticsearch.xpack.watcher.common.http.HttpResponse; import org.elasticsearch.xpack.watcher.common.text.TextTemplate; import org.elasticsearch.xpack.watcher.common.text.TextTemplateEngine; import org.elasticsearch.xpack.watcher.notification.pagerduty.IncidentEvent; import org.elasticsearch.xpack.watcher.notification.pagerduty.IncidentEventContext; import org.elasticsearch.xpack.watcher.notification.pagerduty.IncidentEventDefaults; import org.elasticsearch.xpack.watcher.notification.pagerduty.PagerDutyAccount; import org.elasticsearch.xpack.watcher.notification.pagerduty.PagerDutyService; import org.elasticsearch.xpack.watcher.notification.pagerduty.SentEvent; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import org.junit.Before; import java.util.Collections; import java.util.HashMap; import java.util.Map; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.xpack.watcher.actions.ActionBuilders.pagerDutyAction; import static org.elasticsearch.xpack.watcher.test.WatcherTestUtils.mockExecutionContextBuilder; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.sameInstance; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; public class PagerDutyActionTests extends ESTestCase { private PagerDutyService service; @Before public void init() throws Exception { service = mock(PagerDutyService.class); } public void testExecute() throws Exception { final String accountName = "account1"; TextTemplateEngine templateEngine = mock(TextTemplateEngine.class); TextTemplate description = new TextTemplate("_description"); IncidentEvent.Template.Builder eventBuilder = new IncidentEvent.Template.Builder(description); boolean attachPayload = randomBoolean(); eventBuilder.setAttachPayload(attachPayload); eventBuilder.setAccount(accountName); IncidentEvent.Template eventTemplate = eventBuilder.build(); PagerDutyAction action = new PagerDutyAction(eventTemplate); ExecutablePagerDutyAction executable = new ExecutablePagerDutyAction(action, logger, service, templateEngine); Map<String, Object> data = new HashMap<>(); Payload payload = new Payload.Simple(data); Map<String, Object> metadata = 
MapBuilder.<String, Object>newMapBuilder().put("_key", "_val").map(); DateTime now = DateTime.now(DateTimeZone.UTC); Wid wid = new Wid(randomAlphaOfLength(5), now); WatchExecutionContext ctx = mockExecutionContextBuilder(wid.watchId()) .wid(wid) .payload(payload) .time(wid.watchId(), now) .metadata(metadata) .buildMock(); Map<String, Object> ctxModel = new HashMap<>(); ctxModel.put("id", ctx.id().value()); ctxModel.put("watch_id", wid.watchId()); ctxModel.put("payload", data); ctxModel.put("metadata", metadata); ctxModel.put("execution_time", now); Map<String, Object> triggerModel = new HashMap<>(); triggerModel.put("triggered_time", now); triggerModel.put("scheduled_time", now); ctxModel.put("trigger", triggerModel); ctxModel.put("vars", Collections.emptyMap()); Map<String, Object> expectedModel = new HashMap<>(); expectedModel.put("ctx", ctxModel); when(templateEngine.render(description, expectedModel)).thenReturn(description.getTemplate()); IncidentEvent event = new IncidentEvent(description.getTemplate(), null, wid.watchId(), null, null, accountName, attachPayload, null, null); PagerDutyAccount account = mock(PagerDutyAccount.class); when(account.getDefaults()).thenReturn(new IncidentEventDefaults(Settings.EMPTY)); HttpResponse response = mock(HttpResponse.class); when(response.status()).thenReturn(200); HttpRequest request = mock(HttpRequest.class); SentEvent sentEvent = SentEvent.responded(event, request, response); when(account.send(event, payload, wid.watchId())).thenReturn(sentEvent); when(service.getAccount(accountName)).thenReturn(account); Action.Result result = executable.execute("_id", ctx, payload); assertThat(result, notNullValue()); assertThat(result, instanceOf(PagerDutyAction.Result.Executed.class)); assertThat(result.status(), equalTo(Action.Result.Status.SUCCESS)); assertThat(((PagerDutyAction.Result.Executed) result).sentEvent(), sameInstance(sentEvent)); } public void testParser() throws Exception { XContentBuilder builder = jsonBuilder().startObject(); String accountName = randomAlphaOfLength(10); builder.field("account", accountName); TextTemplate incidentKey = null; if (randomBoolean()) { incidentKey = new TextTemplate("_incident_key"); builder.field("incident_key", incidentKey); } TextTemplate description = null; if (randomBoolean()) { description = new TextTemplate("_description"); builder.field("description", description); } TextTemplate client = null; if (randomBoolean()) { client = new TextTemplate("_client"); builder.field("client", client); } TextTemplate clientUrl = null; if (randomBoolean()) { clientUrl = new TextTemplate("_client_url"); builder.field("client_url", clientUrl); } TextTemplate eventType = null; if (randomBoolean()) { eventType = new TextTemplate(randomFrom("trigger", "resolve", "acknowledge")); builder.field("event_type", eventType); } Boolean attachPayload = randomBoolean() ? null : randomBoolean(); if (attachPayload != null) { builder.field("attach_payload", attachPayload.booleanValue()); } HttpProxy proxy = null; if (randomBoolean()) { proxy = new HttpProxy("localhost", 8080); proxy.toXContent(builder, ToXContent.EMPTY_PARAMS); } IncidentEventContext.Template[] contexts = null; if (randomBoolean()) { contexts = new IncidentEventContext.Template[] { IncidentEventContext.Template.link(new TextTemplate("_href"), new TextTemplate("_text")), IncidentEventContext.Template.image(new TextTemplate("_src"), new TextTemplate("_href"), new TextTemplate("_alt")) }; String fieldName = randomBoolean() ? 
"contexts" : "context"; builder.array(fieldName, (Object) contexts); } builder.endObject(); BytesReference bytes = BytesReference.bytes(builder); logger.info("pagerduty action json [{}]", bytes.utf8ToString()); XContentParser parser = createParser(JsonXContent.jsonXContent, bytes); parser.nextToken(); PagerDutyAction action = PagerDutyAction.parse("_watch", "_action", parser); assertThat(action, notNullValue()); assertThat(action.event.account, is(accountName)); assertThat(action.event, notNullValue()); assertThat(action.event, instanceOf(IncidentEvent.Template.class)); assertThat(action.event, is(new IncidentEvent.Template(description, eventType, incidentKey, client, clientUrl, accountName, attachPayload, contexts, proxy))); } public void testParserSelfGenerated() throws Exception { IncidentEvent.Template.Builder event = IncidentEvent.templateBuilder(randomAlphaOfLength(50)); if (randomBoolean()) { event.setIncidentKey(new TextTemplate(randomAlphaOfLength(50))); } if (randomBoolean()) { event.setClient(new TextTemplate(randomAlphaOfLength(50))); } if (randomBoolean()) { event.setClientUrl(new TextTemplate(randomAlphaOfLength(50))); } if (randomBoolean()) { event.setAttachPayload(randomBoolean()); } if (randomBoolean()) { event.addContext(IncidentEventContext.Template.link(new TextTemplate("_href"), new TextTemplate("_text"))); } if (randomBoolean()) { event.addContext(IncidentEventContext.Template.image(new TextTemplate("_src"), new TextTemplate("_href"), new TextTemplate("_alt"))); } if (randomBoolean()) { event.setEventType(new TextTemplate(randomAlphaOfLength(50))); } if (randomBoolean()) { event.setAccount(randomAlphaOfLength(50)).build(); } if (randomBoolean()) { event.setProxy(new HttpProxy("localhost", 8080)); } PagerDutyAction action = pagerDutyAction(event).build(); XContentBuilder jsonBuilder = jsonBuilder(); action.toXContent(jsonBuilder, ToXContent.EMPTY_PARAMS); XContentParser parser = createParser(jsonBuilder); parser.nextToken(); PagerDutyAction parsedAction = PagerDutyAction.parse("_w1", "_a1", parser); assertThat(parsedAction, notNullValue()); assertThat(parsedAction, is(action)); } public void testParserInvalid() throws Exception { try { XContentBuilder builder = jsonBuilder().startObject().field("unknown_field", "value").endObject(); XContentParser parser = createParser(builder); parser.nextToken(); PagerDutyAction.parse("_watch", "_action", parser); fail("Expected ElasticsearchParseException but did not happen"); } catch (ElasticsearchParseException e) { } } }
/* * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * */ package org.wso2.andes.server.store; import java.io.File; import java.util.HashMap; import java.util.List; import java.util.Map; import org.apache.commons.configuration.PropertiesConfiguration; import org.wso2.andes.AMQException; import org.wso2.andes.common.AMQPFilterTypes; import org.wso2.andes.framing.AMQShortString; import org.wso2.andes.framing.BasicContentHeaderProperties; import org.wso2.andes.framing.ContentHeaderBody; import org.wso2.andes.framing.FieldTable; import org.wso2.andes.framing.abstraction.MessagePublishInfo; import org.wso2.andes.framing.amqp_8_0.BasicConsumeBodyImpl; import org.wso2.andes.server.binding.Binding; import org.wso2.andes.server.configuration.VirtualHostConfiguration; import org.wso2.andes.server.exchange.DirectExchange; import org.wso2.andes.server.exchange.Exchange; import org.wso2.andes.server.exchange.ExchangeRegistry; import org.wso2.andes.server.exchange.ExchangeType; import org.wso2.andes.server.exchange.TopicExchange; import org.wso2.andes.server.message.AMQMessage; import org.wso2.andes.server.message.MessageMetaData; import org.wso2.andes.server.queue.AMQPriorityQueue; import org.wso2.andes.server.queue.AMQQueue; import org.wso2.andes.server.queue.AMQQueueFactory; import org.wso2.andes.server.queue.BaseQueue; import org.wso2.andes.server.queue.ConflationQueue; import org.wso2.andes.server.queue.IncomingMessage; import org.wso2.andes.server.queue.QueueRegistry; import org.wso2.andes.server.queue.SimpleAMQQueue; import org.wso2.andes.server.registry.ApplicationRegistry; import org.wso2.andes.server.txn.AutoCommitTransaction; import org.wso2.andes.server.txn.ServerTransaction; import org.wso2.andes.server.util.InternalBrokerBaseCase; import org.wso2.andes.server.virtualhost.VirtualHost; import org.wso2.andes.util.FileUtils; import sun.reflect.generics.reflectiveObjects.NotImplementedException; /** * This tests the MessageStores by using the available interfaces. * * For persistent stores, it validates that Exchanges, Queues, Bindings and * Messages are persisted and recovered correctly. 
*/ public class MessageStoreTest extends InternalBrokerBaseCase { public static final int DEFAULT_PRIORTY_LEVEL = 5; public static final String SELECTOR_VALUE = "Test = 'MST'"; public static final String LVQ_KEY = "MST-LVQ-KEY"; AMQShortString nonDurableExchangeName = new AMQShortString("MST-NonDurableDirectExchange"); AMQShortString directExchangeName = new AMQShortString("MST-DirectExchange"); AMQShortString topicExchangeName = new AMQShortString("MST-TopicExchange"); AMQShortString durablePriorityTopicQueueName = new AMQShortString("MST-PriorityTopicQueue-Durable"); AMQShortString durableTopicQueueName = new AMQShortString("MST-TopicQueue-Durable"); AMQShortString priorityTopicQueueName = new AMQShortString("MST-PriorityTopicQueue"); AMQShortString topicQueueName = new AMQShortString("MST-TopicQueue"); AMQShortString durableExclusiveQueueName = new AMQShortString("MST-Queue-Durable-Exclusive"); AMQShortString durablePriorityQueueName = new AMQShortString("MST-PriorityQueue-Durable"); AMQShortString durableLastValueQueueName = new AMQShortString("MST-LastValueQueue-Durable"); AMQShortString durableQueueName = new AMQShortString("MST-Queue-Durable"); AMQShortString priorityQueueName = new AMQShortString("MST-PriorityQueue"); AMQShortString queueName = new AMQShortString("MST-Queue"); AMQShortString directRouting = new AMQShortString("MST-direct"); AMQShortString topicRouting = new AMQShortString("MST-topic"); AMQShortString queueOwner = new AMQShortString("MST"); protected PropertiesConfiguration _config; public void setUp() throws Exception { super.setUp(); String storePath = System.getProperty("QPID_WORK") + "/" + getName(); _config = new PropertiesConfiguration(); _config.addProperty("store.class", getTestProfileMessageStoreClassName()); _config.addProperty("store.environment-path", storePath); cleanup(new File(storePath)); reloadVirtualHost(); } protected void reloadVirtualHost() { VirtualHost original = getVirtualHost(); if (getVirtualHost() != null) { try { getVirtualHost().close(); getVirtualHost().getApplicationRegistry(). getVirtualHostRegistry().unregisterVirtualHost(getVirtualHost()); } catch (Exception e) { e.printStackTrace(); fail(e.getMessage()); } } try { setVirtualHost(ApplicationRegistry.getInstance().createVirtualHost(new VirtualHostConfiguration(getClass().getName(), _config))); } catch (Exception e) { e.printStackTrace(); fail(e.getMessage()); } assertTrue("Virtualhost has not changed, reload was not successful", original != getVirtualHost()); } /** * Old MessageStoreTest segment which runs against both persistent and non-persistent stores * creating queues, exchanges and bindings and then verifying message delivery to them. 
*/ public void testQueueExchangeAndBindingCreation() throws Exception { assertEquals("Should not be any existing queues", 0, getVirtualHost().getQueueRegistry().getQueues().size()); createAllQueues(); createAllTopicQueues(); //Register Non-Durable DirectExchange Exchange nonDurableExchange = createExchange(DirectExchange.TYPE, nonDurableExchangeName, false); bindAllQueuesToExchange(nonDurableExchange, directRouting); //Register DirectExchange Exchange directExchange = createExchange(DirectExchange.TYPE, directExchangeName, true); bindAllQueuesToExchange(directExchange, directRouting); //Register TopicExchange Exchange topicExchange = createExchange(TopicExchange.TYPE, topicExchangeName, true); bindAllTopicQueuesToExchange(topicExchange, topicRouting); //Send Message To NonDurable direct Exchange = persistent sendMessageOnExchange(nonDurableExchange, directRouting, true); // and non-persistent sendMessageOnExchange(nonDurableExchange, directRouting, false); //Send Message To direct Exchange = persistent sendMessageOnExchange(directExchange, directRouting, true); // and non-persistent sendMessageOnExchange(directExchange, directRouting, false); //Send Message To topic Exchange = persistent sendMessageOnExchange(topicExchange, topicRouting, true); // and non-persistent sendMessageOnExchange(topicExchange, topicRouting, false); //Ensure all the Queues have four messages (one transient, one persistent) x 2 exchange routings validateMessageOnQueues(4, true); //Ensure all the topics have two messages (one transient, one persistent) validateMessageOnTopics(2, true); assertEquals("Not all queues correctly registered", 10, getVirtualHost().getQueueRegistry().getQueues().size()); } /** * Tests message persistence by running the testQueueExchangeAndBindingCreation() method above * before reloading the virtual host and ensuring that the persistent messages were restored. * * More specific testing of message persistence is left to store-specific unit testing. */ public void testMessagePersistence() throws Exception { testQueueExchangeAndBindingCreation(); reloadVirtualHost(); //Validate durable queues and subscriptions still have the persistent messages validateMessageOnQueues(2, false); validateMessageOnTopics(1, false); } /** * Tests message removal by running the testMessagePersistence() method above before * clearing the queues, reloading the virtual host, and ensuring that the persistent * messages were removed from the queues. */ public void testMessageRemoval() throws Exception { testMessagePersistence(); QueueRegistry queueRegistry = getVirtualHost().getQueueRegistry(); assertEquals("Incorrect number of queues registered after recovery", 6, queueRegistry.getQueues().size()); //clear the queue queueRegistry.getQueue(durableQueueName).clearQueue(); //check the messages are gone validateMessageOnQueue(durableQueueName, 0); //reload and verify messages arent restored reloadVirtualHost(); validateMessageOnQueue(durableQueueName, 0); } /** * Tests queue persistence by creating a selection of queues with differing properties, both * durable and non durable, and ensuring that following the recovery process the correct queues * are present and any property manipulations (eg queue exclusivity) are correctly recovered. 
*/ public void testQueuePersistence() throws Exception { assertEquals("Should not be any existing queues", 0, getVirtualHost().getQueueRegistry().getQueues().size()); //create durable and non durable queues/topics createAllQueues(); createAllTopicQueues(); //reload the virtual host, prompting recovery of the queues/topics reloadVirtualHost(); QueueRegistry queueRegistry = getVirtualHost().getQueueRegistry(); assertEquals("Incorrect number of queues registered after recovery", 6, queueRegistry.getQueues().size()); //Validate the non-Durable Queues were not recovered. assertNull("Non-Durable queue still registered:" + priorityQueueName, queueRegistry.getQueue(priorityQueueName)); assertNull("Non-Durable queue still registered:" + queueName, queueRegistry.getQueue(queueName)); assertNull("Non-Durable queue still registered:" + priorityTopicQueueName, queueRegistry.getQueue(priorityTopicQueueName)); assertNull("Non-Durable queue still registered:" + topicQueueName, queueRegistry.getQueue(topicQueueName)); //Validate normally expected properties of Queues/Topics validateDurableQueueProperties(); //Update the durable exclusive queue's exclusivity and verify it is persisted and recovered correctly setQueueExclusivity(false); validateQueueExclusivityProperty(false); //Reload the Virtualhost to recover the queues again reloadVirtualHost(); //verify the change was persisted and recovered correctly validateQueueExclusivityProperty(false); } /** * Tests queue removal by creating a durable queue, verifying it recovers, and * then removing it from the store, and ensuring that following the second reload * process it is not recovered. */ public void testDurableQueueRemoval() throws Exception { //Register Durable Queue createQueue(durableQueueName, false, true, false, false); QueueRegistry queueRegistry = getVirtualHost().getQueueRegistry(); assertEquals("Incorrect number of queues registered before recovery", 1, queueRegistry.getQueues().size()); reloadVirtualHost(); queueRegistry = getVirtualHost().getQueueRegistry(); assertEquals("Incorrect number of queues registered after first recovery", 1, queueRegistry.getQueues().size()); //test that removing the queue means it is not recovered next time getVirtualHost().getDurableConfigurationStore().removeQueue(queueRegistry.getQueue(durableQueueName)); reloadVirtualHost(); queueRegistry = getVirtualHost().getQueueRegistry(); assertEquals("Incorrect number of queues registered after second recovery", 0, queueRegistry.getQueues().size()); assertNull("Durable queue was not removed:" + durableQueueName, queueRegistry.getQueue(durableQueueName)); } /** * Tests exchange persistence by creating a selection of exchanges, both durable * and non durable, and ensuring that following the recovery process the correct * durable exchanges are still present. */ public void testExchangePersistence() throws Exception { int origExchangeCount = getVirtualHost().getExchangeRegistry().getExchangeNames().size(); Map<AMQShortString, Exchange> oldExchanges = createExchanges(); assertEquals("Incorrect number of exchanges registered before recovery", origExchangeCount + 3, getVirtualHost().getExchangeRegistry().getExchangeNames().size()); reloadVirtualHost(); //verify the exchanges present after recovery validateExchanges(origExchangeCount, oldExchanges); } /** * Tests exchange removal by creating a durable exchange, verifying it recovers, and * then removing it from the store, and ensuring that following the second reload * process it is not recovered. 
*/ public void testDurableExchangeRemoval() throws Exception { int origExchangeCount = getVirtualHost().getExchangeRegistry().getExchangeNames().size(); createExchange(DirectExchange.TYPE, directExchangeName, true); ExchangeRegistry exchangeRegistry = getVirtualHost().getExchangeRegistry(); assertEquals("Incorrect number of exchanges registered before recovery", origExchangeCount + 1, exchangeRegistry.getExchangeNames().size()); reloadVirtualHost(); exchangeRegistry = getVirtualHost().getExchangeRegistry(); assertEquals("Incorrect number of exchanges registered after first recovery", origExchangeCount + 1, exchangeRegistry.getExchangeNames().size()); //test that removing the exchange means it is not recovered next time getVirtualHost().getDurableConfigurationStore().removeExchange(exchangeRegistry.getExchange(directExchangeName)); reloadVirtualHost(); exchangeRegistry = getVirtualHost().getExchangeRegistry(); assertEquals("Incorrect number of exchanges registered after second recovery", origExchangeCount, exchangeRegistry.getExchangeNames().size()); assertNull("Durable exchange was not removed:" + directExchangeName, exchangeRegistry.getExchange(directExchangeName)); } /** * Tests binding persistence by creating a selection of queues and exchanges, both durable * and non durable, then adding bindings with and without selectors before reloading the * virtual host and verifying that following the recovery process the correct durable * bindings (those for durable queues to durable exchanges) are still present. */ public void testBindingPersistence() throws Exception { int origExchangeCount = getVirtualHost().getExchangeRegistry().getExchangeNames().size(); createAllQueues(); createAllTopicQueues(); Map<AMQShortString, Exchange> exchanges = createExchanges(); Exchange nonDurableExchange = exchanges.get(nonDurableExchangeName); Exchange directExchange = exchanges.get(directExchangeName); Exchange topicExchange = exchanges.get(topicExchangeName); bindAllQueuesToExchange(nonDurableExchange, directRouting); bindAllQueuesToExchange(directExchange, directRouting); bindAllTopicQueuesToExchange(topicExchange, topicRouting); assertEquals("Incorrect number of exchanges registered before recovery", origExchangeCount + 3, getVirtualHost().getExchangeRegistry().getExchangeNames().size()); reloadVirtualHost(); validateExchanges(origExchangeCount, exchanges); validateBindingProperties(); } /** * Tests binding removal by creating a durable exchange, and queue, binding them together, * recovering to verify the persistence, then removing it from the store, and ensuring * that following the second reload process it is not recovered. 
*/ public void testDurableBindingRemoval() throws Exception { QueueRegistry queueRegistry = getVirtualHost().getQueueRegistry(); //create durable queue and exchange, bind them Exchange exch = createExchange(DirectExchange.TYPE, directExchangeName, true); createQueue(durableQueueName, false, true, false, false); bindQueueToExchange(exch, directRouting, queueRegistry.getQueue(durableQueueName), false, null); assertEquals("Incorrect number of bindings registered before recovery", 1, queueRegistry.getQueue(durableQueueName).getBindings().size()); //verify binding is actually normally recovered reloadVirtualHost(); queueRegistry = getVirtualHost().getQueueRegistry(); assertEquals("Incorrect number of bindings registered after first recovery", 1, queueRegistry.getQueue(durableQueueName).getBindings().size()); ExchangeRegistry exchangeRegistry = getVirtualHost().getExchangeRegistry(); exch = exchangeRegistry.getExchange(directExchangeName); assertNotNull("Exchange was not recovered", exch); //remove the binding and verify result after recovery unbindQueueFromExchange(exch, directRouting, queueRegistry.getQueue(durableQueueName), false, null); reloadVirtualHost(); queueRegistry = getVirtualHost().getQueueRegistry(); assertEquals("Incorrect number of bindings registered after second recovery", 0, queueRegistry.getQueue(durableQueueName).getBindings().size()); } /** * Validates that the durable exchanges are still present, the non durable exchange is not, * and that the new exchanges are not the same objects as the provided list (i.e. that the * reload actually generated new exchange objects) */ private void validateExchanges(int originalNumExchanges, Map<AMQShortString, Exchange> oldExchanges) { ExchangeRegistry registry = getVirtualHost().getExchangeRegistry(); assertTrue(directExchangeName + " exchange NOT reloaded", registry.getExchangeNames().contains(directExchangeName)); assertTrue(topicExchangeName + " exchange NOT reloaded", registry.getExchangeNames().contains(topicExchangeName)); assertTrue(nonDurableExchangeName + " exchange reloaded", !registry.getExchangeNames().contains(nonDurableExchangeName)); //check the old exchange objects are not the same as the new exchanges assertTrue(directExchangeName + " exchange NOT reloaded", registry.getExchange(directExchangeName) != oldExchanges.get(directExchangeName)); assertTrue(topicExchangeName + " exchange NOT reloaded", registry.getExchange(topicExchangeName) != oldExchanges.get(topicExchangeName)); // There should only be the original exchanges + our 2 recovered durable exchanges assertEquals("Incorrect number of exchanges available", originalNumExchanges + 2, registry.getExchangeNames().size()); } /** Validates the Durable queues and their properties are as expected following recovery */ private void validateBindingProperties() { QueueRegistry queueRegistry = getVirtualHost().getQueueRegistry(); assertEquals("Incorrect number of (durable) queues following recovery", 6, queueRegistry.getQueues().size()); validateBindingProperties(queueRegistry.getQueue(durablePriorityQueueName).getBindings(), false); validateBindingProperties(queueRegistry.getQueue(durablePriorityTopicQueueName).getBindings(), true); validateBindingProperties(queueRegistry.getQueue(durableQueueName).getBindings(), false); validateBindingProperties(queueRegistry.getQueue(durableTopicQueueName).getBindings(), true); validateBindingProperties(queueRegistry.getQueue(durableExclusiveQueueName).getBindings(), false); } /** * Validate that each queue is bound only once following 
recovery (i.e. that bindings for non durable * queues or to non durable exchanges are not recovered), and if a selector should be present * that it is and contains the correct value * * @param bindings the set of bindings to validate * @param useSelectors if set, check the binding has a JMS_SELECTOR argument and the correct value for it */ private void validateBindingProperties(List<Binding> bindings, boolean useSelectors) { assertEquals("Each queue should only be bound once.", 1, bindings.size()); Binding binding = bindings.get(0); if (useSelectors) { assertTrue("Binding does not contain a Selector argument.", binding.getArguments().containsKey(AMQPFilterTypes.JMS_SELECTOR.getValue())); assertEquals("The binding selector argument is incorrect", SELECTOR_VALUE, binding.getArguments().get(AMQPFilterTypes.JMS_SELECTOR.getValue()).toString()); } } private void setQueueExclusivity(boolean exclusive) throws AMQException { QueueRegistry queueRegistry = getVirtualHost().getQueueRegistry(); AMQQueue queue = queueRegistry.getQueue(durableExclusiveQueueName); queue.setExclusive(exclusive); } private void validateQueueExclusivityProperty(boolean expected) { QueueRegistry queueRegistry = getVirtualHost().getQueueRegistry(); AMQQueue queue = queueRegistry.getQueue(durableExclusiveQueueName); assertEquals("Queue exclusivity was incorrect", queue.isExclusive(), expected); } private void validateDurableQueueProperties() { QueueRegistry queueRegistry = getVirtualHost().getQueueRegistry(); validateQueueProperties(queueRegistry.getQueue(durablePriorityQueueName), true, true, false, false); validateQueueProperties(queueRegistry.getQueue(durablePriorityTopicQueueName), true, true, false, false); validateQueueProperties(queueRegistry.getQueue(durableQueueName), false, true, false, false); validateQueueProperties(queueRegistry.getQueue(durableTopicQueueName), false, true, false, false); validateQueueProperties(queueRegistry.getQueue(durableExclusiveQueueName), false, true, true, false); validateQueueProperties(queueRegistry.getQueue(durableLastValueQueueName), false, true, true, true); } private void validateQueueProperties(AMQQueue queue, boolean usePriority, boolean durable, boolean exclusive, boolean lastValueQueue) { if(usePriority || lastValueQueue) { assertNotSame("Queues cant be both Priority and LastValue based", usePriority, lastValueQueue); } if (usePriority) { assertEquals("Queue is no longer a Priority Queue", AMQPriorityQueue.class, queue.getClass()); assertEquals("Priority Queue does not have set priorities", DEFAULT_PRIORTY_LEVEL, ((AMQPriorityQueue) queue).getPriorities()); } else if (lastValueQueue) { assertEquals("Queue is no longer a LastValue Queue", ConflationQueue.class, queue.getClass()); assertEquals("LastValue Queue Key has changed", LVQ_KEY, ((ConflationQueue) queue).getConflationKey()); } else { assertEquals("Queue is not 'simple'", SimpleAMQQueue.class, queue.getClass()); } assertEquals("Queue owner is not as expected", queueOwner, queue.getOwner()); assertEquals("Queue durability is not as expected", durable, queue.isDurable()); assertEquals("Queue exclusivity is not as expected", exclusive, queue.isExclusive()); } /** * Delete the Store Environment path * * @param configuration The configuration that contains the store environment path. 
*/ private void cleanup(File environmentPath) { if (environmentPath.exists()) { FileUtils.delete(environmentPath, true); } } private void sendMessageOnExchange(Exchange exchange, AMQShortString routingKey, boolean deliveryMode) { //Set MessagePersistence BasicContentHeaderProperties properties = new BasicContentHeaderProperties(); properties.setDeliveryMode(deliveryMode ? Integer.valueOf(2).byteValue() : Integer.valueOf(1).byteValue()); FieldTable headers = properties.getHeaders(); headers.setString("Test", "MST"); properties.setHeaders(headers); MessagePublishInfo messageInfo = new TestMessagePublishInfo(exchange, false, false, routingKey); final IncomingMessage currentMessage; currentMessage = new IncomingMessage(messageInfo); currentMessage.setExchange(exchange); ContentHeaderBody headerBody = new ContentHeaderBody(); headerBody.classId = BasicConsumeBodyImpl.CLASS_ID; headerBody.bodySize = 0; headerBody.setProperties(properties); try { currentMessage.setContentHeaderBody(headerBody); } catch (AMQException e) { fail(e.getMessage()); } currentMessage.setExpiration(); MessageMetaData mmd = currentMessage.headersReceived(); currentMessage.setStoredMessage(getVirtualHost().getMessageStore().addMessage(mmd)); currentMessage.getStoredMessage().flushToStore(); currentMessage.route(); // check and deliver if header says body length is zero if (currentMessage.allContentReceived()) { ServerTransaction trans = new AutoCommitTransaction(getVirtualHost().getMessageStore()); final List<? extends BaseQueue> destinationQueues = currentMessage.getDestinationQueues(); trans.enqueue(currentMessage.getDestinationQueues(), currentMessage, new ServerTransaction.Action() { public void postCommit() { try { AMQMessage message = new AMQMessage(currentMessage.getStoredMessage()); for(BaseQueue queue : destinationQueues) { queue.enqueue(message); } } catch (AMQException e) { e.printStackTrace(); } } public void onRollback() { //To change body of implemented methods use File | Settings | File Templates. 
} }); } } private void createAllQueues() { //Register Durable Priority Queue createQueue(durablePriorityQueueName, true, true, false, false); //Register Durable Simple Queue createQueue(durableQueueName, false, true, false, false); //Register Durable Exclusive Simple Queue createQueue(durableExclusiveQueueName, false, true, true, false); //Register Durable LastValue Queue createQueue(durableLastValueQueueName, false, true, true, true); //Register NON-Durable Priority Queue createQueue(priorityQueueName, true, false, false, false); //Register NON-Durable Simple Queue createQueue(queueName, false, false, false, false); } private void createAllTopicQueues() { //Register Durable Priority Queue createQueue(durablePriorityTopicQueueName, true, true, false, false); //Register Durable Simple Queue createQueue(durableTopicQueueName, false, true, false, false); //Register NON-Durable Priority Queue createQueue(priorityTopicQueueName, true, false, false, false); //Register NON-Durable Simple Queue createQueue(topicQueueName, false, false, false, false); } private void createQueue(AMQShortString queueName, boolean usePriority, boolean durable, boolean exclusive, boolean lastValueQueue) { FieldTable queueArguments = null; if(usePriority || lastValueQueue) { assertNotSame("Queues cant be both Priority and LastValue based", usePriority, lastValueQueue); } if (usePriority) { queueArguments = new FieldTable(); queueArguments.put(AMQQueueFactory.X_QPID_PRIORITIES, DEFAULT_PRIORTY_LEVEL); } if (lastValueQueue) { queueArguments = new FieldTable(); queueArguments.put(new AMQShortString(AMQQueueFactory.QPID_LAST_VALUE_QUEUE_KEY), LVQ_KEY); } AMQQueue queue = null; //Ideally we would be able to use the QueueDeclareHandler here. try { queue = AMQQueueFactory.createAMQQueueImpl(queueName, durable, queueOwner, false, exclusive, getVirtualHost(), queueArguments); validateQueueProperties(queue, usePriority, durable, exclusive, lastValueQueue); if (queue.isDurable() && !queue.isAutoDelete()) { getVirtualHost().getMessageStore().createQueue(queue, queueArguments); } } catch (AMQException e) { fail(e.getMessage()); } getVirtualHost().getQueueRegistry().registerQueue(queue); } private Map<AMQShortString, Exchange> createExchanges() { Map<AMQShortString, Exchange> exchanges = new HashMap<AMQShortString, Exchange>(); //Register non-durable DirectExchange exchanges.put(nonDurableExchangeName, createExchange(DirectExchange.TYPE, nonDurableExchangeName, false)); //Register durable DirectExchange and TopicExchange exchanges.put(directExchangeName ,createExchange(DirectExchange.TYPE, directExchangeName, true)); exchanges.put(topicExchangeName,createExchange(TopicExchange.TYPE, topicExchangeName, true)); return exchanges; } private Exchange createExchange(ExchangeType<?> type, AMQShortString name, boolean durable) { Exchange exchange = null; try { exchange = type.newInstance(getVirtualHost(), name, durable, 0, false); } catch (AMQException e) { fail(e.getMessage()); } try { getVirtualHost().getExchangeRegistry().registerExchange(exchange); if (durable) { getVirtualHost().getMessageStore().createExchange(exchange); } } catch (AMQException e) { fail(e.getMessage()); } return exchange; } private void bindAllQueuesToExchange(Exchange exchange, AMQShortString routingKey) { FieldTable queueArguments = new FieldTable(); queueArguments.put(AMQQueueFactory.X_QPID_PRIORITIES, DEFAULT_PRIORTY_LEVEL); QueueRegistry queueRegistry = getVirtualHost().getQueueRegistry(); bindQueueToExchange(exchange, routingKey, 
queueRegistry.getQueue(durablePriorityQueueName), false, queueArguments); bindQueueToExchange(exchange, routingKey, queueRegistry.getQueue(durableQueueName), false, null); bindQueueToExchange(exchange, routingKey, queueRegistry.getQueue(priorityQueueName), false, queueArguments); bindQueueToExchange(exchange, routingKey, queueRegistry.getQueue(queueName), false, null); bindQueueToExchange(exchange, routingKey, queueRegistry.getQueue(durableExclusiveQueueName), false, null); } private void bindAllTopicQueuesToExchange(Exchange exchange, AMQShortString routingKey) { FieldTable queueArguments = new FieldTable(); queueArguments.put(AMQQueueFactory.X_QPID_PRIORITIES, DEFAULT_PRIORTY_LEVEL); QueueRegistry queueRegistry = getVirtualHost().getQueueRegistry(); bindQueueToExchange(exchange, routingKey, queueRegistry.getQueue(durablePriorityTopicQueueName), true, queueArguments); bindQueueToExchange(exchange, routingKey, queueRegistry.getQueue(durableTopicQueueName), true, null); bindQueueToExchange(exchange, routingKey, queueRegistry.getQueue(priorityTopicQueueName), true, queueArguments); bindQueueToExchange(exchange, routingKey, queueRegistry.getQueue(topicQueueName), true, null); } protected void bindQueueToExchange(Exchange exchange, AMQShortString routingKey, AMQQueue queue, boolean useSelector, FieldTable queueArguments) { FieldTable bindArguments = null; if (useSelector) { bindArguments = new FieldTable(); bindArguments.put(AMQPFilterTypes.JMS_SELECTOR.getValue(), SELECTOR_VALUE ); } try { getVirtualHost().getBindingFactory().addBinding(String.valueOf(routingKey), queue, exchange, FieldTable.convertToMap(bindArguments)); } catch (Exception e) { fail(e.getMessage()); } } protected void unbindQueueFromExchange(Exchange exchange, AMQShortString routingKey, AMQQueue queue, boolean useSelector, FieldTable queueArguments) { FieldTable bindArguments = null; if (useSelector) { bindArguments = new FieldTable(); bindArguments.put(AMQPFilterTypes.JMS_SELECTOR.getValue(), SELECTOR_VALUE ); } try { getVirtualHost().getBindingFactory().removeBinding(String.valueOf(routingKey), queue, exchange, FieldTable.convertToMap(bindArguments)); } catch (Exception e) { fail(e.getMessage()); } } private void validateMessageOnTopics(long messageCount, boolean allQueues) { validateMessageOnQueue(durablePriorityTopicQueueName, messageCount); validateMessageOnQueue(durableTopicQueueName, messageCount); if (allQueues) { validateMessageOnQueue(priorityTopicQueueName, messageCount); validateMessageOnQueue(topicQueueName, messageCount); } } private void validateMessageOnQueues(long messageCount, boolean allQueues) { validateMessageOnQueue(durablePriorityQueueName, messageCount); validateMessageOnQueue(durableQueueName, messageCount); if (allQueues) { validateMessageOnQueue(priorityQueueName, messageCount); validateMessageOnQueue(queueName, messageCount); } } private void validateMessageOnQueue(AMQShortString queueName, long messageCount) { AMQQueue queue = getVirtualHost().getQueueRegistry().getQueue(queueName); assertNotNull("Queue(" + queueName + ") not correctly registered:", queue); assertEquals("Incorrect Message count on queue:" + queueName, messageCount, queue.getMessageCount()); } private class TestMessagePublishInfo implements MessagePublishInfo { Exchange _exchange; boolean _immediate; boolean _mandatory; AMQShortString _routingKey; TestMessagePublishInfo(Exchange exchange, boolean immediate, boolean mandatory, AMQShortString routingKey) { _exchange = exchange; _immediate = immediate; _mandatory = mandatory; 
_routingKey = routingKey; } public AMQShortString getExchange() { return _exchange.getNameShortString(); } public void setExchange(AMQShortString exchange) { //no-op } public void setRoutingKey(AMQShortString routingKey) { throw new NotImplementedException(); } public boolean isImmediate() { return _immediate; } public boolean isMandatory() { return _mandatory; } public AMQShortString getRoutingKey() { return _routingKey; } } }
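/*
 * MessageStoreTest's central idiom is "create state, reload the virtual host, assert what
 * survived": durable entities must be recovered, non-durable ones must not. The toy store below
 * reproduces that idiom with hypothetical names so the durable/non-durable distinction is
 * visible without the broker machinery. Run with -ea to enable the assertions.
 */
import java.util.HashMap;
import java.util.Map;

class RecoverySketch
{
    // survives "restarts"; stands in for the durable configuration store
    static final Map<String, Boolean> persisted = new HashMap<String, Boolean>();

    // the live registry; lost and rebuilt on every reload
    static Map<String, Boolean> live = new HashMap<String, Boolean>();

    static void createQueue(String name, boolean durable)
    {
        live.put(name, durable);
        if (durable)
        {
            persisted.put(name, true); // only durable queues are written to the store
        }
    }

    static void reload()
    {
        live = new HashMap<String, Boolean>(persisted); // recovery repopulates from the store only
    }

    public static void main(String[] args)
    {
        createQueue("MST-Queue-Durable", true);
        createQueue("MST-Queue", false);
        reload();
        assert live.containsKey("MST-Queue-Durable") : "durable queue must be recovered";
        assert !live.containsKey("MST-Queue") : "non-durable queue must not be recovered";
        System.out.println("recovered: " + live.keySet());
    }
}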
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.zeppelin.spark;

import org.apache.commons.lang3.StringUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.SQLContext;
import org.apache.zeppelin.interpreter.ZeppelinContext;
import org.apache.zeppelin.interpreter.InterpreterContext;
import org.apache.zeppelin.interpreter.InterpreterException;
import org.apache.zeppelin.interpreter.InterpreterResult;
import org.apache.zeppelin.interpreter.util.InterpreterOutputStream;
import org.apache.zeppelin.python.IPythonInterpreter;
import org.apache.zeppelin.python.PythonInterpreter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.File;
import java.io.IOException;
import java.io.PrintStream;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.LinkedList;
import java.util.List;
import java.util.Properties;

/**
 * Interpreter for PySpark. It is the first interpreter implementation for PySpark, so it has
 * fewer features than IPySparkInterpreter, but also fewer prerequisites: only python is
 * required.
 */
public class PySparkInterpreter extends PythonInterpreter {

  private static Logger LOGGER = LoggerFactory.getLogger(PySparkInterpreter.class);

  private SparkInterpreter sparkInterpreter;
  private InterpreterContext curIntpContext;

  public PySparkInterpreter(Properties property) {
    super(property);
    this.useBuiltinPy4j = false;
  }

  @Override
  public void open() throws InterpreterException {
    setProperty("zeppelin.python.useIPython", getProperty("zeppelin.pyspark.useIPython", "true"));
    URL [] urls = new URL[0];
    List<URL> urlList = new LinkedList<>();
    String localRepo = getProperty("zeppelin.interpreter.localRepo");
    if (localRepo != null) {
      File localRepoDir = new File(localRepo);
      if (localRepoDir.exists()) {
        File[] files = localRepoDir.listFiles();
        if (files != null) {
          for (File f : files) {
            try {
              urlList.add(f.toURI().toURL());
            } catch (MalformedURLException e) {
              LOGGER.error("Error", e);
            }
          }
        }
      }
    }

    urls = urlList.toArray(urls);
    ClassLoader oldCl = Thread.currentThread().getContextClassLoader();
    try {
      URLClassLoader newCl = new URLClassLoader(urls, oldCl);
      Thread.currentThread().setContextClassLoader(newCl);
      // must create spark interpreter after ClassLoader is set, otherwise the additional jars
      // can not be loaded by spark repl.
      this.sparkInterpreter = getInterpreterInTheSameSessionByClassName(SparkInterpreter.class);
      setProperty("zeppelin.py4j.useAuth",
          sparkInterpreter.getSparkVersion().isSecretSocketSupported() + "");
      // create Python Process and JVM gateway
      super.open();
    } finally {
      Thread.currentThread().setContextClassLoader(oldCl);
    }

    if (!useIPython()) {
      // Initialize Spark in Python Process
      try {
        bootstrapInterpreter("python/zeppelin_pyspark.py");
      } catch (IOException e) {
        LOGGER.error("Fail to bootstrap pyspark", e);
        throw new InterpreterException("Fail to bootstrap pyspark", e);
      }
    }
  }

  @Override
  public void close() throws InterpreterException {
    super.close();
    if (sparkInterpreter != null) {
      sparkInterpreter.close();
    }
  }

  @Override
  protected IPythonInterpreter getIPythonInterpreter() throws InterpreterException {
    return getInterpreterInTheSameSessionByClassName(IPySparkInterpreter.class, false);
  }

  @Override
  protected ZeppelinContext createZeppelinContext() {
    return sparkInterpreter.getZeppelinContext();
  }

  @Override
  public InterpreterResult interpret(String st, InterpreterContext context)
      throws InterpreterException {
    curIntpContext = context;
    // redirect java stdout/stderr to interpreter output. Because pyspark may call java code.
    PrintStream originalStdout = System.out;
    PrintStream originalStderr = System.err;
    try {
      System.setOut(new PrintStream(context.out));
      System.setErr(new PrintStream(context.out));
      Utils.printDeprecateMessage(sparkInterpreter.getSparkVersion(), context, properties);
      return super.interpret(st, context);
    } finally {
      System.setOut(originalStdout);
      System.setErr(originalStderr);
    }
  }

  @Override
  protected void preCallPython(InterpreterContext context) {
    String jobGroup = Utils.buildJobGroupId(context);
    String jobDesc = Utils.buildJobDesc(context);
    callPython(new PythonInterpretRequest(
        String.format("if 'sc' in locals():\n\tsc.setJobGroup('%s', '%s')", jobGroup, jobDesc),
        false, false));

    String pool = "None";
    if (context.getLocalProperties().containsKey("pool")) {
      pool = "'" + context.getLocalProperties().get("pool") + "'";
    }
    String setPoolStmt = "if 'sc' in locals():\n\tsc.setLocalProperty('spark.scheduler.pool', " + pool + ")";
    callPython(new PythonInterpretRequest(setPoolStmt, false, false));

    callPython(new PythonInterpretRequest("intp.setInterpreterContextInPython()", false, false));
  }

  // Python side will call InterpreterContext.get() too, but it runs in a different thread from
  // the java interpreter thread. So we should call this method on the python side as well.
public void setInterpreterContextInPython() { InterpreterContext.set(curIntpContext); } // Run python shell // Choose python in the order of // spark.pyspark.driver.python > spark.pyspark.python > PYSPARK_DRIVER_PYTHON > PYSPARK_PYTHON @Override protected String getPythonExec() { if (!StringUtils.isBlank(getProperty("spark.pyspark.driver.python", ""))) { return properties.getProperty("spark.pyspark.driver.python"); } if (!StringUtils.isBlank(getProperty("spark.pyspark.python", ""))) { return properties.getProperty("spark.pyspark.python"); } if (System.getenv("PYSPARK_DRIVER_PYTHON") != null) { return System.getenv("PYSPARK_DRIVER_PYTHON"); } if (System.getenv("PYSPARK_PYTHON") != null) { return System.getenv("PYSPARK_PYTHON"); } return "python"; } public ZeppelinContext getZeppelinContext() { if (sparkInterpreter != null) { return sparkInterpreter.getZeppelinContext(); } else { return null; } } public JavaSparkContext getJavaSparkContext() { if (sparkInterpreter == null) { return null; } else { return new JavaSparkContext(sparkInterpreter.getSparkContext()); } } public Object getSparkSession() { if (sparkInterpreter == null) { return null; } else { return sparkInterpreter.getSparkSession(); } } public SparkConf getSparkConf() { JavaSparkContext sc = getJavaSparkContext(); if (sc == null) { return null; } else { return sc.getConf(); } } public Object getSQLContext() { if (sparkInterpreter == null) { return null; } else { return sparkInterpreter.getSQLContext(); } } public boolean isSpark1() { return sparkInterpreter.getSparkVersion().getMajorVersion() == 1; } public boolean isSpark3() { return sparkInterpreter.getSparkVersion().getMajorVersion() == 3; } }
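// A minimal, self-contained sketch (hypothetical class, not part of Zeppelin) of the
// precedence chain that getPythonExec() above implements: properties win over
// environment variables, and driver-specific settings win over generic ones.
import java.util.Properties;

final class PythonExecResolutionExample {
    static String resolve(Properties props, java.util.Map<String, String> env) {
        // Documented order: spark.pyspark.driver.python > spark.pyspark.python
        // > PYSPARK_DRIVER_PYTHON > PYSPARK_PYTHON > plain "python".
        String v = props.getProperty("spark.pyspark.driver.python");
        if (v != null && !v.trim().isEmpty()) return v;
        v = props.getProperty("spark.pyspark.python");
        if (v != null && !v.trim().isEmpty()) return v;
        v = env.get("PYSPARK_DRIVER_PYTHON");
        if (v != null) return v;
        v = env.get("PYSPARK_PYTHON");
        if (v != null) return v;
        return "python";
    }

    public static void main(String[] args) {
        Properties props = new Properties();
        props.setProperty("spark.pyspark.python", "/opt/conda/bin/python");
        java.util.Map<String, String> env =
                java.util.Collections.singletonMap("PYSPARK_PYTHON", "python2.7");
        // The property outranks the environment variable:
        System.out.println(resolve(props, env)); // -> /opt/conda/bin/python
    }
}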
package mil.nga.giat.geowave.analytic.mapreduce.dbscan; import java.io.IOException; import java.io.Serializable; import java.util.Collection; import java.util.HashSet; import java.util.Set; import java.util.UUID; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.mapreduce.TaskCounter; import org.geotools.feature.type.BasicFeatureTypes; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import mil.nga.giat.geowave.analytic.IndependentJobRunner; import mil.nga.giat.geowave.analytic.PropertyManagement; import mil.nga.giat.geowave.analytic.mapreduce.GeoWaveInputFormatConfiguration; import mil.nga.giat.geowave.analytic.mapreduce.GeoWaveOutputFormatConfiguration; import mil.nga.giat.geowave.analytic.mapreduce.MapReduceJobController; import mil.nga.giat.geowave.analytic.mapreduce.MapReduceJobRunner; import mil.nga.giat.geowave.analytic.mapreduce.SequenceFileInputFormatConfiguration; import mil.nga.giat.geowave.analytic.mapreduce.SequenceFileOutputFormatConfiguration; import mil.nga.giat.geowave.analytic.mapreduce.clustering.runner.GeoWaveInputLoadJobRunner; import mil.nga.giat.geowave.analytic.mapreduce.nn.NNMapReduce.PassthruPartitioner; import mil.nga.giat.geowave.analytic.param.ClusteringParameters; import mil.nga.giat.geowave.analytic.param.ClusteringParameters.Clustering; import mil.nga.giat.geowave.analytic.param.FormatConfiguration; import mil.nga.giat.geowave.analytic.param.GlobalParameters; import mil.nga.giat.geowave.analytic.param.HullParameters; import mil.nga.giat.geowave.analytic.param.MapReduceParameters; import mil.nga.giat.geowave.analytic.param.OutputParameters; import mil.nga.giat.geowave.analytic.param.ParameterEnum; import mil.nga.giat.geowave.analytic.param.PartitionParameters; import mil.nga.giat.geowave.analytic.param.PartitionParameters.Partition; import mil.nga.giat.geowave.analytic.partitioner.OrthodromicDistancePartitioner; import mil.nga.giat.geowave.analytic.partitioner.Partitioner; /** * DBScan involves multiple iterations. The first iteration conceptually takes a * set of points and produces small clusters (nearest neighbors). Each * subsequent iteration merges clusters within a given distance from each other. * This process continues until no new clusters are created (merges do not occur). * * The first iteration places a constraint on the minimum number of neighbors. * Subsequent iterations do not have a minimum, since each of the clusters is * already vetted by the first iteration.
*/ public class DBScanIterationsJobRunner implements MapReduceJobRunner, IndependentJobRunner { protected static final Logger LOGGER = LoggerFactory.getLogger(DBScanIterationsJobRunner.class); DBScanJobRunner jobRunner = new DBScanJobRunner(); GeoWaveInputLoadJobRunner inputLoadRunner = new GeoWaveInputLoadJobRunner(); protected FormatConfiguration inputFormatConfiguration; protected int zoomLevel = 1; public DBScanIterationsJobRunner() { super(); inputFormatConfiguration = new GeoWaveInputFormatConfiguration(); jobRunner.setInputFormatConfiguration(inputFormatConfiguration); inputLoadRunner.setOutputFormatConfiguration(new GeoWaveOutputFormatConfiguration()); } public void setInputFormatConfiguration( final FormatConfiguration inputFormatConfiguration ) { this.inputFormatConfiguration = inputFormatConfiguration; } public void setReducerCount( final int reducerCount ) { jobRunner.setReducerCount(reducerCount); } protected void setZoomLevel( final int zoomLevel ) { this.zoomLevel = zoomLevel; } @Override public int run( final Configuration config, final PropertyManagement runTimeProperties ) throws Exception { runTimeProperties.storeIfEmpty( GlobalParameters.Global.BATCH_ID, UUID.randomUUID().toString()); FileSystem fs = null; try { fs = FileSystem.get(config); final String outputBaseDir = runTimeProperties.getPropertyAsString( MapReduceParameters.MRConfig.HDFS_BASE_DIR, "/tmp"); Path startPath = new Path( outputBaseDir + "/level_0"); if (fs.exists(startPath)) { fs.delete( startPath, true); } runTimeProperties.storeIfEmpty( Partition.PARTITIONER_CLASS, OrthodromicDistancePartitioner.class); final double maxDistance = runTimeProperties.getPropertyAsDouble( Partition.MAX_DISTANCE, 10); final double precisionDecreaseRate = runTimeProperties.getPropertyAsDouble( Partition.PARTITION_DECREASE_RATE, 0.15); double precisionFactor = runTimeProperties.getPropertyAsDouble( Partition.PARTITION_PRECISION, 1.0); runTimeProperties.storeIfEmpty( Partition.DISTANCE_THRESHOLDS, Double.toString(maxDistance)); final boolean overrideSecondary = runTimeProperties.hasProperty(Partition.SECONDARY_PARTITIONER_CLASS); if (!overrideSecondary) { final Serializable distances = runTimeProperties.get(Partition.DISTANCE_THRESHOLDS); String dstStr; if (distances == null) { dstStr = "0.000001"; } else { dstStr = distances.toString(); } final String distancesArray[] = dstStr.split(","); final double[] distancePerDimension = new double[distancesArray.length]; { int i = 0; for (final String eachDistance : distancesArray) { distancePerDimension[i++] = Double.valueOf(eachDistance); } } boolean secondary = precisionFactor < 1.0; double total = 1.0; for (final double dist : distancePerDimension) { total *= dist; } secondary |= (total >= (Math.pow( maxDistance, distancePerDimension.length) * 2.0)); if (secondary) { runTimeProperties.copy( Partition.PARTITIONER_CLASS, Partition.SECONDARY_PARTITIONER_CLASS); } } jobRunner.setInputFormatConfiguration(inputFormatConfiguration); jobRunner.setOutputFormatConfiguration(new SequenceFileOutputFormatConfiguration( startPath)); LOGGER.info( "Running with partition distance {}", maxDistance); final int initialStatus = jobRunner.run( config, runTimeProperties); if (initialStatus != 0) { return initialStatus; } precisionFactor = precisionFactor - precisionDecreaseRate; int maxIterationCount = runTimeProperties.getPropertyAsInt( ClusteringParameters.Clustering.MAX_ITERATIONS, 15); int iteration = 2; long lastRecordCount = 0; while ((maxIterationCount > 0) && (precisionFactor > 0)) { // context 
does not matter in this case try { final Partitioner<?> partitioner = runTimeProperties.getClassInstance( PartitionParameters.Partition.PARTITIONER_CLASS, Partitioner.class, OrthodromicDistancePartitioner.class); partitioner.initialize( Job.getInstance(config), partitioner.getClass()); } catch (final IllegalArgumentException argEx) { // this occurs if the partitioner decides that the distance // is // invalid (e.g. bigger than the map space). // In this case, we just exit out of the loop. // startPath has the final data LOGGER.info( "Distance is invalid", argEx); break; } catch (final Exception e1) { throw new IOException( e1); } final PropertyManagement localScopeProperties = new PropertyManagement( runTimeProperties); /** * Re-partitioning the fat geometries can force a large number * of partitions. The geometries end up being represented in * multiple partitions. Better to skip secondary partitioning. * 0.9 is a bit of a magic number. Ideally, it is based on the * area of the max distance cube divided by the area as defined * by threshold distances. However, looking up the partition * dimension space or assuming only two dimensions were both * undesirable. */ if ((precisionFactor <= 0.9) && !overrideSecondary) { localScopeProperties.store( Partition.SECONDARY_PARTITIONER_CLASS, PassthruPartitioner.class); } localScopeProperties.store( Partition.PARTITION_PRECISION, precisionFactor); jobRunner.setInputFormatConfiguration(new SequenceFileInputFormatConfiguration( startPath)); jobRunner.setFirstIteration(false); localScopeProperties.store( HullParameters.Hull.ZOOM_LEVEL, zoomLevel); localScopeProperties.store( HullParameters.Hull.ITERATION, iteration); localScopeProperties.storeIfEmpty( OutputParameters.Output.DATA_TYPE_ID, localScopeProperties.getPropertyAsString( HullParameters.Hull.DATA_TYPE_ID, "concave_hull")); // Set to zero to force each cluster to be moved into the next // iteration // even if no merge occurs localScopeProperties.store( ClusteringParameters.Clustering.MINIMUM_SIZE, 0); final Path nextPath = new Path( outputBaseDir + "/level_" + iteration); if (fs.exists(nextPath)) { fs.delete( nextPath, true); } jobRunner.setOutputFormatConfiguration(new SequenceFileOutputFormatConfiguration( nextPath)); final int status = jobRunner.run( config, localScopeProperties); if (status != 0) { return status; } final long currentOutputCount = jobRunner.getCounterValue(TaskCounter.REDUCE_OUTPUT_RECORDS); if (currentOutputCount == lastRecordCount) { maxIterationCount = 0; } lastRecordCount = currentOutputCount; startPath = nextPath; maxIterationCount--; precisionFactor -= precisionDecreaseRate; iteration++; } final PropertyManagement localScopeProperties = new PropertyManagement( runTimeProperties); localScopeProperties.storeIfEmpty( OutputParameters.Output.DATA_TYPE_ID, localScopeProperties.getPropertyAsString( HullParameters.Hull.DATA_TYPE_ID, "concave_hull")); localScopeProperties.storeIfEmpty( OutputParameters.Output.DATA_NAMESPACE_URI, localScopeProperties.getPropertyAsString( HullParameters.Hull.DATA_NAMESPACE_URI, BasicFeatureTypes.DEFAULT_NAMESPACE)); localScopeProperties.storeIfEmpty( OutputParameters.Output.INDEX_ID, localScopeProperties.get(HullParameters.Hull.INDEX_ID)); inputLoadRunner.setInputFormatConfiguration(new SequenceFileInputFormatConfiguration( startPath)); inputLoadRunner.run( config, runTimeProperties); } finally { if (fs != null) fs.close(); } return 0; } @Override public Collection<ParameterEnum<?>> getParameters() { final Set<ParameterEnum<?>> params = new
HashSet<ParameterEnum<?>>(); params.addAll(jobRunner.getParameters()); params.addAll(inputLoadRunner.getParameters()); params.add(Clustering.MAX_ITERATIONS); params.add(Partition.PARTITION_DECREASE_RATE); params.add(Partition.PARTITION_PRECISION); return params; } @Override public int run( final PropertyManagement runTimeProperties ) throws Exception { return this.run( MapReduceJobController.getConfiguration(runTimeProperties), runTimeProperties); } }
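// A standalone sketch of the termination logic in the run(...) loop above: the merge
// phase stops when the precision factor decays to zero, the iteration budget is spent,
// or (not modeled here) the reducer output count stops changing. Default values are the
// ones read from PropertyManagement above; the class name is hypothetical.
final class DbScanIterationScheduleExample {
    public static void main(String[] args) {
        double precisionFactor = 1.0;      // Partition.PARTITION_PRECISION default
        final double decreaseRate = 0.15;  // Partition.PARTITION_DECREASE_RATE default
        int maxIterationCount = 15;        // Clustering.MAX_ITERATIONS default
        int iteration = 2;                 // iteration 1 is the initial nearest-neighbor job
        precisionFactor -= decreaseRate;   // applied once after the first job, before the loop
        while ((maxIterationCount > 0) && (precisionFactor > 0)) {
            System.out.printf("iteration %d runs at precision %.2f%n", iteration, precisionFactor);
            maxIterationCount--;
            precisionFactor -= decreaseRate;
            iteration++;
        }
        // With these defaults the merge phase runs six times (precision 0.85 down to 0.10).
    }
}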
/*L * Copyright SAIC. * * Distributed under the OSI-approved BSD 3-Clause License. * See http://ncip.github.com/cagwas/LICENSE.txt for details. */ package gov.nih.nci.cagwas.web.action; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.util.ArrayList; import java.util.Collection; import org.apache.log4j.Logger; import org.apache.struts.action.ActionMessage; import org.apache.struts.action.ActionMessages; import gov.nih.nci.cagwas.web.form.SNPSearchForm; import gov.nih.nci.caintegrator.studyQueryService.dto.annotation.AnnotationCriteria; import gov.nih.nci.caintegrator.studyQueryService.dto.annotation.PhysicalPositionCriteria; import gov.nih.nci.caintegrator.studyQueryService.dto.germline.PanelCriteria; /** * The AnnotationCriteriaHelper class will build the annotation criteria * object for a search given a SNPSearchForm. * <P> * @author mholck */ public class AnnotationCriteriaHelper { private static Logger logger = Logger.getLogger(AnnotationCriteriaHelper.class); /** * The buildCriteria method will create and populate the AnnotationCriteria * needed for a search and will return this. * <P> * @param form The form that includes all the needed values for the annotation criteria * @param errors The ActionMessages used to report errors * @return The completed AnnotationCriteria object */ public AnnotationCriteria buildCriteria(SNPSearchForm form, ActionMessages errors) { AnnotationCriteria annoCrit = new AnnotationCriteria(); // Set the panel if any String panel = form.getPanel(); String panelName = form.getPanelName(); if ((panel != null) && (panel.length() >= 1)) { logger.debug("panel id is " + panel); PanelCriteria pc = new PanelCriteria(); pc.setSnpPanelID(new Long(panel)); if ((panelName != null) && (panelName.length() >= 1)) { logger.debug("panel name is " + panelName); pc.setName(panelName); } annoCrit.setPanelCriteria(pc); } // Setup the physical position criteria if any String chromosome = form.getChromosome(); if ((chromosome != null) && (chromosome.length() >= 1)) { logger.debug("chromosome is " + chromosome); // If there was a chromosome set then check the from and to criteria as well String chromFrom = form.getChromosomeFrom(); String chromTo = form.getChromosomeTo(); PhysicalPositionCriteria ppCrit = new PhysicalPositionCriteria(); ppCrit.setChromosome(chromosome); if ((chromFrom != null) && (chromFrom.length() >= 1)) { // Handle the from value based on units selected if (form.getFromUnit().equals("bp")) { logger.debug("from value " + chromFrom); ppCrit.setStartPosition(Long.parseLong(chromFrom)); } else if (form.getFromUnit().equals("kb")) { float value = Float.parseFloat(chromFrom); value *= 1000; logger.debug("from value " + value); ppCrit.setStartPosition(new Long((long)value)); } else if (form.getFromUnit().equals("mb")) { float value = Float.parseFloat(chromFrom); value *= 1000000; logger.debug("from value " + value); ppCrit.setStartPosition(new Long((long)value)); } } if ((chromTo != null) && (chromTo.length() >= 1)) { // Handle the to value based on units selected if (form.getToUnit().equals("bp")) { logger.debug("to value " + chromTo); ppCrit.setEndPosition(Long.parseLong(chromTo)); } else if (form.getToUnit().equals("kb")) { float value = Float.parseFloat(chromTo); value *= 1000; logger.debug("to value " + value); ppCrit.setEndPosition(new Long((long)value)); } else if (form.getToUnit().equals("mb")) { float value = Float.parseFloat(chromTo); value *= 1000000; logger.debug("to value " +
value); ppCrit.setEndPosition(new Long((long)value)); } } annoCrit.setPhysicalPositionCriteria(ppCrit); } // Setup the HUGO gene symbols if any if ((form.getHugoFile() != null) && (form.getHugoFile().getFileName().length() >= 1)) { logger.debug("HUGO Symbol File is " + form.getHugoFile().getFileName()); if ((form.getHugoFile().getFileName().endsWith(".txt") || form.getHugoFile().getFileName().endsWith(".TXT")) && (form.getHugoFile().getContentType().equals("text/plain"))) { Collection<String> geneSymbols = new ArrayList<String>(); try { InputStream stream = form.getHugoFile().getInputStream(); String inputLine = null; BufferedReader inFile = new BufferedReader( new InputStreamReader(stream)); int count = 0; while ((inputLine = inFile.readLine()) != null) { inputLine = inputLine.trim(); count++; logger.debug(inputLine.toUpperCase()); if(!inputLine.matches("")){ geneSymbols.add(inputLine.toUpperCase()); } } inFile.close(); if(geneSymbols.size() <= CagwasConstants.MAX_SYMBOLS){ annoCrit.setGeneSymbols(geneSymbols); } else { errors.add("hugoFile", new ActionMessage("error.hugo.count")); } } catch (IOException ex) { logger.error("Errors when uploading hugo file:" + ex.getMessage()); errors.add("hugoFile", new ActionMessage("error.file.access")); } } else { errors.add("hugoFile", new ActionMessage("error.hugofile.type")); } } else { Collection<String> geneSymbols = new ArrayList<String>(); String genes = form.getHugoList(); // Replace any commas with spaces so the split for whitespace will work String cleanGenes = genes.replace(',', ' '); // Now break the string into parts using whitespace as the separator String patternStr = "\\s"; String[] geneList = cleanGenes.split(patternStr); // Handle the case where the split says we have one record but it is empty if ((geneList.length >= 1) && (geneList[0].length() >= 1)) { // Then add them to the symbol list and pass to the search logger.debug("Gene list has " + geneList.length + " values :"); for (int i=0; i < geneList.length; i++) { String gene = geneList[i].trim(); logger.debug(gene.toUpperCase()); if(!gene.matches("")){ geneSymbols.add(gene.toUpperCase()); } } if(geneSymbols.size() <= CagwasConstants.MAX_SYMBOLS){ annoCrit.setGeneSymbols(geneSymbols); } else { errors.add("hugoFile", new ActionMessage("error.hugo.count")); } } } // Setup the dbSNP identifiers if any if ((form.getIdFile() != null) && (form.getIdFile().getFileName().length() >= 1)) { logger.debug("dbSNP ID File is " + form.getIdFile().getFileName()); if ((form.getIdFile().getFileName().endsWith(".txt") || form.getIdFile().getFileName().endsWith(".TXT")) && (form.getIdFile().getContentType().equals("text/plain"))) { Collection<String> dbSNPIds = new ArrayList<String>(); try { InputStream stream = form.getIdFile().getInputStream(); String inputLine = null; BufferedReader inFile = new BufferedReader( new InputStreamReader(stream)); int count = 0; while ((inputLine = inFile.readLine()) != null) { inputLine = inputLine.trim(); count++; logger.debug(inputLine.toLowerCase()); if(!inputLine.matches("")){ dbSNPIds.add(inputLine.toLowerCase()); } } inFile.close(); if(dbSNPIds.size() <= CagwasConstants.MAX_IDS){ annoCrit.setSnpIdentifiers(dbSNPIds); } else { errors.add("idFile", new ActionMessage("error.id.count")); } } catch (IOException ex) { logger.error("Errors when uploading id file:" + ex.getMessage()); errors.add("idFile", new ActionMessage("error.file.access")); } } else { errors.add("idFile", new ActionMessage("error.idfile.type")); } } else { Collection<String> dbSNPIds = new
ArrayList<String>(); String ids = form.getIdList(); // Replace any commas with spaces so the split for whitespace will work String cleanIDs = ids.replace(',', ' '); // Now break the string into parts using whitespace as the separator String patternStr = "\\s"; String[] idList = cleanIDs.split(patternStr); // Handle the case where the split says we have one record but it is empty if ((idList.length >= 1) && (idList[0].length() >= 1)) { // Then add them to the ID list and pass to the search logger.debug("dbSNP ID list has " + idList.length + " values :"); for (int i=0; i < idList.length; i++) { String id = idList[i].trim(); logger.debug(id.toLowerCase()); if(!id.matches("")){ dbSNPIds.add(id.toLowerCase()); } } if(dbSNPIds.size() <= CagwasConstants.MAX_IDS){ annoCrit.setSnpIdentifiers(dbSNPIds); } else { errors.add("idFile", new ActionMessage("error.id.count")); } } } return annoCrit; } }
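// A small helper sketch (hypothetical, not part of caGWAS) making the bp/kb/mb unit
// conversion in buildCriteria(...) explicit, and doing it in long arithmetic so large
// positions cannot be truncated by a narrowing int cast.
final class PositionUnitExample {
    static long toBasePairs(String value, String unit) {
        double v = Double.parseDouble(value);
        if ("bp".equals(unit)) return (long) v;
        if ("kb".equals(unit)) return (long) (v * 1000d);    // kilobases
        if ("mb".equals(unit)) return (long) (v * 1000000d); // megabases
        throw new IllegalArgumentException("Unknown unit: " + unit);
    }

    public static void main(String[] args) {
        System.out.println(toBasePairs("12.5", "kb")); // 12500
        System.out.println(toBasePairs("3", "mb"));    // 3000000
    }
}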
/* * Copyright 2012-2018 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.boot.actuate.web.mappings.servlet; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.stream.Collectors; import java.util.stream.Stream; import org.springframework.boot.actuate.web.mappings.HandlerMethodDescription; import org.springframework.boot.actuate.web.mappings.MappingDescriptionProvider; import org.springframework.context.ApplicationContext; import org.springframework.data.rest.webmvc.support.DelegatingHandlerMapping; import org.springframework.util.ClassUtils; import org.springframework.web.context.WebApplicationContext; import org.springframework.web.method.HandlerMethod; import org.springframework.web.servlet.DispatcherServlet; import org.springframework.web.servlet.HandlerMapping; import org.springframework.web.servlet.handler.AbstractUrlHandlerMapping; import org.springframework.web.servlet.mvc.method.RequestMappingInfo; import org.springframework.web.servlet.mvc.method.RequestMappingInfoHandlerMapping; /** * A {@link MappingDescriptionProvider} that introspects the {@link HandlerMapping * HandlerMappings} that are known to one or more {@link DispatcherServlet * DispatcherServlets}. * * @author Andy Wilkinson * @since 2.0.0 */ public class DispatcherServletsMappingDescriptionProvider implements MappingDescriptionProvider { private static final List<HandlerMappingDescriptionProvider<? extends HandlerMapping>> descriptionProviders; static { List<HandlerMappingDescriptionProvider<? 
extends HandlerMapping>> providers = new ArrayList<>(); providers.add(new RequestMappingInfoHandlerMappingDescriptionProvider()); providers.add(new UrlHandlerMappingDescriptionProvider()); if (ClassUtils.isPresent( "org.springframework.data.rest.webmvc.support.DelegatingHandlerMapping", null)) { providers.add(new DelegatingHandlerMappingDescriptionProvider( new ArrayList<>(providers))); } descriptionProviders = Collections.unmodifiableList(providers); } @Override public String getMappingName() { return "dispatcherServlets"; } @Override public Map<String, List<DispatcherServletMappingDescription>> describeMappings( ApplicationContext context) { if (context instanceof WebApplicationContext) { return describeMappings((WebApplicationContext) context); } return Collections.emptyMap(); } private Map<String, List<DispatcherServletMappingDescription>> describeMappings( WebApplicationContext context) { Map<String, List<DispatcherServletMappingDescription>> mappings = new HashMap<>(); context.getBeansOfType(DispatcherServlet.class) .forEach((name, dispatcherServlet) -> mappings.put(name, describeMappings(new DispatcherServletHandlerMappings(name, dispatcherServlet, context)))); return mappings; } private List<DispatcherServletMappingDescription> describeMappings( DispatcherServletHandlerMappings mappings) { return mappings.getHandlerMappings().stream().flatMap(this::describe) .collect(Collectors.toList()); } private <T extends HandlerMapping> Stream<DispatcherServletMappingDescription> describe( T handlerMapping) { return describe(handlerMapping, descriptionProviders).stream(); } @SuppressWarnings("unchecked") private static <T extends HandlerMapping> List<DispatcherServletMappingDescription> describe( T handlerMapping, List<HandlerMappingDescriptionProvider<?>> descriptionProviders) { for (HandlerMappingDescriptionProvider<?> descriptionProvider : descriptionProviders) { if (descriptionProvider.getMappingClass().isInstance(handlerMapping)) { return ((HandlerMappingDescriptionProvider<T>) descriptionProvider) .describe(handlerMapping); } } return Collections.emptyList(); } private interface HandlerMappingDescriptionProvider<T extends HandlerMapping> { Class<T> getMappingClass(); List<DispatcherServletMappingDescription> describe(T handlerMapping); } private static final class RequestMappingInfoHandlerMappingDescriptionProvider implements HandlerMappingDescriptionProvider<RequestMappingInfoHandlerMapping> { @Override public Class<RequestMappingInfoHandlerMapping> getMappingClass() { return RequestMappingInfoHandlerMapping.class; } @Override public List<DispatcherServletMappingDescription> describe( RequestMappingInfoHandlerMapping handlerMapping) { Map<RequestMappingInfo, HandlerMethod> handlerMethods = handlerMapping .getHandlerMethods(); return handlerMethods.entrySet().stream().map(this::describe) .collect(Collectors.toList()); } private DispatcherServletMappingDescription describe( Entry<RequestMappingInfo, HandlerMethod> mapping) { DispatcherServletMappingDetails mappingDetails = new DispatcherServletMappingDetails(); mappingDetails .setHandlerMethod(new HandlerMethodDescription(mapping.getValue())); mappingDetails.setRequestMappingConditions( new RequestMappingConditionsDescription(mapping.getKey())); return new DispatcherServletMappingDescription(mapping.getKey().toString(), mapping.getValue().toString(), mappingDetails); } } private static final class UrlHandlerMappingDescriptionProvider implements HandlerMappingDescriptionProvider<AbstractUrlHandlerMapping> { @Override public 
Class<AbstractUrlHandlerMapping> getMappingClass() { return AbstractUrlHandlerMapping.class; } @Override public List<DispatcherServletMappingDescription> describe( AbstractUrlHandlerMapping handlerMapping) { return handlerMapping.getHandlerMap().entrySet().stream().map(this::describe) .collect(Collectors.toList()); } private DispatcherServletMappingDescription describe( Entry<String, Object> mapping) { return new DispatcherServletMappingDescription(mapping.getKey(), mapping.getValue().toString(), null); } } private static final class DelegatingHandlerMappingDescriptionProvider implements HandlerMappingDescriptionProvider<DelegatingHandlerMapping> { private final List<HandlerMappingDescriptionProvider<?>> descriptionProviders; private DelegatingHandlerMappingDescriptionProvider( List<HandlerMappingDescriptionProvider<?>> descriptionProviders) { this.descriptionProviders = descriptionProviders; } @Override public Class<DelegatingHandlerMapping> getMappingClass() { return DelegatingHandlerMapping.class; } @Override public List<DispatcherServletMappingDescription> describe( DelegatingHandlerMapping handlerMapping) { List<DispatcherServletMappingDescription> descriptions = new ArrayList<>(); for (HandlerMapping delegate : handlerMapping.getDelegates()) { descriptions.addAll(DispatcherServletsMappingDescriptionProvider .describe(delegate, this.descriptionProviders)); } return descriptions; } } }
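// The describe(...) dispatch above is a first-match lookup over providers keyed by the
// runtime type they can handle. A stripped-down sketch of the same pattern follows;
// the Describer interface and all names here are hypothetical, not Spring API.
import java.util.Arrays;
import java.util.List;

final class TypeDispatchExample {
    interface Describer<T> {
        Class<T> handledType();
        String describe(T target);
    }

    // The first describer whose declared type matches the runtime type wins, so more
    // specific describers must be registered ahead of generic ones (as the static
    // initializer above does for the delegating provider).
    @SuppressWarnings("unchecked")
    static <T> String describe(T target, List<Describer<?>> describers) {
        for (Describer<?> d : describers) {
            if (d.handledType().isInstance(target)) {
                return ((Describer<T>) d).describe(target);
            }
        }
        return "<no describer registered>";
    }

    public static void main(String[] args) {
        Describer<String> strings = new Describer<String>() {
            public Class<String> handledType() { return String.class; }
            public String describe(String s) { return "String of length " + s.length(); }
        };
        List<Describer<?>> describers = Arrays.asList(strings);
        System.out.println(describe("hello", describers)); // -> String of length 5
    }
}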
package com.cloud.network; import com.cloud.acl.ControlledEntity; import com.cloud.api.Displayable; import com.cloud.api.Identity; import com.cloud.api.InternalIdentity; import com.cloud.network.Networks.BroadcastDomainType; import com.cloud.network.Networks.Mode; import com.cloud.network.Networks.TrafficType; import com.cloud.utils.fsm.StateMachine2; import com.cloud.utils.fsm.StateObject; import java.io.Serializable; import java.net.URI; import java.util.ArrayList; import java.util.List; /** * owned by an account. */ public interface Network extends ControlledEntity, StateObject<Network.State>, InternalIdentity, Identity, Serializable, Displayable { String getName(); Mode getMode(); BroadcastDomainType getBroadcastDomainType(); TrafficType getTrafficType(); public void setTrafficType(TrafficType type); String getGateway(); // "cidr" is the Cloudstack managed address space, all CloudStack managed vms get IP address from "cidr", // In general "cidr" also serves as the network CIDR // But in case IP reservation is configured for a Guest network, "networkcidr" is the Effective network CIDR for that network, // "cidr" will still continue to be the effective address space for CloudStack managed vms in that Guest network String getCidr(); // "networkcidr" is the network CIDR of the guest network which uses IP reservation. // It is the summation of "cidr" and the reservedIPrange(the address space used for non CloudStack purposes). // For networks not configured with IP reservation, "networkcidr" is always null String getNetworkCidr(); String getIp6Gateway(); String getIp6Cidr(); String getDns1(); String getDns2(); long getDataCenterId(); long getNetworkOfferingId(); @Override State getState(); boolean isRedundant(); long getRelated(); URI getBroadcastUri(); String getDisplayText(); String getReservationId(); String getNetworkDomain(); GuestType getGuestType(); Long getPhysicalNetworkId(); void setPhysicalNetworkId(Long physicalNetworkId); ACLType getAclType(); boolean isRestartRequired(); boolean getSpecifyIpRanges(); @Deprecated boolean getDisplayNetwork(); @Override boolean isDisplay(); String getGuruName(); /** * @return */ Long getVpcId(); Long getNetworkACLId(); void setNetworkACLId(Long networkACLId); boolean isStrechedL2Network(); enum GuestType { Shared, Isolated, Private } enum Event { ImplementNetwork, DestroyNetwork, OperationSucceeded, OperationFailed } enum State { Allocated("Indicates the network configuration is in allocated but not setup"), Setup("Indicates the network configuration is setup"), Implementing( "Indicates the network configuration is being implemented"), Implemented("Indicates the network configuration is in use"), Shutdown( "Indicates the network configuration is being destroyed"), Destroy("Indicates that the network is destroyed"); protected static final StateMachine2<State, Network.Event, Network> s_fsm = new StateMachine2<>(); static { s_fsm.addTransition(State.Allocated, Event.ImplementNetwork, State.Implementing); s_fsm.addTransition(State.Implementing, Event.OperationSucceeded, State.Implemented); s_fsm.addTransition(State.Implementing, Event.OperationFailed, State.Shutdown); s_fsm.addTransition(State.Implemented, Event.DestroyNetwork, State.Shutdown); s_fsm.addTransition(State.Shutdown, Event.OperationSucceeded, State.Allocated); s_fsm.addTransition(State.Shutdown, Event.OperationFailed, State.Shutdown); s_fsm.addTransition(State.Setup, Event.DestroyNetwork, State.Destroy); s_fsm.addTransition(State.Allocated, Event.DestroyNetwork, State.Destroy); } String 
_description; private State(final String description) { _description = description; } public static StateMachine2<State, Network.Event, Network> getStateMachine() { return s_fsm; } } class Service { private static final List<Service> supportedServices = new ArrayList<>(); public static final Service Vpn = new Service("Vpn", Capability.SupportedVpnProtocols, Capability.VpnTypes); public static final Service Dhcp = new Service("Dhcp"); public static final Service Dns = new Service("Dns", Capability.AllowDnsSuffixModification); public static final Service Gateway = new Service("Gateway"); public static final Service Firewall = new Service("Firewall", Capability.SupportedProtocols, Capability.MultipleIps, Capability.TrafficStatistics, Capability.SupportedTrafficDirection, Capability.SupportedEgressProtocols); public static final Service Lb = new Service("Lb", Capability.SupportedLBAlgorithms, Capability.SupportedLBIsolation, Capability.SupportedProtocols, Capability.TrafficStatistics, Capability.LoadBalancingSupportedIps, Capability.SupportedStickinessMethods, Capability.ElasticLb, Capability.LbSchemes); public static final Service UserData = new Service("UserData"); public static final Service SourceNat = new Service("SourceNat", Capability.SupportedSourceNatTypes, Capability.RedundantRouter); public static final Service StaticNat = new Service("StaticNat", Capability.ElasticIp); public static final Service PortForwarding = new Service("PortForwarding"); public static final Service SecurityGroup = new Service("SecurityGroup"); public static final Service NetworkACL = new Service("NetworkACL", Capability.SupportedProtocols); public static final Service Connectivity = new Service("Connectivity", Capability.DistributedRouter, Capability.RegionLevelVpc, Capability.StretchedL2Subnet); private final String name; private final Capability[] caps; public Service(final String name, final Capability... caps) { this.name = name; this.caps = caps; supportedServices.add(this); } public static Service getService(final String serviceName) { for (final Service service : supportedServices) { if (service.getName().equalsIgnoreCase(serviceName)) { return service; } } return null; } public String getName() { return name; } public static List<Service> listAllServices() { return supportedServices; } public Capability[] getCapabilities() { return caps; } public boolean containsCapability(final Capability cap) { boolean success = false; if (caps != null) { final int length = caps.length; for (int i = 0; i < length; i++) { if (caps[i].getName().equalsIgnoreCase(cap.getName())) { success = true; break; } } } return success; } } /** * Provider -> NetworkElement must always be one-to-one mapping. Thus for each NetworkElement we need a separate Provider added in here. 
*/ class Provider { private static final List<Provider> supportedProviders = new ArrayList<>(); public static final Provider VirtualRouter = new Provider("VirtualRouter", false, false); public static final Provider ExternalDhcpServer = new Provider("ExternalDhcpServer", true); public static final Provider ExternalGateWay = new Provider("ExternalGateWay", true); public static final Provider ElasticLoadBalancerVm = new Provider("ElasticLoadBalancerVm", false); public static final Provider SecurityGroupProvider = new Provider("SecurityGroupProvider", false); public static final Provider VPCVirtualRouter = new Provider("VpcVirtualRouter", false); public static final Provider None = new Provider("None", false); // NiciraNvp is not an "External" provider, otherwise we get in trouble with NetworkServiceImpl.providersConfiguredForExternalNetworking public static final Provider NiciraNvp = new Provider("NiciraNvp", false); public static final Provider InternalLbVm = new Provider("InternalLbVm", false); private final String name; private final boolean isExternal; // set to true if, on network shutdown, resources (acquired/configured at the implemented phase) need to be cleaned up; set to false // if no clean-up is required (e.g. for appliance-based providers like VirtualRouter the VM is destroyed, so there is no need to clean up). private final boolean needCleanupOnShutdown; public Provider(final String name, final boolean isExternal) { this.name = name; this.isExternal = isExternal; needCleanupOnShutdown = true; supportedProviders.add(this); } public Provider(final String name, final boolean isExternal, final boolean needCleanupOnShutdown) { this.name = name; this.isExternal = isExternal; this.needCleanupOnShutdown = needCleanupOnShutdown; supportedProviders.add(this); } public static Provider getProvider(final String providerName) { for (final Provider provider : supportedProviders) { if (provider.getName().equalsIgnoreCase(providerName)) { return provider; } } return null; } public String getName() { return name; } public boolean isExternal() { return isExternal; } public boolean cleanupNeededOnShutdown() { return needCleanupOnShutdown; } } class Capability { private static final List<Capability> supportedCapabilities = new ArrayList<>(); public static final Capability SupportedProtocols = new Capability("SupportedProtocols"); public static final Capability SupportedLBAlgorithms = new Capability("SupportedLbAlgorithms"); public static final Capability SupportedLBIsolation = new Capability("SupportedLBIsolation"); public static final Capability SupportedStickinessMethods = new Capability("SupportedStickinessMethods"); public static final Capability MultipleIps = new Capability("MultipleIps"); public static final Capability SupportedSourceNatTypes = new Capability("SupportedSourceNatTypes"); public static final Capability SupportedVpnProtocols = new Capability("SupportedVpnTypes"); public static final Capability VpnTypes = new Capability("VpnTypes"); public static final Capability TrafficStatistics = new Capability("TrafficStatistics"); public static final Capability LoadBalancingSupportedIps = new Capability("LoadBalancingSupportedIps"); public static final Capability AllowDnsSuffixModification = new Capability("AllowDnsSuffixModification"); public static final Capability RedundantRouter = new Capability("RedundantRouter"); public static final Capability ElasticIp = new Capability("ElasticIp"); public static final Capability AssociatePublicIP = new Capability("AssociatePublicIP"); public static final
Capability ElasticLb = new Capability("ElasticLb"); public static final Capability AutoScaleCounters = new Capability("AutoScaleCounters"); public static final Capability InlineMode = new Capability("InlineMode"); public static final Capability SupportedTrafficDirection = new Capability("SupportedTrafficDirection"); public static final Capability SupportedEgressProtocols = new Capability("SupportedEgressProtocols"); public static final Capability HealthCheckPolicy = new Capability("HealthCheckPolicy"); public static final Capability SslTermination = new Capability("SslTermination"); public static final Capability LbSchemes = new Capability("LbSchemes"); public static final Capability DhcpAccrossMultipleSubnets = new Capability("DhcpAccrossMultipleSubnets"); public static final Capability DistributedRouter = new Capability("DistributedRouter"); public static final Capability StretchedL2Subnet = new Capability("StretchedL2Subnet"); public static final Capability RegionLevelVpc = new Capability("RegionLevelVpc"); private final String name; public Capability(final String name) { this.name = name; supportedCapabilities.add(this); } public static Capability getCapability(final String capabilityName) { for (final Capability capability : supportedCapabilities) { if (capability.getName().equalsIgnoreCase(capabilityName)) { return capability; } } return null; } public String getName() { return name; } } class IpAddresses { private String ip4Address; private String ip6Address; public IpAddresses(final String ip4Address, final String ip6Address) { setIp4Address(ip4Address); setIp6Address(ip6Address); } public String getIp4Address() { return ip4Address; } public void setIp4Address(final String ip4Address) { this.ip4Address = ip4Address; } public String getIp6Address() { return ip6Address; } public void setIp6Address(final String ip6Address) { this.ip6Address = ip6Address; } } }
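// A JDK-only sketch of the lifecycle encoded by Network.State's transition table above
// (Allocated -> Implementing -> Implemented -> Shutdown -> Allocated, plus the Destroy
// paths). This stands in for CloudStack's StateMachine2, whose implementation is not
// shown here; class and method names below are hypothetical.
import java.util.EnumMap;
import java.util.Map;

final class NetworkLifecycleExample {
    enum S { Allocated, Setup, Implementing, Implemented, Shutdown, Destroy }
    enum E { ImplementNetwork, DestroyNetwork, OperationSucceeded, OperationFailed }

    private static final Map<S, Map<E, S>> FSM = new EnumMap<>(S.class);
    private static void arc(S from, E on, S to) {
        FSM.computeIfAbsent(from, k -> new EnumMap<>(E.class)).put(on, to);
    }
    static {
        // Mirrors the s_fsm.addTransition(...) calls in Network.State above.
        arc(S.Allocated, E.ImplementNetwork, S.Implementing);
        arc(S.Implementing, E.OperationSucceeded, S.Implemented);
        arc(S.Implementing, E.OperationFailed, S.Shutdown);
        arc(S.Implemented, E.DestroyNetwork, S.Shutdown);
        arc(S.Shutdown, E.OperationSucceeded, S.Allocated);
        arc(S.Shutdown, E.OperationFailed, S.Shutdown);
        arc(S.Setup, E.DestroyNetwork, S.Destroy);
        arc(S.Allocated, E.DestroyNetwork, S.Destroy);
    }

    public static void main(String[] args) {
        S state = S.Allocated;
        state = FSM.get(state).get(E.ImplementNetwork);   // Implementing
        state = FSM.get(state).get(E.OperationSucceeded); // Implemented
        System.out.println(state);                        // -> Implemented
    }
}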
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.metamodel.util; import java.io.BufferedInputStream; import java.io.BufferedOutputStream; import java.io.BufferedReader; import java.io.BufferedWriter; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.Flushable; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.OutputStream; import java.io.OutputStreamWriter; import java.io.PushbackInputStream; import java.io.Reader; import java.io.Writer; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Various helper methods for handling files */ public final class FileHelper { private final static Logger logger = LoggerFactory.getLogger(FileHelper.class); public static final String UTF_8_ENCODING = "UTF-8"; public static final String UTF_16_ENCODING = "UTF-16"; public static final String US_ASCII_ENCODING = "US-ASCII"; public static final String ISO_8859_1_ENCODING = "ISO_8859_1"; public static final String DEFAULT_ENCODING = UTF_8_ENCODING; private FileHelper() { // prevent instantiation } public static File createTempFile(String prefix, String suffix) { try { return File.createTempFile(prefix, suffix); } catch (IOException e) { logger.error("Could not create tempFile", e); File tempDir = getTempDir(); return new File(tempDir, prefix + '.' 
+ suffix); } } public static File getTempDir() { File result = null; String tmpDirPath = System.getProperty("java.io.tmpdir"); if (tmpDirPath != null && !"".equals(tmpDirPath)) { result = new File(tmpDirPath); } else { logger.debug("Could not determine tmpdir by using environment variable."); try { File file = File.createTempFile("foo", "bar"); result = file.getParentFile(); if (!file.delete()) { logger.warn("Could not delete temp file '{}'", file.getAbsolutePath()); } } catch (IOException e) { logger.error("Could not create tempFile in order to find temporary dir", e); result = new File("metamodel.tmp.dir"); if (!result.mkdir()) { throw new IllegalStateException("Could not create directory for temporary files: " + result.getName()); } result.deleteOnExit(); } } if (logger.isInfoEnabled()) { logger.info("Using '{}' as tmpdir.", result.getAbsolutePath()); } return result; } public static Writer getWriter(File file, String encoding, boolean append) throws IllegalStateException { boolean insertBom = !append; return getWriter(file, encoding, append, insertBom); } public static Writer getWriter(OutputStream outputStream, String encoding) throws IllegalStateException { return getWriter(outputStream, encoding, false); } public static Writer getWriter(OutputStream outputStream, String encoding, boolean insertBom) throws IllegalStateException { if (!(outputStream instanceof BufferedOutputStream)) { outputStream = new BufferedOutputStream(outputStream); } try { if (insertBom) { Writer writer = new UnicodeWriter(outputStream, encoding); return writer; } else { Writer writer = new OutputStreamWriter(outputStream, encoding); return writer; } } catch (Exception e) { throw new IllegalStateException(e); } } public static Writer getWriter(File file, String encoding, boolean append, boolean insertBom) throws IllegalStateException { if (append && insertBom) { throw new IllegalArgumentException("Can not insert BOM into appending writer"); } final OutputStream outputStream = getOutputStream(file, append); return getWriter(outputStream, encoding, insertBom); } public static Writer getWriter(File file, String encoding) throws IllegalStateException { return getWriter(file, encoding, false); } public static Reader getReader(InputStream inputStream, String encoding) throws IllegalStateException { try { if (encoding == null || encoding.toLowerCase().indexOf("utf") != -1) { final byte bom[] = new byte[4]; int unread; // auto-detect byte-order-mark @SuppressWarnings("resource") final PushbackInputStream pushbackInputStream = new PushbackInputStream(inputStream, bom.length); final int n = pushbackInputStream.read(bom, 0, bom.length); // Read ahead four bytes and check for BOM marks. 
// check the 4-byte marks before the 2-byte ones, otherwise a UTF-32LE BOM (FF FE 00 00) would be misdetected as UTF-16LE (FF FE) if ((bom[0] == (byte) 0xEF) && (bom[1] == (byte) 0xBB) && (bom[2] == (byte) 0xBF)) { encoding = "UTF-8"; unread = n - 3; } else if ((bom[0] == (byte) 0x00) && (bom[1] == (byte) 0x00) && (bom[2] == (byte) 0xFE) && (bom[3] == (byte) 0xFF)) { encoding = "UTF-32BE"; unread = n - 4; } else if ((bom[0] == (byte) 0xFF) && (bom[1] == (byte) 0xFE) && (bom[2] == (byte) 0x00) && (bom[3] == (byte) 0x00)) { encoding = "UTF-32LE"; unread = n - 4; } else if ((bom[0] == (byte) 0xFE) && (bom[1] == (byte) 0xFF)) { encoding = "UTF-16BE"; unread = n - 2; } else if ((bom[0] == (byte) 0xFF) && (bom[1] == (byte) 0xFE)) { encoding = "UTF-16LE"; unread = n - 2; } else { unread = n; } if (unread > 0) { pushbackInputStream.unread(bom, (n - unread), unread); } else if (unread < -1) { pushbackInputStream.unread(bom, 0, 0); } inputStream = pushbackInputStream; } final InputStreamReader inputStreamReader; if (encoding == null) { inputStreamReader = new InputStreamReader(inputStream); } else { inputStreamReader = new InputStreamReader(inputStream, encoding); } return inputStreamReader; } catch (IOException e) { throw new IllegalStateException(e); } } public static Reader getReader(File file, String encoding) throws IllegalStateException { final InputStream inputStream; try { inputStream = new BufferedInputStream(new FileInputStream(file)); } catch (IOException e) { throw new IllegalStateException(e); } return getReader(inputStream, encoding); } public static String readInputStreamAsString(InputStream inputStream, String encoding) throws IllegalStateException { Reader reader = getReader(inputStream, encoding); return readAsString(reader); } public static String readFileAsString(File file, String encoding) throws IllegalStateException { Reader br = getReader(file, encoding); return readAsString(br); } public static String readAsString(Reader reader) throws IllegalStateException { final BufferedReader br = getBufferedReader(reader); try { StringBuilder sb = new StringBuilder(); boolean firstLine = true; for (String line = br.readLine(); line != null; line = br.readLine()) { if (firstLine) { firstLine = false; } else { sb.append('\n'); } sb.append(line); } return sb.toString(); } catch (Exception e) { throw new IllegalStateException(e); } finally { safeClose(br, reader); } } public static void safeClose(Object... objects) { boolean debugEnabled = logger.isDebugEnabled(); if (objects == null || objects.length == 0) { logger.info("safeClose(...)
was invoked with null or empty array: {}", objects); return; } for (Object obj : objects) { if (obj != null) { if (debugEnabled) { logger.debug("Trying to safely close {}", obj); } if (obj instanceof Flushable) { try { ((Flushable) obj).flush(); } catch (Exception e) { if (debugEnabled) { logger.debug("Flushing Flushable failed", e); } } } if (obj instanceof AutoCloseable) { try { ((AutoCloseable) obj).close(); } catch (Exception e) { if (debugEnabled) { logger.debug("Closing AutoCloseable failed", e); } } } else { logger.info("obj was not AutoCloseable, trying to find close() method via reflection."); try { Method method = obj.getClass().getMethod("close", new Class[0]); if (method == null) { logger.info("obj did not have a close() method, ignoring"); } else { method.setAccessible(true); method.invoke(obj); } } catch (InvocationTargetException e) { logger.warn("Invoking close() by reflection threw exception", e); } catch (Exception e) { logger.warn("Could not invoke close() by reflection", e); } } } } } public static BufferedWriter getBufferedWriter(File file, String encoding) throws IllegalStateException { Writer writer = getWriter(file, encoding); return new BufferedWriter(writer); } public static BufferedReader getBufferedReader(File file, String encoding) throws IllegalStateException { Reader reader = getReader(file, encoding); return new BufferedReader(reader); } public static BufferedReader getBufferedReader(InputStream inputStream, String encoding) throws IllegalStateException { Reader reader = getReader(inputStream, encoding); return new BufferedReader(reader); } public static Reader getReader(File file) throws IllegalStateException { return getReader(file, DEFAULT_ENCODING); } public static String readFileAsString(File file) throws IllegalStateException { return readFileAsString(file, DEFAULT_ENCODING); } public static BufferedWriter getBufferedWriter(File file) throws IllegalStateException { return getBufferedWriter(file, DEFAULT_ENCODING); } public static Writer getWriter(File file) throws IllegalStateException { return getWriter(file, DEFAULT_ENCODING); } public static void writeString(OutputStream outputStream, String string) throws IllegalStateException { writeString(outputStream, string, DEFAULT_ENCODING); } public static void writeString(OutputStream outputStream, String string, String encoding) throws IllegalStateException { final Writer writer = getWriter(outputStream, encoding); writeString(writer, string, encoding); } public static void writeString(Writer writer, String string) throws IllegalStateException { writeString(writer, string, DEFAULT_ENCODING); } public static void writeString(Writer writer, String string, String encoding) throws IllegalStateException { try { writer.write(string); } catch (Exception e) { throw new IllegalStateException(e); } finally { safeClose(writer); } } public static void writeStringAsFile(File file, String string) throws IllegalStateException { writeStringAsFile(file, string, DEFAULT_ENCODING); } public static void writeStringAsFile(File file, String string, String encoding) throws IllegalStateException { final BufferedWriter bw = getBufferedWriter(file, encoding); writeString(bw, string, encoding); } public static BufferedReader getBufferedReader(File file) throws IllegalStateException { return getBufferedReader(file, DEFAULT_ENCODING); } public static void copy(Reader reader, Writer writer) throws IllegalStateException { final BufferedReader bufferedReader = getBufferedReader(reader); try { boolean firstLine = true; for (String line = 
bufferedReader.readLine(); line != null; line = bufferedReader.readLine()) { if (firstLine) { firstLine = false; } else { writer.write('\n'); } writer.write(line); } } catch (IOException e) { throw new IllegalStateException(e); } } public static BufferedReader getBufferedReader(Reader reader) { if (reader instanceof BufferedReader) { return (BufferedReader) reader; } return new BufferedReader(reader); } public static void copy(InputStream fromStream, OutputStream toStream) throws IllegalStateException { try { byte[] buffer = new byte[1024 * 32]; for (int read = fromStream.read(buffer); read != -1; read = fromStream.read(buffer)) { toStream.write(buffer, 0, read); } } catch (IOException e) { throw new IllegalStateException(e); } } public static void copy(Resource from, Resource to) throws IllegalStateException { assert from.isExists(); final InputStream in = from.read(); try { final OutputStream out = to.write(); try { copy(in, out); } finally { safeClose(out); } } finally { safeClose(in); } } public static void copy(File from, File to) throws IllegalStateException { assert from.exists(); final InputStream in = getInputStream(from); try { final OutputStream out = getOutputStream(to); try { copy(in, out); } finally { safeClose(out); } } finally { safeClose(in); } } public static OutputStream getOutputStream(File file) throws IllegalStateException { return getOutputStream(file, false); } public static OutputStream getOutputStream(File file, boolean append) { try { return new BufferedOutputStream(new FileOutputStream(file, append)); } catch (FileNotFoundException e) { throw new IllegalStateException(e); } } public static InputStream getInputStream(File file) throws IllegalStateException { try { return new BufferedInputStream(new FileInputStream(file)); } catch (FileNotFoundException e) { throw new IllegalStateException(e); } } public static byte[] readAsBytes(InputStream inputStream) { final ByteArrayOutputStream baos = new ByteArrayOutputStream(); try { copy(inputStream, baos); } finally { safeClose(inputStream); } return baos.toByteArray(); } }
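// An in-memory exercise (JDK only, hypothetical class name) of the BOM sniffing that
// FileHelper.getReader(InputStream, String) performs above: read up to four bytes,
// match a byte-order mark, and push any non-BOM bytes back before decoding.
import java.io.ByteArrayInputStream;
import java.io.PushbackInputStream;

final class BomSniffExample {
    public static void main(String[] args) throws Exception {
        // UTF-8 BOM (EF BB BF) followed by the ASCII bytes of "hello".
        byte[] data = { (byte) 0xEF, (byte) 0xBB, (byte) 0xBF, 'h', 'e', 'l', 'l', 'o' };
        PushbackInputStream in = new PushbackInputStream(new ByteArrayInputStream(data), 4);
        byte[] bom = new byte[4];
        int n = in.read(bom, 0, bom.length); // n == 4 here
        String encoding = "UTF-8";           // fallback when no BOM is found
        int unread = n;
        if (n >= 3 && bom[0] == (byte) 0xEF && bom[1] == (byte) 0xBB && bom[2] == (byte) 0xBF) {
            unread = n - 3; // everything after the 3-byte BOM must be pushed back
        }
        if (unread > 0) {
            in.unread(bom, n - unread, unread); // push back the sniffed non-BOM byte(s)
        }
        byte[] rest = new byte[8];
        int read = in.read(rest);
        System.out.println(new String(rest, 0, read, encoding)); // -> hello
    }
}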
/* * Copyright (C) 2011-2012 Dr. John Lindsay <jlindsay@uoguelph.ca> * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package plugins; import java.io.BufferedWriter; import java.io.File; import java.io.FileWriter; import java.io.PrintWriter; import whitebox.geospatialfiles.LASReader; import whitebox.geospatialfiles.LASReader.PointRecord; import whitebox.interfaces.WhiteboxPlugin; import whitebox.interfaces.WhiteboxPluginHost; /** * This tool can be used to convert a LAS file, containing LiDAR data, into an equivalent ASCII text file. * * @author Dr. John Lindsay email: jlindsay@uoguelph.ca */ public class LAS2ASCII implements WhiteboxPlugin { private WhiteboxPluginHost myHost = null; private String[] args; /** * Used to retrieve the plugin tool's name. This is a short, unique name * containing no spaces. * * @return String containing plugin name. */ @Override public String getName() { return "LAS2ASCII"; } /** * Used to retrieve the plugin tool's descriptive name. This can be a longer * name (containing spaces) and is used in the interface to list the tool. * * @return String containing the plugin descriptive name. */ @Override public String getDescriptiveName() { return "Convert LAS to ASCII (LAS2ASCII)"; } /** * Used to retrieve a short description of what the plugin tool does. * * @return String containing the plugin's description. */ @Override public String getToolDescription() { return "Converts a LAS file into an ASCII text file."; } /** * Used to identify which toolboxes this plugin tool should be listed in. * * @return Array of Strings. */ @Override public String[] getToolbox() { String[] ret = {"LidarTools", "ConversionTools"}; return ret; } /** * Sets the WhiteboxPluginHost to which the plugin tool is tied. This is the * class that the plugin will send all feedback messages, progress updates, * and return objects. * * @param host The WhiteboxPluginHost that called the plugin tool. */ @Override public void setPluginHost(WhiteboxPluginHost host) { myHost = host; } /** * Used to communicate feedback pop-up messages between a plugin tool and * the main Whitebox user-interface. * * @param feedback String containing the text to display. */ private void showFeedback(String message) { if (myHost != null) { myHost.showFeedback(message); } else { System.out.println(message); } } /** * Used to communicate a return object from a plugin tool to the main * Whitebox user-interface. * * @return Object, such as an output WhiteboxRaster. */ private void returnData(Object ret) { if (myHost != null) { myHost.returnData(ret); } } private int previousProgress = 0; private String previousProgressLabel = ""; /** * Used to communicate a progress update between a plugin tool and the main * Whitebox user interface. * * @param progressLabel A String to use for the progress label. * @param progress Float containing the progress value (between 0 and 100). 
*/ private void updateProgress(String progressLabel, int progress) { if (myHost != null && ((progress != previousProgress) || (!progressLabel.equals(previousProgressLabel)))) { myHost.updateProgress(progressLabel, progress); } previousProgress = progress; previousProgressLabel = progressLabel; } /** * Used to communicate a progress update between a plugin tool and the main * Whitebox user interface. * * @param progress Float containing the progress value (between 0 and 100). */ private void updateProgress(int progress) { if (myHost != null && progress != previousProgress) { myHost.updateProgress(progress); } previousProgress = progress; } /** * Sets the arguments (parameters) used by the plugin. * * @param args An array of string arguments. */ @Override public void setArgs(String[] args) { this.args = args.clone(); } private boolean cancelOp = false; /** * Used to communicate a cancel operation from the Whitebox GUI. * * @param cancel Set to true if the plugin should be canceled. */ @Override public void setCancelOp(boolean cancel) { cancelOp = cancel; } private void cancelOperation() { showFeedback("Operation cancelled."); updateProgress("Progress: ", 0); } private boolean amIActive = false; /** * Used by the Whitebox GUI to tell if this plugin is still running. * * @return a boolean describing whether or not the plugin is actively being * used. */ @Override public boolean isActive() { return amIActive; } /** * Used to execute this plugin tool. */ @Override public void run() { amIActive = true; String inputFilesString = null; String[] pointFiles; double x, y; double z; int intensity; byte classValue, numReturns, returnNum; int a, n; int progress; int numPoints; FileWriter fw = null; BufferedWriter bw = null; PrintWriter out = null; // get the arguments if (args.length <= 0) { showFeedback("Plugin parameters have not been set."); return; } inputFilesString = args[0]; // check to see that the inputHeader and outputHeader are not null. if ((inputFilesString.length() <= 0)) { showFeedback("One or more of the input parameters have not been set properly."); return; } try { pointFiles = inputFilesString.split(";"); int numPointFiles = pointFiles.length; long numPointsInFile = 0; PointRecord point; //PointRecColours pointColours; for (int j = 0; j < numPointFiles; j++) { LASReader las = new LASReader(pointFiles[j]); long oneHundredthTotal = las.getNumPointRecords() / 100; // create the new text file File file = new File(pointFiles[j].replace(".las", ".txt")); if (file.exists()) { file.delete(); } fw = new FileWriter(file, false); bw = new BufferedWriter(fw); out = new PrintWriter(bw, true); progress = (int)((j + 1) * 100d / numPointFiles); updateProgress("Loop " + (j + 1) + " of " + numPointFiles + ":", progress); numPointsInFile = las.getNumPointRecords(); // first count how many valid points there are. 
numPoints = 0; n = 0; progress = 0; for (a = 0; a < numPointsInFile; a++) { point = las.getPointRecord(a); if (!point.isPointWithheld()) { x = point.getX(); y = point.getY(); z = point.getZ(); intensity = point.getIntensity(); classValue = point.getClassification(); returnNum = point.getReturnNumber(); numReturns = point.getNumberOfReturns(); out.println((a + 1) + " " + x + " " + y + " " + z + " " + intensity + " " + classValue + " " + returnNum + " " + numReturns); numPoints++; } n++; if (n >= oneHundredthTotal) { n = 0; if (cancelOp) { cancelOperation(); return; } progress++; updateProgress("Loop " + (j + 1) + " of " + numPointFiles + ":", progress); } } } } catch (OutOfMemoryError oe) { myHost.showFeedback("An out-of-memory error has occurred during operation."); } catch (Exception e) { myHost.showFeedback("An error has occurred during operation. See log file for details."); myHost.logException("Error in " + getDescriptiveName(), e); } finally { if (out != null) { // closing the PrintWriter also closes the wrapped BufferedWriter and FileWriter out.flush(); out.close(); } updateProgress("Progress: ", 0); // tells the main application that this process is completed. amIActive = false; myHost.pluginComplete(); } } // // this is only used for debugging the tool // public static void main(String[] args) { // LAS2ASCII L2A = new LAS2ASCII(); // args = new String[1]; // args[0] = "/Users/johnlindsay/Documents/Data/Rondeau LiDAR/LAS classified/403_4696.las"; // L2A.setArgs(args); // L2A.run(); // // } }
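The records written above follow a fixed field order: point number, x, y, z, intensity, classification, return number, number of returns. As a minimal sketch (not part of the Whitebox source; the class name and the sample line are illustrative), one line of the generated .txt file can be parsed back like this:

import java.util.Arrays;

public class LasAsciiRecordSketch {
    // Parses a single line produced by LAS2ASCII, e.g. "1 443265.3 4652467.1 201.5 120 2 1 2".
    public static double[] parse(String line) {
        String[] tokens = line.trim().split("\\s+");
        double[] fields = new double[tokens.length];
        for (int i = 0; i < tokens.length; i++) {
            fields[i] = Double.parseDouble(tokens[i]);
        }
        return fields;
    }

    public static void main(String[] args) {
        System.out.println(Arrays.toString(parse("1 443265.3 4652467.1 201.5 120 2 1 2")));
    }
}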
package org.gwtbootstrap3.extras.summernote.client.ui.base; /* * #%L * GwtBootstrap3 * %% * Copyright (C) 2013 - 2014 GwtBootstrap3 * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import com.google.gwt.core.client.JavaScriptObject; import com.google.gwt.core.client.ScriptInjector; import com.google.gwt.dom.client.Element; import com.google.gwt.user.client.Event; import com.google.web.bindery.event.shared.HandlerRegistration; import org.gwtbootstrap3.client.ui.TextArea; import org.gwtbootstrap3.extras.summernote.client.event.*; import org.gwtbootstrap3.extras.summernote.client.ui.SummernoteLanguage; /** * Wrapper for the Summernote WYSIWYG Editor * <p/> * See: http://hackerwins.github.io/summernote/ * * @author godi */ public class SummernoteBase extends TextArea { /** * Default settings */ private int height = 100; private boolean hasFocus = false; private String code; private Toolbar toolbar = buildDefaultToolbar(); private boolean styleWithSpan = true; private SummernoteLanguage language = SummernoteLanguage.EN; public SummernoteBase() { } public void setHeight(final int height) { this.height = height; } public void setHasFocus(final boolean hasFocus) { this.hasFocus = hasFocus; } public void setToolbar(final Toolbar toolbar) { this.toolbar = toolbar; } public void setCode(final String code) { this.code = code; } public void setStyleWithSpan(final boolean styleWithSpan) { this.styleWithSpan = styleWithSpan; } public void setLanguage(final SummernoteLanguage language) { this.language = language; // Inject the JS for the language if (language.getJs() != null) { ScriptInjector.fromString(language.getJs().getText()).setWindow(ScriptInjector.TOP_WINDOW).inject(); } reconfigure(); } public SummernoteLanguage getLanguage() { return language; } public HandlerRegistration addInitializedHandler(final SummernoteInitializedHandler handler) { return addHandler(handler, SummernoteInitializedEvent.getType()); } public HandlerRegistration addBlurHandler(final SummernoteOnBlurHandler handler) { return addHandler(handler, SummernoteOnBlurEvent.getType()); } public HandlerRegistration addEnterHandler(final SummernoteOnEnterHandler handler) { return addHandler(handler, SummernoteOnEnterEvent.getType()); } public HandlerRegistration addFocusHandler(final SummernoteOnFocusHandler handler) { return addHandler(handler, SummernoteOnFocusEvent.getType()); } public HandlerRegistration addImageUploadHandler(final SummernoteOnImageUploadHandler handler) { return addHandler(handler, SummernoteOnImageUploadEvent.getType()); } public HandlerRegistration addKeyDownHandler(final SummernoteOnKeyDownHandler handler) { return addHandler(handler, SummernoteOnKeyDownEvent.getType()); } public HandlerRegistration addKeyUpHandler(final SummernoteOnKeyUpHandler handler) { return addHandler(handler, SummernoteOnKeyUpEvent.getType()); } public HandlerRegistration addPasteHandler(final SummernoteOnPasteHandler handler) { return addHandler(handler, SummernoteOnPasteEvent.getType()); } /** * Gets the HTML code generated from the editor * 
* @return generated code */ public String getCode() { return getCode(getElement()); } @Override public String getText() { return getCode(getElement()); } @Override public void setText(String text) { setCode(getElement(), text); } /** * Call this when updating settings to ensure everything is up to date */ public void reconfigure() { destroy(getElement()); initialize(); } private Toolbar buildDefaultToolbar() { return new Toolbar().toggleAll(true); } private void initialize() { initialize(getElement(), height, hasFocus, styleWithSpan, toolbar.build(), language.getCode()); if (code != null) { setCode(getElement(), code); } } @Override protected void onLoad() { super.onLoad(); // Initialize initialize(); } @Override protected void onUnload() { super.onUnload(); // Destroy destroy(getElement()); } protected void onInitialize(final Event evt) { fireEvent(new SummernoteInitializedEvent(this, evt)); } protected void onBlur(final Event evt) { fireEvent(new SummernoteOnBlurEvent(this, evt)); } protected void onEnter(final Event evt) { fireEvent(new SummernoteOnEnterEvent(this, evt)); } protected void onFocus(final Event evt) { fireEvent(new SummernoteOnFocusEvent(this, evt)); } protected void onImageUpload(final Event evt) { fireEvent(new SummernoteOnImageUploadEvent(this, evt)); } protected void onKeyUp(final Event evt) { fireEvent(new SummernoteOnKeyUpEvent(this, evt)); } protected void onKeyDown(final Event evt) { fireEvent(new SummernoteOnKeyDownEvent(this, evt)); } protected void onPaste(final Event evt) { fireEvent(new SummernoteOnPasteEvent(this, evt)); } private native void initialize(Element e, int height, boolean hasFocus, boolean styleWithSpan, JavaScriptObject toolbar, String language) /*-{ var target = this; $wnd.jQuery(e).summernote({ height: height, focus: hasFocus, toolbar: toolbar, styleWithSpan: styleWithSpan, lang: language, onInit: function (evt) { target.@org.gwtbootstrap3.extras.summernote.client.ui.base.SummernoteBase::onInitialize(Lcom/google/gwt/user/client/Event;)(evt); }, onEnter: function (evt) { target.@org.gwtbootstrap3.extras.summernote.client.ui.base.SummernoteBase::onEnter(Lcom/google/gwt/user/client/Event;)(evt); }, onFocus: function (evt) { target.@org.gwtbootstrap3.extras.summernote.client.ui.base.SummernoteBase::onFocus(Lcom/google/gwt/user/client/Event;)(evt); }, onBlur: function (evt) { target.@org.gwtbootstrap3.extras.summernote.client.ui.base.SummernoteBase::onBlur(Lcom/google/gwt/user/client/Event;)(evt); }, onKeyup: function (evt) { target.@org.gwtbootstrap3.extras.summernote.client.ui.base.SummernoteBase::onKeyUp(Lcom/google/gwt/user/client/Event;)(evt); }, onKeydown: function (evt) { target.@org.gwtbootstrap3.extras.summernote.client.ui.base.SummernoteBase::onKeyDown(Lcom/google/gwt/user/client/Event;)(evt); }, onImageUpload: function (evt) { target.@org.gwtbootstrap3.extras.summernote.client.ui.base.SummernoteBase::onImageUpload(Lcom/google/gwt/user/client/Event;)(evt); }, onPaste: function (evt) { target.@org.gwtbootstrap3.extras.summernote.client.ui.base.SummernoteBase::onPaste(Lcom/google/gwt/user/client/Event;)(evt); } }); }-*/; private native void destroy(Element e) /*-{ $wnd.jQuery(e).destroy(); $wnd.jQuery(e).off('oninit'); $wnd.jQuery(e).off('onenter'); $wnd.jQuery(e).off('onfocus'); $wnd.jQuery(e).off('onkeydown'); $wnd.jQuery(e).off('onblur'); $wnd.jQuery(e).off('onkeyup'); $wnd.jQuery(e).off('onImageUpload'); $wnd.jQuery(e).off('onpaste'); }-*/; private native void setCode(Element e, String code) /*-{
$wnd.jQuery(e).code(code); }-*/; private native String getCode(Element e)/*-{ return $wnd.jQuery(e).code(); }-*/; }
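The wrapper above is driven entirely through its setters and the GWT attach lifecycle: onLoad() runs the JSNI initialize, and setters such as setLanguage() trigger reconfigure(). A minimal usage sketch, assuming a standard GWT entry point (the entry-point class name is illustrative, not part of the library):

import com.google.gwt.core.client.EntryPoint;
import com.google.gwt.user.client.ui.RootPanel;
import org.gwtbootstrap3.extras.summernote.client.ui.SummernoteLanguage;
import org.gwtbootstrap3.extras.summernote.client.ui.base.SummernoteBase;
import org.gwtbootstrap3.extras.summernote.client.ui.base.Toolbar;

public class SummernoteSketch implements EntryPoint {
    @Override
    public void onModuleLoad() {
        SummernoteBase editor = new SummernoteBase();
        editor.setHeight(200);                            // editor area height in pixels
        editor.setToolbar(new Toolbar().toggleAll(true)); // same default the widget builds itself
        editor.setLanguage(SummernoteLanguage.EN);        // injects the language JS and reconfigures
        RootPanel.get().add(editor);                      // onLoad() performs the summernote init
        // Later, e.g. from a save button handler: String html = editor.getCode();
    }
}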
/* * Zed Attack Proxy (ZAP) and its related class files. * * ZAP is an HTTP/HTTPS proxy for assessing web application security. * * Copyright 2010 The ZAP Development Team * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.zaproxy.zap.extension.history; import java.awt.Frame; import java.awt.GridBagConstraints; import java.awt.GridBagLayout; import java.awt.HeadlessException; import java.awt.Insets; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Vector; import java.util.regex.Pattern; import java.util.regex.PatternSyntaxException; import javax.swing.BoxLayout; import javax.swing.DefaultListModel; import javax.swing.JButton; import javax.swing.JComboBox; import javax.swing.JLabel; import javax.swing.JList; import javax.swing.JOptionPane; import javax.swing.JPanel; import javax.swing.JScrollPane; import javax.swing.JTextArea; import javax.swing.ListSelectionModel; import org.parosproxy.paros.Constant; import org.parosproxy.paros.core.scanner.Alert; import org.parosproxy.paros.extension.AbstractDialog; import org.parosproxy.paros.extension.history.HistoryFilter; import org.parosproxy.paros.model.Model; import org.parosproxy.paros.network.HttpRequestHeader; import org.parosproxy.paros.network.HttpStatusCode; import org.parosproxy.paros.view.View; import org.zaproxy.zap.utils.ZapLabel; import org.zaproxy.zap.view.LayoutHelper; public class HistoryFilterPlusDialog extends AbstractDialog { private static final long serialVersionUID = 1L; private JPanel jPanel = null; private JButton btnApply = null; private JButton btnCancel = null; private JPanel jPanel1 = null; private int exitResult = JOptionPane.CANCEL_OPTION; private HistoryFilter filter = new HistoryFilter(); private JButton btnReset = null; private JPanel jPanel2 = null; private JList<String> methodList = null; private JList<Integer> codeList = null; private JList<String> riskList = null; private JList<String> confidenceList = null; private JList<String> tagList = null; private JTextArea regexInc = null; private JTextArea regexExc = null; private DefaultListModel<String> tagModel = null; private JScrollPane methodScroller = null; private JScrollPane codeScroller = null; private JScrollPane tagScroller = null; private JScrollPane riskScroller = null; private JScrollPane confidenceScroller = null; private JComboBox<String> notesComboBox = null; private JScrollPane urlRegxIncScroller = null; private JScrollPane urlRegxExcScroller = null; /** * Dialog layout (the original ASCII sketch was flattened during extraction): five columns of * selection lists for Methods, Codes, Tags, Alerts (risk levels Informational to High, and * confidence levels False Positive to Confirmed) and the Inc/Exc URL regex text areas, with * Notes [Ignore] and Images [Include] combo boxes below and Cancel / Clear / Apply buttons * along the bottom. */ /** @throws HeadlessException */ public HistoryFilterPlusDialog() throws HeadlessException { super(); initialize(); } /** * @param arg0 * @param arg1 * @throws HeadlessException */ public HistoryFilterPlusDialog(Frame arg0, boolean arg1) throws HeadlessException { super(arg0, arg1); initialize(); } /** This method initializes this */ private void initialize() { this.setContentPane(getJPanel()); this.setVisible(false); this.setTitle(Constant.messages.getString("history.filter.title")); if (Model.getSingleton().getOptionsParam().getViewParam().getWmUiHandlingOption() == 0) { this.setSize(600, 300); } centreDialog(); this.getRootPane().setDefaultButton(btnApply); this.pack(); } /** * This method initializes jPanel * * @return javax.swing.JPanel */ private JPanel getJPanel() { if (jPanel == null) { GridBagConstraints gridBagConstraints12 = new GridBagConstraints(); java.awt.GridBagConstraints gridBagConstraints11 = new GridBagConstraints(); java.awt.GridBagConstraints gridBagConstraints6 = new GridBagConstraints(); ZapLabel descLabel = new ZapLabel(); descLabel.setLineWrap(true); descLabel.setWrapStyleWord(true); descLabel.setText(Constant.messages.getString("history.filter.label.desc")); jPanel = new JPanel(); jPanel.setLayout(new GridBagLayout()); gridBagConstraints6.gridwidth = 3; gridBagConstraints6.gridx = 0; gridBagConstraints6.gridy = 3; gridBagConstraints6.insets = new java.awt.Insets(5, 2, 5, 2); gridBagConstraints6.ipadx = 3; gridBagConstraints6.ipady = 3; gridBagConstraints11.gridx = 0; gridBagConstraints11.gridy = 0; gridBagConstraints11.insets = new java.awt.Insets(5, 10, 5, 10); gridBagConstraints11.weightx = 1.0D; gridBagConstraints11.gridwidth = 3; gridBagConstraints11.anchor = java.awt.GridBagConstraints.WEST; gridBagConstraints11.fill = java.awt.GridBagConstraints.HORIZONTAL; gridBagConstraints11.ipadx = 3; gridBagConstraints11.ipady = 3; gridBagConstraints12.gridx = 0; gridBagConstraints12.weighty = 1.0D; gridBagConstraints12.gridwidth = 3; gridBagConstraints12.gridy = 2; gridBagConstraints12.fill = java.awt.GridBagConstraints.BOTH; gridBagConstraints12.insets = new java.awt.Insets(2, 10, 2, 10); gridBagConstraints12.ipadx = 0; gridBagConstraints12.ipady = 1; jPanel.add(descLabel, gridBagConstraints11); jPanel.add(getJPanel2(), gridBagConstraints12); jPanel.add(getJPanel1(), gridBagConstraints6); } return jPanel; } /** * This method initializes btnApply * * @return javax.swing.JButton */ private JButton getBtnApply() { if (btnApply == null) { btnApply = new JButton(); btnApply.setText(Constant.messages.getString("history.filter.button.apply")); btnApply.addActionListener( new java.awt.event.ActionListener() { @Override public void actionPerformed(java.awt.event.ActionEvent e) { try { filter.setMethods(methodList.getSelectedValuesList()); filter.setCodes(codeList.getSelectedValuesList()); filter.setTags(tagList.getSelectedValuesList()); filter.setRisks(riskList.getSelectedValuesList()); filter.setReliabilities(confidenceList.getSelectedValuesList()); filter.setNote(notesComboBox.getSelectedItem()); filter.setUrlIncPatternList(strToRegexList(regexInc.getText())); filter.setUrlExcPatternList(strToRegexList(regexExc.getText())); exitResult = JOptionPane.OK_OPTION; HistoryFilterPlusDialog.this.dispose(); }
catch (PatternSyntaxException e1) { // Invalid regex View.getSingleton() .showWarningDialog( Constant.messages.getString( "history.filter.badregex.warning", e1.getMessage())); } } }); } return btnApply; } private List<Pattern> strToRegexList(String str) throws PatternSyntaxException { List<Pattern> list = new ArrayList<>(); for (String s : str.split("\n")) { if (s.length() > 0) { list.add(Pattern.compile(s)); } } return list; } /** * This method initializes btnCancel * * @return javax.swing.JButton */ private JButton getBtnCancel() { if (btnCancel == null) { btnCancel = new JButton(); btnCancel.setText(Constant.messages.getString("all.button.cancel")); btnCancel.addActionListener( new java.awt.event.ActionListener() { @Override public void actionPerformed(java.awt.event.ActionEvent e) { exitResult = JOptionPane.CANCEL_OPTION; HistoryFilterPlusDialog.this.dispose(); } }); } return btnCancel; } /** * This method initializes jPanel1 * * @return javax.swing.JPanel */ private JPanel getJPanel1() { if (jPanel1 == null) { jPanel1 = new JPanel(); jPanel1.add(getBtnCancel(), null); jPanel1.add(getBtnReset(), null); jPanel1.add(getBtnApply(), null); } return jPanel1; } public int showDialog() { this.setVisible(true); return exitResult; } /** * This method initializes btnReset * * @return javax.swing.JButton */ private JButton getBtnReset() { if (btnReset == null) { btnReset = new JButton(); btnReset.setText(Constant.messages.getString("history.filter.button.clear")); btnReset.addActionListener( new java.awt.event.ActionListener() { @Override public void actionPerformed(java.awt.event.ActionEvent e) { exitResult = JOptionPane.NO_OPTION; // Unset everything methodList.setSelectedIndices(new int[0]); codeList.setSelectedIndices(new int[0]); tagList.setSelectedIndices(new int[0]); riskList.setSelectedIndices(new int[0]); confidenceList.setSelectedIndices(new int[0]); notesComboBox.setSelectedItem(HistoryFilter.NOTES_IGNORE); regexInc.setText(""); regexExc.setText(""); filter.reset(); } }); } return btnReset; } private Insets stdInset() { return new Insets(5, 5, 1, 5); } /** * This method initializes jPanel2 * * @return javax.swing.JPanel */ private JPanel getJPanel2() { if (jPanel2 == null) { jPanel2 = new JPanel(); jPanel2.setLayout(new GridBagLayout()); GridBagConstraints gbc00 = LayoutHelper.getGBC(0, 0, 1, 1.0, stdInset()); GridBagConstraints gbc01 = LayoutHelper.getGBC(1, 0, 1, 1.0, stdInset()); GridBagConstraints gbc02 = LayoutHelper.getGBC(2, 0, 1, 1.0, stdInset()); GridBagConstraints gbc03 = LayoutHelper.getGBC(3, 0, 1, 1.0, stdInset()); GridBagConstraints gbc04 = LayoutHelper.getGBC(4, 0, 1, 1.0, stdInset()); GridBagConstraints gbc10 = LayoutHelper.getGBC( 0, 1, 1, 3, 1.0, 1.0, GridBagConstraints.BOTH, GridBagConstraints.NORTHWEST, stdInset()); GridBagConstraints gbc11 = LayoutHelper.getGBC( 1, 1, 1, 3, 1.0, 1.0, GridBagConstraints.BOTH, GridBagConstraints.NORTHWEST, stdInset()); GridBagConstraints gbc12 = LayoutHelper.getGBC( 2, 1, 1, 3, 1.0, 1.0, GridBagConstraints.BOTH, GridBagConstraints.NORTHWEST, stdInset()); GridBagConstraints gbc13 = LayoutHelper.getGBC( 3, 1, 1, 2, 1.0, 1.0, GridBagConstraints.BOTH, GridBagConstraints.NORTHWEST, stdInset()); GridBagConstraints gbc14 = LayoutHelper.getGBC( 4, 1, 1, 1, 1.0, 1.0, GridBagConstraints.BOTH, GridBagConstraints.NORTHWEST, stdInset()); GridBagConstraints gbc24 = LayoutHelper.getGBC( 4, 2, 1, 1, 0.0, 0.0, GridBagConstraints.NONE, GridBagConstraints.NORTHWEST, stdInset()); GridBagConstraints gbc33 = LayoutHelper.getGBC( 3, 3, 1, 1, 1.0, 1.0, 
GridBagConstraints.BOTH, GridBagConstraints.NORTHWEST, stdInset()); GridBagConstraints gbc34 = LayoutHelper.getGBC( 4, 3, 1, 1, 0.0, 0.0, GridBagConstraints.BOTH, GridBagConstraints.NORTHWEST, stdInset()); GridBagConstraints gbc30 = LayoutHelper.getGBC(0, 4, 2, 1.0, stdInset()); jPanel2.add( new JLabel(Constant.messages.getString("history.filter.label.methods")), gbc00); jPanel2.add( new JLabel(Constant.messages.getString("history.filter.label.codes")), gbc01); jPanel2.add( new JLabel(Constant.messages.getString("history.filter.label.tags")), gbc02); jPanel2.add( new JLabel(Constant.messages.getString("history.filter.label.alerts")), gbc03); jPanel2.add( new JLabel(Constant.messages.getString("history.filter.label.urlincregex")), gbc04); jPanel2.add(getMethodScroller(), gbc10); jPanel2.add(getCodeScroller(), gbc11); jPanel2.add(getTagScroller(), gbc12); jPanel2.add(getRiskScroller(), gbc13); jPanel2.add(getUrlRegxIncScroller(), gbc14); jPanel2.add( new JLabel(Constant.messages.getString("history.filter.label.urlexcregex")), gbc24); jPanel2.add(getConfidenceScroller(), gbc33); jPanel2.add(getUrlRegxExcScroller(), gbc34); getUrlRegxExcScroller(); JPanel jPanel3 = new JPanel(); jPanel3.setLayout(new BoxLayout(jPanel3, BoxLayout.X_AXIS)); jPanel3.add(new JLabel(Constant.messages.getString("history.filter.label.notes"))); jPanel3.add(getNotesComboBox()); jPanel2.add(jPanel3, gbc30); } return jPanel2; } private JScrollPane getMethodScroller() { if (methodScroller == null) { methodList = new JList<>(HttpRequestHeader.METHODS); methodList.setSelectionMode(ListSelectionModel.MULTIPLE_INTERVAL_SELECTION); methodList.setLayoutOrientation(JList.VERTICAL); methodList.setVisibleRowCount(HttpRequestHeader.METHODS.length); methodScroller = new JScrollPane(methodList); } return methodScroller; } private JScrollPane getCodeScroller() { if (codeScroller == null) { Vector<Integer> codeInts = new Vector<>(HttpStatusCode.CODES.length); for (int i : HttpStatusCode.CODES) { codeInts.add(i); } codeList = new JList<>(codeInts); codeList.setSelectionMode(ListSelectionModel.MULTIPLE_INTERVAL_SELECTION); codeList.setLayoutOrientation(JList.VERTICAL); codeScroller = new JScrollPane(codeList); } return codeScroller; } private JScrollPane getRiskScroller() { if (riskScroller == null) { riskList = new JList<>(Alert.MSG_RISK); riskList.setSelectionMode(ListSelectionModel.MULTIPLE_INTERVAL_SELECTION); riskList.setLayoutOrientation(JList.VERTICAL); riskList.setVisibleRowCount(Alert.MSG_RISK.length); riskScroller = new JScrollPane(riskList); } return riskScroller; } private JScrollPane getConfidenceScroller() { if (confidenceScroller == null) { confidenceList = new JList<>(Alert.MSG_CONFIDENCE); confidenceList.setSelectionMode(ListSelectionModel.MULTIPLE_INTERVAL_SELECTION); confidenceList.setLayoutOrientation(JList.VERTICAL); confidenceList.setVisibleRowCount(Alert.MSG_CONFIDENCE.length); confidenceScroller = new JScrollPane(confidenceList); } return confidenceScroller; } private JScrollPane getUrlRegxIncScroller() { if (urlRegxIncScroller == null) { regexInc = new JTextArea(); regexInc.setRows(4); urlRegxIncScroller = new JScrollPane(regexInc); } return urlRegxIncScroller; } private JScrollPane getUrlRegxExcScroller() { if (urlRegxExcScroller == null) { regexExc = new JTextArea(); regexExc.setRows(5); urlRegxExcScroller = new JScrollPane(regexExc); } return urlRegxExcScroller; } private DefaultListModel<String> getTagModel() { if (tagModel == null) { tagModel = new DefaultListModel<>(); } return tagModel; } private 
JScrollPane getTagScroller() { if (tagScroller == null) { tagList = new JList<>(getTagModel()); tagList.setPrototypeCellValue("Tags are short..."); tagScroller = new JScrollPane(tagList); tagScroller.setHorizontalScrollBarPolicy( javax.swing.JScrollPane.HORIZONTAL_SCROLLBAR_AS_NEEDED); tagScroller.setVerticalScrollBarPolicy( javax.swing.JScrollPane.VERTICAL_SCROLLBAR_AS_NEEDED); } return tagScroller; } private JComboBox<String> getNotesComboBox() { if (notesComboBox == null) { notesComboBox = new JComboBox<>(HistoryFilter.NOTES_OPTIONS); } return notesComboBox; } public void setAllTags(List<String> allTags) { List<String> selected = tagList.getSelectedValuesList(); int[] inds = new int[allTags.size()]; Arrays.fill(inds, -1); getTagModel().clear(); int i = 0; for (String tag : allTags) { getTagModel().addElement(tag); } for (Object sel : selected) { if (getTagModel().contains(sel)) { inds[i] = getTagModel().indexOf(sel); } i++; } tagList.setSelectedIndices(inds); } public HistoryFilter getFilter() { return this.filter; } }
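A caller typically shows the dialog modally and reads the filter back only when Apply was pressed. A minimal sketch, assuming the caller sits in the same package and already has the parent Frame and the current tag list (the helper class name is illustrative):

import java.awt.Frame;
import java.util.List;
import javax.swing.JOptionPane;
import org.parosproxy.paros.extension.history.HistoryFilter;

public class HistoryFilterDialogSketch {
    // Returns the configured filter, or null when the dialog was cancelled.
    public static HistoryFilter chooseFilter(Frame owner, List<String> knownTags) {
        HistoryFilterPlusDialog dialog = new HistoryFilterPlusDialog(owner, true);
        dialog.setAllTags(knownTags); // populate the Tags column before showing
        if (dialog.showDialog() == JOptionPane.OK_OPTION) {
            return dialog.getFilter(); // selections were committed by the Apply button
        }
        return null;
    }
}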
package org.bouncycastle.crypto.paddings; import org.bouncycastle.crypto.*; import org.bouncycastle.crypto.params.ParametersWithRandom; /** * A wrapper class that allows block ciphers to be used to process data in * a piecemeal fashion with padding. The PaddedBufferedBlockCipher * outputs a block only when the buffer is full and more data is being added, * or on a doFinal (unless the current block in the buffer is a pad block). * The default padding mechanism used is the one outlined in PKCS5/PKCS7. */ public class PaddedBufferedBlockCipher extends BufferedBlockCipher { BlockCipherPadding padding; /** * Create a buffered block cipher with the desired padding. * * @param cipher the underlying block cipher this buffering object wraps. * @param padding the padding type. */ public PaddedBufferedBlockCipher( BlockCipher cipher, BlockCipherPadding padding) { this.cipher = cipher; this.padding = padding; buf = new byte[cipher.getBlockSize()]; bufOff = 0; } /** * Create a buffered block cipher with PKCS7 padding. * * @param cipher the underlying block cipher this buffering object wraps. */ public PaddedBufferedBlockCipher( BlockCipher cipher) { this(cipher, new PKCS7Padding()); } /** * initialise the cipher. * * @param forEncryption if true the cipher is initialised for * encryption, if false for decryption. * @param params the key and other data required by the cipher. * @exception IllegalArgumentException if the params argument is * inappropriate. */ public void init( boolean forEncryption, CipherParameters params) throws IllegalArgumentException { this.forEncryption = forEncryption; reset(); if (params instanceof ParametersWithRandom) { ParametersWithRandom p = (ParametersWithRandom)params; padding.init(p.getRandom()); cipher.init(forEncryption, p.getParameters()); } else { padding.init(null); cipher.init(forEncryption, params); } } /** * return the minimum size of the output buffer required for an update * plus a doFinal with an input of len bytes. * * @param len the length of the input. * @return the space required to accommodate a call to update and doFinal * with len bytes of input. */ public int getOutputSize( int len) { int total = len + bufOff; int leftOver = total % buf.length; if (leftOver == 0) { if (forEncryption) { return total + buf.length; } return total; } return total - leftOver + buf.length; } /** * return the size of the output buffer required for an update * with an input of len bytes. * * @param len the length of the input. * @return the space required to accommodate a call to update * with len bytes of input. */ public int getUpdateOutputSize( int len) { int total = len + bufOff; int leftOver = total % buf.length; if (leftOver == 0) { return Math.max(0, total - buf.length); } return total - leftOver; } /** * process a single byte, producing an output block if necessary. * * @param in the input byte. * @param out the space for any output that might be produced. * @param outOff the offset from which the output will be copied. * @return the number of output bytes copied to out. * @exception DataLengthException if there isn't enough space in out. * @exception IllegalStateException if the cipher isn't initialised. */ public int processByte( byte in, byte[] out, int outOff) throws DataLengthException, IllegalStateException { int resultLen = 0; if (bufOff == buf.length) { resultLen = cipher.processBlock(buf, 0, out, outOff); bufOff = 0; } buf[bufOff++] = in; return resultLen; } /** * process an array of bytes, producing output if necessary. * * @param in the input byte array.
* @param inOff the offset at which the input data starts. * @param len the number of bytes to be copied out of the input array. * @param out the space for any output that might be produced. * @param outOff the offset from which the output will be copied. * @return the number of output bytes copied to out. * @exception DataLengthException if there isn't enough space in out. * @exception IllegalStateException if the cipher isn't initialised. */ public int processBytes( byte[] in, int inOff, int len, byte[] out, int outOff) throws DataLengthException, IllegalStateException { if (len < 0) { throw new IllegalArgumentException("Can't have a negative input length!"); } int blockSize = getBlockSize(); int length = getUpdateOutputSize(len); if (length > 0) { if ((outOff + length) > out.length) { throw new OutputLengthException("output buffer too short"); } } int resultLen = 0; int gapLen = buf.length - bufOff; if (len > gapLen) { System.arraycopy(in, inOff, buf, bufOff, gapLen); resultLen += cipher.processBlock(buf, 0, out, outOff); bufOff = 0; len -= gapLen; inOff += gapLen; while (len > buf.length) { resultLen += cipher.processBlock(in, inOff, out, outOff + resultLen); len -= blockSize; inOff += blockSize; } } System.arraycopy(in, inOff, buf, bufOff, len); bufOff += len; return resultLen; } /** * Process the last block in the buffer. If the buffer is currently * full and padding needs to be added a call to doFinal will produce * 2 * getBlockSize() bytes. * * @param out the array the block currently being held is copied into. * @param outOff the offset at which the copying starts. * @return the number of output bytes copied to out. * @exception DataLengthException if there is insufficient space in out for * the output or we are decrypting and the input is not block size aligned. * @exception IllegalStateException if the underlying cipher is not * initialised. * @exception InvalidCipherTextException if padding is expected and not found. */ public int doFinal( byte[] out, int outOff) throws DataLengthException, IllegalStateException, InvalidCipherTextException { int blockSize = cipher.getBlockSize(); int resultLen = 0; if (forEncryption) { if (bufOff == blockSize) { if ((outOff + 2 * blockSize) > out.length) { reset(); throw new OutputLengthException("output buffer too short"); } resultLen = cipher.processBlock(buf, 0, out, outOff); bufOff = 0; } padding.addPadding(buf, bufOff); resultLen += cipher.processBlock(buf, 0, out, outOff + resultLen); reset(); } else { if (bufOff == blockSize) { resultLen = cipher.processBlock(buf, 0, buf, 0); bufOff = 0; } else { reset(); throw new DataLengthException("last block incomplete in decryption"); } try { resultLen -= padding.padCount(buf); System.arraycopy(buf, 0, out, outOff, resultLen); } finally { reset(); } } return resultLen; } }
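The buffered cipher above is normally composed with an engine and a mode from the same lightweight API; doFinal() applies (or strips and verifies) the padding. A minimal sketch of AES/CBC with the default PKCS7 padding (the helper class and its parameter handling are illustrative, not part of this file):

import org.bouncycastle.crypto.InvalidCipherTextException;
import org.bouncycastle.crypto.engines.AESEngine;
import org.bouncycastle.crypto.modes.CBCBlockCipher;
import org.bouncycastle.crypto.params.KeyParameter;
import org.bouncycastle.crypto.params.ParametersWithIV;

public class PaddedCipherSketch {
    // Encrypts when forEncryption is true, decrypts (and strips padding) when false.
    public static byte[] process(boolean forEncryption, byte[] key, byte[] iv, byte[] input)
            throws InvalidCipherTextException {
        PaddedBufferedBlockCipher cipher =
                new PaddedBufferedBlockCipher(new CBCBlockCipher(new AESEngine()));
        cipher.init(forEncryption, new ParametersWithIV(new KeyParameter(key), iv));
        byte[] out = new byte[cipher.getOutputSize(input.length)];
        int len = cipher.processBytes(input, 0, input.length, out, 0);
        len += cipher.doFinal(out, len);
        byte[] result = new byte[len]; // getOutputSize() is an upper bound, so trim
        System.arraycopy(out, 0, result, 0, len);
        return result;
    }
}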
package carbon.drawable; /** * Created by Marcin on 2015-02-28. */ import android.annotation.TargetApi; import android.content.Context; import android.content.res.TypedArray; import android.graphics.Canvas; import android.graphics.Color; import android.graphics.Paint; import android.graphics.PorterDuff; import android.graphics.PorterDuffXfermode; import android.graphics.Rect; import android.util.FloatMath; import android.view.animation.AnimationUtils; import android.view.animation.DecelerateInterpolator; import android.view.animation.Interpolator; @TargetApi(14) public class EdgeEffect{ @SuppressWarnings("UnusedDeclaration") private static final String TAG = "EdgeEffect"; private static final int RECEDE_TIME = 600; private static final int PULL_TIME = 167; private static final int PULL_DECAY_TIME = 2000; private static final float MAX_ALPHA = 0.5f; private static final float MAX_GLOW_SCALE = 2.f; private static final float PULL_GLOW_BEGIN = 0.f; private static final int MIN_VELOCITY = 100; private static final int MAX_VELOCITY = 10000; private static final float EPSILON = 0.001f; private static final double ANGLE = Math.PI / 6; private static final float SIN = (float) Math.sin(ANGLE); private static final float COS = (float) Math.cos(ANGLE); private static final int STATE_IDLE = 0; private static final int STATE_PULL = 1; private static final int STATE_ABSORB = 2; private static final int STATE_RECEDE = 3; private static final int STATE_PULL_DECAY = 4; private static final float PULL_DISTANCE_ALPHA_GLOW_FACTOR = 0.8f; private static final int VELOCITY_GLOW_FACTOR = 6; private float mGlowAlpha; private float mGlowScaleY; private float mGlowAlphaStart; private float mGlowAlphaFinish; private float mGlowScaleYStart; private float mGlowScaleYFinish; private long mStartTime; private float mDuration; private final Interpolator mInterpolator; private int mState = STATE_IDLE; private float mPullDistance; private final Rect mBounds = new Rect(); private final Paint mPaint = new Paint(); private float mRadius; private float mBaseGlowScale; private float mDisplacement = 0.5f; private float mTargetDisplacement = 0.5f; /** * Construct a new EdgeEffect with a theme appropriate for the provided context. * * @param context Context used to provide theming and resource information for the EdgeEffect */ public EdgeEffect(Context context) { mPaint.setAntiAlias(true); //final TypedArray a = context.obtainStyledAttributes( // com.android.internal.R.styleable.EdgeEffect); final int themeColor = Color.RED;//a.getColor( //com.android.internal.R.styleable.EdgeEffect_colorEdgeEffect, 0xff666666); //a.recycle(); mPaint.setColor((themeColor & 0xffffff) | 0x33000000); mPaint.setStyle(Paint.Style.FILL); mPaint.setXfermode(new PorterDuffXfermode(PorterDuff.Mode.SRC_ATOP)); mInterpolator = new DecelerateInterpolator(); } /** * Set the size of this edge effect in pixels. * * @param width Effect width in pixels * @param height Effect height in pixels */ public void setSize(int width, int height) { final float r = width * 0.75f / SIN; final float y = COS * r; final float h = r - y; final float or = height * 0.75f / SIN; final float oy = COS * or; final float oh = or - oy; mRadius = r; mBaseGlowScale = h > 0 ? 
Math.min(oh / h, 1.f) : 1.f; mBounds.set(mBounds.left, mBounds.top, width, (int) Math.min(height, h)); } public boolean isFinished() { return mState == STATE_IDLE; } public void finish() { mState = STATE_IDLE; } public void onPull(float deltaDistance) { onPull(deltaDistance, 0.5f); } public void onPull(float deltaDistance, float displacement) { final long now = AnimationUtils.currentAnimationTimeMillis(); mTargetDisplacement = displacement; if (mState == STATE_PULL_DECAY && now - mStartTime < mDuration) { return; } if (mState != STATE_PULL) { mGlowScaleY = Math.max(PULL_GLOW_BEGIN, mGlowScaleY); } mState = STATE_PULL; mStartTime = now; mDuration = PULL_TIME; mPullDistance += deltaDistance; final float absdd = Math.abs(deltaDistance); mGlowAlpha = mGlowAlphaStart = Math.min(MAX_ALPHA, mGlowAlpha + (absdd * PULL_DISTANCE_ALPHA_GLOW_FACTOR)); if (mPullDistance == 0) { mGlowScaleY = mGlowScaleYStart = 0; } else { final float scale = Math.max(0, 1 - 1 / FloatMath.sqrt(Math.abs(mPullDistance) * mBounds.height()) - 0.3f) / 0.7f; mGlowScaleY = mGlowScaleYStart = scale; } mGlowAlphaFinish = mGlowAlpha; mGlowScaleYFinish = mGlowScaleY; } public void onRelease() { mPullDistance = 0; if (mState != STATE_PULL && mState != STATE_PULL_DECAY) { return; } mState = STATE_RECEDE; mGlowAlphaStart = mGlowAlpha; mGlowScaleYStart = mGlowScaleY; mGlowAlphaFinish = 0.f; mGlowScaleYFinish = 0.f; mStartTime = AnimationUtils.currentAnimationTimeMillis(); mDuration = RECEDE_TIME; } public void onAbsorb(int velocity) { mState = STATE_ABSORB; velocity = Math.min(Math.max(MIN_VELOCITY, Math.abs(velocity)), MAX_VELOCITY); mStartTime = AnimationUtils.currentAnimationTimeMillis(); mDuration = 0.15f + (velocity * 0.02f); // The glow depends more on the velocity, and therefore starts out // nearly invisible. mGlowAlphaStart = 0.3f; mGlowScaleYStart = Math.max(mGlowScaleY, 0.f); // Growth for the size of the glow should be quadratic to properly // respond // to a user's scrolling speed. The faster the scrolling speed, the more // intense the effect should be for both the size and the saturation. mGlowScaleYFinish = Math.min(0.025f + (velocity * (velocity / 100) * 0.00015f) / 2, 1.f); // Alpha should change for the glow as well as size. 
mGlowAlphaFinish = Math.max( mGlowAlphaStart, Math.min(velocity * VELOCITY_GLOW_FACTOR * .00001f, MAX_ALPHA)); mTargetDisplacement = 0.5f; } public void setColor(int color) { mPaint.setColor(color); } public int getColor() { return mPaint.getColor(); } public boolean draw(Canvas canvas) { update(); final int count = canvas.save(); final float centerX = mBounds.centerX(); final float centerY = mBounds.height() - mRadius; canvas.scale(1.f, Math.min(mGlowScaleY, 1.f) * mBaseGlowScale, centerX, 0); final float displacement = Math.max(0, Math.min(mDisplacement, 1.f)) - 0.5f; float translateX = mBounds.width() * displacement / 2; canvas.clipRect(mBounds); canvas.translate(translateX, 0); mPaint.setAlpha((int) (0xff * mGlowAlpha)); canvas.drawCircle(centerX, centerY, mRadius, mPaint); canvas.restoreToCount(count); boolean oneLastFrame = false; if (mState == STATE_RECEDE && mGlowScaleY == 0) { mState = STATE_IDLE; oneLastFrame = true; } return mState != STATE_IDLE || oneLastFrame; } public int getMaxHeight() { return (int) (mBounds.height() * MAX_GLOW_SCALE + 0.5f); } private void update() { final long time = AnimationUtils.currentAnimationTimeMillis(); final float t = Math.min((time - mStartTime) / mDuration, 1.f); final float interp = mInterpolator.getInterpolation(t); mGlowAlpha = mGlowAlphaStart + (mGlowAlphaFinish - mGlowAlphaStart) * interp; mGlowScaleY = mGlowScaleYStart + (mGlowScaleYFinish - mGlowScaleYStart) * interp; mDisplacement = (mDisplacement + mTargetDisplacement) / 2; if (t >= 1.f - EPSILON) { switch (mState) { case STATE_ABSORB: mState = STATE_RECEDE; mStartTime = AnimationUtils.currentAnimationTimeMillis(); mDuration = RECEDE_TIME; mGlowAlphaStart = mGlowAlpha; mGlowScaleYStart = mGlowScaleY; // After absorb, the glow should fade to nothing. mGlowAlphaFinish = 0.f; mGlowScaleYFinish = 0.f; break; case STATE_PULL: mState = STATE_PULL_DECAY; mStartTime = AnimationUtils.currentAnimationTimeMillis(); mDuration = PULL_DECAY_TIME; mGlowAlphaStart = mGlowAlpha; mGlowScaleYStart = mGlowScaleY; // After pull, the glow should fade to nothing. mGlowAlphaFinish = 0.f; mGlowScaleYFinish = 0.f; break; case STATE_PULL_DECAY: mState = STATE_RECEDE; break; case STATE_RECEDE: mState = STATE_IDLE; break; } } } }
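The effect is meant to be driven by a host view: feed normalized pull deltas from touch events, call onRelease() on ACTION_UP, and keep invalidating while draw() reports that the animation is still running. A minimal host sketch (the view class and its pull heuristic are illustrative):

import android.content.Context;
import android.graphics.Canvas;
import android.view.MotionEvent;
import android.view.View;
import carbon.drawable.EdgeEffect;

public class GlowingView extends View {
    private final EdgeEffect topGlow;
    private float lastY;

    public GlowingView(Context context) {
        super(context);
        topGlow = new EdgeEffect(context);
    }

    @Override
    protected void onSizeChanged(int w, int h, int oldw, int oldh) {
        super.onSizeChanged(w, h, oldw, oldh);
        topGlow.setSize(w, h); // the effect must know its bounds before drawing
    }

    @Override
    public boolean onTouchEvent(MotionEvent event) {
        switch (event.getAction()) {
            case MotionEvent.ACTION_DOWN:
                lastY = event.getY();
                return true;
            case MotionEvent.ACTION_MOVE:
                float delta = event.getY() - lastY;
                lastY = event.getY();
                if (delta > 0 && getHeight() > 0) {
                    // deltas are normalized by the view height, as onPull() expects
                    topGlow.onPull(delta / getHeight(), event.getX() / getWidth());
                    invalidate();
                }
                return true;
            case MotionEvent.ACTION_UP:
                topGlow.onRelease();
                invalidate();
                return true;
        }
        return super.onTouchEvent(event);
    }

    @Override
    protected void onDraw(Canvas canvas) {
        super.onDraw(canvas);
        if (topGlow.draw(canvas)) {
            invalidate(); // draw() returns true while the glow is still animating
        }
    }
}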
package org.osiam.client; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.fail; import static org.junit.Assert.assertTrue; import java.util.Arrays; import java.util.UUID; import org.junit.Test; import org.junit.runner.RunWith; import org.osiam.client.exception.ConflictException; import org.osiam.client.exception.NoResultException; import org.osiam.client.query.Query; import org.osiam.client.query.QueryResult; import org.osiam.client.query.metamodel.Group_; import org.osiam.resources.scim.Group; import org.springframework.test.context.ContextConfiguration; import org.springframework.test.context.TestExecutionListeners; import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; import org.springframework.test.context.support.DependencyInjectionTestExecutionListener; import com.github.springtestdbunit.DbUnitTestExecutionListener; import com.github.springtestdbunit.annotation.DatabaseSetup; @RunWith(SpringJUnit4ClassRunner.class) @ContextConfiguration("/context.xml") @TestExecutionListeners({DependencyInjectionTestExecutionListener.class, DbUnitTestExecutionListener.class}) @DatabaseSetup("/database_seed.xml") public class EditGroupServiceIT extends AbstractIntegrationTestBase{ private static String ID_EXISTING_GROUP = "69e1a5dc-89be-4343-976c-b5541af249f4"; private static String NEW_ID = UUID.randomUUID().toString(); private static String NAME_EXISTING_GROUP = "test_group01"; private static final String IRRELEVANT = "irrelevant"; private String validId = null; private Group newGroup; private Group returnGroup; private Group dbGroup; private Query query; @Test (expected = ConflictException.class) public void create_group_with_no_username_raises_exception(){ initializeGroupWithNoUserName(); createGroup(); fail("Exception expected"); } @Test (expected = ConflictException.class) public void create_group_with_existing_displayName_raises_exception(){ initializeGroupWithExistingDisplayName(); createGroup(); fail("Exception expected"); } @Test (expected = ConflictException.class) public void create_empty_group_raises_exception(){ initializeGroupWithEmptyDisplayName(); createGroup(); fail("Exception expected"); } @Test public void create_simple_Group(){ initializeSimpleGroup(); createGroup(); returnGroupHasValidId(); loadGroup(returnGroup.getId()); returnAndDbGroupHaveSameDisplayName(); } @Test public void create_group_with_existing_id(){ initializeSimpleGroupWithID(ID_EXISTING_GROUP.toString()); createGroup(); loadGroup(ID_EXISTING_GROUP); existingGroupDisplayNameHasNotChanged(); } @Test public void given_id_to_new_group_has_changed_after_saving() { initializeSimpleGroupWithID(NEW_ID.toString()); createGroup(); assertNotEquals(NEW_ID.toString(), returnGroup.getId()); } @Test public void created_group_can_be_found(){ initialQueryToSearchGroup(); loadSingleGroupByQuery(); assertNull(dbGroup); initializeSimpleGroup(); createGroup(); loadSingleGroupByQuery(); assertNotNull(dbGroup); assertEquals(IRRELEVANT, dbGroup.getDisplayName()); } @Test public void id_return_group_same_as_new_loaded_id(){ initializeSimpleGroupWithID(NEW_ID.toString()); createGroup(); initialQueryToSearchGroup(); loadSingleGroupByQuery(); assertNotNull(dbGroup); assertEquals(returnGroup.getId(), dbGroup.getId()); } @Test public void group_is_deleted() throws Exception { given_a_test_group_ID(); whenGroupIsDeleted(); thenGroupIsRemovedFromServer(); } @Test (expected = NoResultException.class) public void user_is_not_deleted() throws Exception { givenAValidUserIDForDeletion(); whenUserIsDeleted(); fail(); } @Test (expected = NoResultException.class) public void delete_group_two_times() throws Exception { given_a_test_group_ID(); whenGroupIsDeleted(); thenGroupIsRemovedFromServer(); whenGroupIsDeleted(); fail(); } @Test public void delete_group_with_members(){ String idGroup01 = "69e1a5dc-89be-4343-976c-b5541af249f4"; oConnector.getGroup(idGroup01, accessToken); //group could be found oConnector.deleteGroup(idGroup01, accessToken); try{ oConnector.getGroup(idGroup01, accessToken); fail("Exception expected"); }catch (NoResultException e){} } private void given_a_test_group_ID() { validId = VALID_GROUP_ID; } private void whenGroupIsDeleted() { oConnector.deleteGroup(validId, accessToken); } private void thenGroupIsRemovedFromServer() { try { oConnector.getGroup(validId, accessToken); } catch(NoResultException e) { return; } catch(Exception e) { fail(Arrays.toString(e.getStackTrace())); } fail(); } private void givenAValidUserIDForDeletion() throws Exception { validId = DELETE_USER_ID; } private void whenUserIsDeleted() { oConnector.deleteGroup(validId, accessToken); } private void initializeGroupWithNoUserName(){ newGroup = new Group.Builder().build(); } private void initializeGroupWithEmptyDisplayName(){ newGroup = new Group.Builder().setDisplayName("").build(); } private void initializeSimpleGroup(){ newGroup = new Group.Builder().setDisplayName(IRRELEVANT).build(); } private void initializeSimpleGroupWithID(String id){ newGroup = new Group.Builder().setDisplayName(IRRELEVANT).setId(id).build(); } private void initializeGroupWithExistingDisplayName(){ newGroup = new Group.Builder().setDisplayName(NAME_EXISTING_GROUP).build(); } private void returnGroupHasValidId(){ assertTrue(returnGroup.getId().length() > 0); } private void loadGroup(String id){ dbGroup = oConnector.getGroup(id, accessToken); } private void loadSingleGroupByQuery(){ QueryResult<Group> result = oConnector.searchGroups(query, accessToken); if(result.getResources().size() == 0){ dbGroup = null; }else if(result.getResources().size() == 1){ dbGroup = result.getResources().get(0); }else{ fail("Zero or one group should be found"); } } private void existingGroupDisplayNameHasNotChanged(){ assertEquals(NAME_EXISTING_GROUP, dbGroup.getDisplayName()); } private void returnAndDbGroupHaveSameDisplayName(){ assertEquals(newGroup.getDisplayName(), dbGroup.getDisplayName()); } private void createGroup(){ returnGroup = oConnector.createGroup(newGroup, accessToken); } private void initialQueryToSearchGroup(){ query = new Query.Builder(Group.class).setFilter(new Query.Filter(Group.class, Group_.displayName.equalTo(IRRELEVANT))).build(); } }
package com.lanian.btbeacon; import java.io.IOException; import java.lang.ref.WeakReference; import java.util.UUID; import java.util.Vector; import android.app.Notification; import android.app.NotificationManager; import android.app.PendingIntent; import android.app.Service; import android.bluetooth.BluetoothAdapter; import android.bluetooth.BluetoothDevice; import android.bluetooth.BluetoothServerSocket; import android.bluetooth.BluetoothSocket; import android.content.Context; import android.content.Intent; import android.database.ContentObserver; import android.database.Cursor; import android.os.Bundle; import android.os.Handler; import android.os.IBinder; import android.os.Message; import android.os.Messenger; import android.os.RemoteException; import android.util.Log; public class BeaconService extends Service implements Runnable, BeaconConnection.BeaconConnectionListener { static final String SERVICE_NAME = "BlueBeacon"; public static final UUID SERVICE_UUID = UUID.fromString("c9faf940-e20c-11e3-9ffa-0002a5d5c51b"); public static final String EXTRA_BT_ADDRESS = "extra_bt_address"; public static final int MSG_HELLO = 1; public static final int MSG_SEND_MESSAGE = 2; public static final String MSG_DATA_ADDRESS = "address"; public static final String MSG_DATA_MESSAGE = "message"; public static final int MSG_BEHOLD_ADDRESS = 3; public static final String MSG_DATA_BEHOLD_ADDRESS = "behold_address"; Vector<String> bannedAddress = new Vector<String>(); ContentObserver observer = new ContentObserver(new Handler()) { public void onChange(boolean selfChange) { loadBannedBeacons(); } }; BluetoothServerSocket serverSocket; Vector<BeaconConnection> connections = new Vector<BeaconConnection>(); Messenger boundMessenger; String beholdingAddress; static class SimpleHandler extends Handler { WeakReference<BeaconService> target; public SimpleHandler(BeaconService service) { target = new WeakReference<BeaconService>(service); } @Override public void handleMessage(Message msg) { boolean messageHandled = false; BeaconService service = target.get(); if (service != null) messageHandled = service.handlerMessage(msg); if (!messageHandled) super.handleMessage(msg); } } Messenger messenger = new Messenger(new SimpleHandler(this)); @Override public IBinder onBind(Intent intent) { Log.d(SERVICE_NAME, "onBind"); return messenger.getBinder(); } @Override public boolean onUnbind(Intent intent) { Log.d(SERVICE_NAME, "onUnbind"); boundMessenger = null; return super.onUnbind(intent); } public boolean handlerMessage(Message msg) { switch (msg.what) { case MSG_HELLO: this.boundMessenger = msg.replyTo; if (this.boundMessenger != null) { try { this.boundMessenger.send(Message.obtain(null, BeaconServiceProxy.MSG_HELLO)); } catch (RemoteException e) { // TODO Auto-generated catch block e.printStackTrace(); } } return true; case MSG_SEND_MESSAGE: if (!sendMessageTo(msg.getData())) Log.d(SERVICE_NAME, "sendMessageTo() failed"); return true; case MSG_BEHOLD_ADDRESS: beholdingAddress = msg.getData().getString(MSG_DATA_BEHOLD_ADDRESS); Log.d(SERVICE_NAME, "beholdingAddress: "+beholdingAddress); return true; } return false; } private boolean sendMessageTo(Bundle data) { String address = data.getString(MSG_DATA_ADDRESS); String message = data.getString(MSG_DATA_MESSAGE); Log.d(SERVICE_NAME, String.format("BeaconService.sendMessageTo %s %s", address, message)); if (address == null || address.isEmpty()) return false; BeaconConnection connection = null; for (BeaconConnection conn : connections) { if 
(conn.dev.getAddress().equals(address)) { connection = conn; Log.d(SERVICE_NAME, "found existing connection"); break; } } if (connection == null) { connection = connect(address); } if (connection == null) { Log.d(SERVICE_NAME, "failed to connect"); return false; } return connection.sendMessage(message); } @Override public void run() { try { serverSocket = BluetoothAdapter.getDefaultAdapter().listenUsingInsecureRfcommWithServiceRecord(SERVICE_NAME, SERVICE_UUID); while (true) { Log.d(SERVICE_NAME, "BlueBeacon is waiting for a client"); BluetoothSocket clientSocket = serverSocket.accept(); if (bannedAddress.contains(clientSocket.getRemoteDevice().getAddress())) { clientSocket.close(); Log.d(SERVICE_NAME, "A client denied: "+clientSocket.getRemoteDevice().getAddress()); } else { Log.d(SERVICE_NAME, "A client is connected: "+clientSocket.getRemoteDevice().getAddress()); connections.add(new BeaconConnection(this, clientSocket, clientSocket.getRemoteDevice(), this)); //startChatActivity(clientSocket.getRemoteDevice().getAddress()); } } } catch (IOException e) { e.printStackTrace(); Log.d(SERVICE_NAME, "Listening thread stopped."); } } @Override public void onCreate() { super.onCreate(); Log.d(SERVICE_NAME, "onCreate"); new Thread(this).start(); getContentResolver().registerContentObserver(BlueBeaconProvider.CONTENT_URI_BEACON, false, observer); } @Override public void onDestroy() { Log.d(SERVICE_NAME, "onDestroy"); if (serverSocket != null) { try { serverSocket.close(); } catch (IOException e) { e.printStackTrace(); } } disconnectAll(); getContentResolver().unregisterContentObserver(observer); super.onDestroy(); } private void disconnectAll() { synchronized (connections) { Log.d(SERVICE_NAME, String.format("disconnecting all connections...(%d)", connections.size())); for (BeaconConnection conn : connections) { conn.disconnect(); } } } @Override public void onDisconnected(BeaconConnection conn) { synchronized (connections) { Log.d(SERVICE_NAME, "onDisconnected: "+conn.getRemoteAddress()); if (!connections.remove(conn)) Log.e(SERVICE_NAME, "onDisconnected: disconnected connection is not found in connection list"); Log.d(SERVICE_NAME, "onDisconnected: number of remaining connection(s) = "+connections.size()); } } private BeaconConnection connect(BluetoothDevice dev) { if (dev == null) return null; BluetoothSocket socket; try { //socket = dev.createRfcommSocketToServiceRecord(SERVICE_UUID); socket = dev.createInsecureRfcommSocketToServiceRecord(SERVICE_UUID); } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); return null; } int tryCount = 0; while (tryCount < 10) { try { socket.connect(); BeaconConnection conn = new BeaconConnection(this, socket, dev, this); connections.add(conn); return conn; } catch (IOException e) { tryCount++; } } return null; } private BeaconConnection connect(String address) { return connect(BluetoothAdapter.getDefaultAdapter().getRemoteDevice(address)); } private void loadBannedBeacons() { Cursor cursor = BlueBeaconProvider.queryBannedBeacons(getContentResolver()); bannedAddress.clear(); if (cursor.getCount() == 0) return; cursor.moveToFirst(); do { bannedAddress.add(cursor.getString(cursor.getColumnIndexOrThrow(BlueBeaconDBHelper.BeaconEntry.COLUMN_NAME_ADDRESS))); } while (cursor.moveToNext()); checkBannedConnection(); } private void checkBannedConnection() { synchronized(connections) { for (BeaconConnection conn : connections) { if (bannedAddress.contains(conn.remoteAddress)) { conn.disconnect(); } } } } @Override public void 
onReceiveMessage(BeaconConnection conn, String message) { if (beholdingAddress == null || !beholdingAddress.equals(conn.getRemoteAddress())) notifyMessageReceived(conn.getRemoteAddress(), message); } private void notifyMessageReceived(String remoteAddress, String message) { NotificationManager nm = (NotificationManager)getSystemService(Context.NOTIFICATION_SERVICE); nm.notify(0, new Notification.Builder(this) .setSmallIcon(R.drawable.ic_launcher) .setContentTitle(getString(R.string.app_name)) .setContentText(message) .setContentIntent(PendingIntent.getActivity(this, 0, new Intent(this, ChatActivity.class).putExtra(ChatActivity.EXTRA_ADDRESS, remoteAddress).addFlags(Intent.FLAG_ACTIVITY_SINGLE_TOP), PendingIntent.FLAG_UPDATE_CURRENT)) .setAutoCancel(true).setVibrate(new long[] {0, 500}) .build()); } }
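Clients reach the service through the Messenger returned from onBind(): register with MSG_HELLO so the service can reply, then send chat text with MSG_SEND_MESSAGE using the bundle keys defined above. A minimal client sketch (the class name is illustrative; the reply Messenger is left out for brevity):

import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.ServiceConnection;
import android.os.Bundle;
import android.os.IBinder;
import android.os.Message;
import android.os.Messenger;
import android.os.RemoteException;

public class BeaconClientSketch implements ServiceConnection {
    private Messenger service;

    public void bind(Context context) {
        context.bindService(new Intent(context, BeaconService.class), this, Context.BIND_AUTO_CREATE);
    }

    @Override
    public void onServiceConnected(ComponentName name, IBinder binder) {
        service = new Messenger(binder);
        try {
            // MSG_HELLO lets the service capture msg.replyTo as its boundMessenger.
            service.send(Message.obtain(null, BeaconService.MSG_HELLO));
        } catch (RemoteException e) {
            e.printStackTrace();
        }
    }

    @Override
    public void onServiceDisconnected(ComponentName name) {
        service = null;
    }

    public void sendChat(String btAddress, String text) {
        if (service == null) return;
        Message msg = Message.obtain(null, BeaconService.MSG_SEND_MESSAGE);
        Bundle data = new Bundle();
        data.putString(BeaconService.MSG_DATA_ADDRESS, btAddress);
        data.putString(BeaconService.MSG_DATA_MESSAGE, text);
        msg.setData(data);
        try {
            service.send(msg);
        } catch (RemoteException e) {
            e.printStackTrace();
        }
    }
}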
package com.google.devrel.training.conference.spi; import static com.google.devrel.training.conference.service.OfyService.ofy; import javax.inject.Named; import com.google.api.server.spi.response.NotFoundException; import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.logging.Logger; import com.google.appengine.api.memcache.MemcacheService; import com.google.appengine.api.memcache.MemcacheServiceFactory; import com.google.api.server.spi.config.Api; import com.google.api.server.spi.config.ApiMethod; import com.google.api.server.spi.config.ApiMethod.HttpMethod; //import com.google.api.server.spi.config.Named; import com.google.api.server.spi.response.ConflictException; import com.google.api.server.spi.response.ForbiddenException; //import com.google.api.server.spi.response.NotFoundException; import com.google.api.server.spi.response.UnauthorizedException; //import com.google.appengine.api.memcache.MemcacheServiceFactory; import com.google.appengine.api.taskqueue.Queue; import com.google.appengine.api.taskqueue.QueueFactory; import com.google.appengine.api.taskqueue.TaskOptions; //import com.google.appengine.api.memcache.MemcacheServicePb.MemcacheService; import com.google.appengine.api.users.User; //import com.google.appengine.repackaged.org.apache.commons.logging.Log; import com.google.devrel.training.conference.Constants; import com.google.devrel.training.conference.domain.Announcement; import com.google.devrel.training.conference.domain.Conference; import com.google.devrel.training.conference.domain.Profile; import com.google.devrel.training.conference.service.OfyService; import com.google.devrel.training.conference.form.ConferenceForm; import com.google.devrel.training.conference.form.ConferenceQueryForm; import com.google.devrel.training.conference.form.ProfileForm; import com.google.devrel.training.conference.form.ProfileForm.TeeShirtSize; import com.googlecode.objectify.Key; import com.googlecode.objectify.Objectify; import com.googlecode.objectify.Work; import com.googlecode.objectify.cmd.Query; /** * Defines conference APIs. */ @Api(name = "conference", version = "v1", scopes = { Constants.EMAIL_SCOPE }, clientIds = { Constants.WEB_CLIENT_ID, Constants.ANDROID_CLIENT_ID, Constants.API_EXPLORER_CLIENT_ID }, audiences = {Constants.ANDROID_AUDIENCE}, description = "API for the Conference Central Backend application.") public class ConferenceApi { private static final Logger LOG = Logger.getLogger(ConferenceApi.class.getName()); /* * Get the display name from the user's email. For example, if the email is * lemoncake@example.com, then the display name becomes "lemoncake." */ private static String extractDefaultDisplayNameFromEmail(String email) { return email == null ? null : email.substring(0, email.indexOf("@")); } /** * Creates or updates a Profile object associated with the given user * object. * * @param user * A User object injected by the cloud endpoints. * @param profileForm * A ProfileForm object sent from the client form. * @return Profile object just created. * @throws UnauthorizedException * when the User object is null. 
*/ // Declare this method as a method available externally through Endpoints @ApiMethod(name = "saveProfile", path = "profile", httpMethod = HttpMethod.POST) // The request that invokes this method should provide data that // conforms to the fields defined in ProfileForm // TODO 1 Pass the ProfileForm parameter // TODO 2 Pass the User parameter public Profile saveProfile(final User user, ProfileForm profileForm) throws UnauthorizedException { // String userId = null; //String mainEmail = null; //String displayName = "Your name will go here"; // TeeShirtSize teeShirtSize = TeeShirtSize.NOT_SPECIFIED; // TODO 2 // If the user is not logged in, throw an UnauthorizedException if (user == null){ throw new UnauthorizedException("Authorization required"); } // TODO 1 // Set the teeShirtSize to the value sent by the ProfileForm, if sent // otherwise leave it as the default value // TODO 1 // Set the displayName to the value sent by the ProfileForm, if sent // otherwise set it to null // TODO 2 // Get the userId and mainEmail String mainEmail = user.getEmail(); String userId = user.getUserId(); String displayName = profileForm.getDisplayName(); TeeShirtSize teeShirtSize = profileForm.getTeeShirtSize(); Profile profile = ofy().load().key(Key.create(Profile.class, userId)).now(); if(profile == null){ if(displayName == null){ displayName = extractDefaultDisplayNameFromEmail(user.getEmail()); } if(teeShirtSize == null){ teeShirtSize = TeeShirtSize.NOT_SPECIFIED; } profile = new Profile(userId, displayName, mainEmail, teeShirtSize); } else{ profile.update(displayName, teeShirtSize); } // TODO 2 // If the displayName is null, set it to default value based on the user's email // by calling extractDefaultDisplayNameFromEmail(...) // Create a new Profile entity from the // userId, displayName, mainEmail and teeShirtSize // TODO 3 (In Lesson 3) // Save the Profile entity in the datastore ofy().save().entity(profile).now(); // Return the profile return profile; } /** * Returns a Profile object associated with the given user object. The cloud * endpoints system automatically injects the User object. * * @param user * A User object injected by the cloud endpoints. * @return Profile object. * @throws UnauthorizedException * when the User object is null. */ @ApiMethod(name = "getProfile", path = "profile", httpMethod = HttpMethod.GET) public Profile getProfile(final User user) throws UnauthorizedException { if (user == null) { throw new UnauthorizedException("Authorization required"); } // TODO // load the Profile Entity String userId = user.getUserId(); Key<Profile> key = Key.create(Profile.class,userId); Profile profile = ofy().load().key(key).now(); return profile; } /** * Gets the Profile entity for the current user * or creates it if it doesn't exist * @param user * @return user's Profile */ private static Profile getProfileFromUser(User user) { // First fetch the user's Profile from the datastore. Profile profile = ofy().load().key( Key.create(Profile.class, user.getUserId())).now(); if (profile == null) { // Create a new Profile if it doesn't exist. // Use default displayName and teeShirtSize String email = user.getEmail(); profile = new Profile(user.getUserId(), extractDefaultDisplayNameFromEmail(email), email, TeeShirtSize.NOT_SPECIFIED); } return profile; } /** * Creates a new Conference object and stores it to the datastore. * * @param user A user who invokes this method, null when the user is not signed in. * @param conferenceForm A ConferenceForm object representing user's inputs.
     * @return A newly created Conference Object.
     * @throws UnauthorizedException when the user is not signed in.
     */
    @ApiMethod(name = "createConference", path = "conference", httpMethod = HttpMethod.POST)
    public Conference createConference(final User user, final ConferenceForm conferenceForm)
            throws UnauthorizedException {
        if (user == null) {
            throw new UnauthorizedException("Authorization required");
        }

        // Get the userId of the logged in User.
        final String userId = user.getUserId();

        // Get the key for the User's Profile.
        Key<Profile> profileKey = Key.create(Profile.class, userId);

        // Allocate a key for the conference -- let App Engine allocate the ID.
        // The parent Profile is included in the allocated key.
        final Key<Conference> conferenceKey = OfyService.factory().allocateId(profileKey, Conference.class);

        // Get the Conference Id from the Key.
        final long conferenceId = conferenceKey.getId();

        final Queue queue = QueueFactory.getDefaultQueue();

        // Start the transaction.
        Conference conference = ofy().transact(new Work<Conference>() {
            @Override
            public Conference run() {
                // Get the existing Profile entity for the current user if there is one;
                // otherwise create a new Profile entity with default values.
                Profile profile = getProfileFromUser(user);

                // Create a new Conference entity, specifying the user's Profile entity
                // as the parent of the conference.
                Conference conference = new Conference(conferenceId, userId, conferenceForm);

                // Save Conference and Profile entities.
                ofy().save().entities(profile, conference).now();

                // Enqueue the confirmation email transactionally.
                queue.add(ofy().getTransaction(),
                        TaskOptions.Builder.withUrl("/tasks/send_confirmation_email")
                                .param("email", profile.getMainEmail())
                                .param("conferenceInfo", conference.toString()));
                return conference;
            }
        });
        return conference;
    }

    @ApiMethod(
            name = "queryConferences",
            path = "queryConferences",
            httpMethod = HttpMethod.POST
    )
    public List<Conference> queryConferences(ConferenceQueryForm conferenceQueryForm) {
        Iterable<Conference> conferenceIterable = conferenceQueryForm.getQuery();
        List<Conference> result = new ArrayList<>(0);
        List<Key<Profile>> organizersKeyList = new ArrayList<>(0);
        for (Conference conference : conferenceIterable) {
            organizersKeyList.add(Key.create(Profile.class, conference.getOrganizerUserId()));
            result.add(conference);
        }
        // To avoid separate datastore gets for each Conference, pre-fetch the Profiles.
        ofy().load().keys(organizersKeyList);
        return result;
    }

    @ApiMethod(
            name = "getConferencesCreated",
            path = "getConferencesCreated",
            httpMethod = HttpMethod.POST
    )
    public List<Conference> getConferencesCreated(final User user) throws UnauthorizedException {
        // If not signed in, throw a 401 error.
        if (user == null) {
            throw new UnauthorizedException("Authorization required");
        }
        String userId = user.getUserId();
        Key<Profile> userKey = Key.create(Profile.class, userId);
        return ofy().load().type(Conference.class)
                .ancestor(userKey)
                .order("name").list();
    }

    @ApiMethod(
            name = "getConferencesFiltered",
            path = "getConferencesFiltered",
            httpMethod = HttpMethod.POST
    )
    public List<Conference> getConferencesFiltered() {
        Query<Conference> query = ofy().load().type(Conference.class);
        query = query.filter("maxAttendees >", 10);
        query = query.filter("city =", "London");
        query = query.filter("topics =", "Web Technologies");
        query = query.filter("month =", 1)
                .order("maxAttendees").order("name");
        return query.list();
    }

    /**
     * Returns a Conference object with the given conferenceId.
     *
     * @param websafeConferenceKey The String representation of the Conference Key.
     * @return a Conference object with the given conferenceId.
     * @throws NotFoundException when there is no Conference with the given conferenceId.
     */
    @ApiMethod(
            name = "getConference",
            path = "conference/{websafeConferenceKey}",
            httpMethod = HttpMethod.GET
    )
    public Conference getConference(
            @Named("websafeConferenceKey") final String websafeConferenceKey)
            throws NotFoundException {
        Key<Conference> conferenceKey = Key.create(websafeConferenceKey);
        Conference conference = ofy().load().key(conferenceKey).now();
        if (conference == null) {
            throw new NotFoundException("No Conference found with key: " + websafeConferenceKey);
        }
        return conference;
    }

    /**
     * Just a wrapper for Boolean.
     * We need this wrapped Boolean because endpoints functions must return
     * an object instance; they can't return a bare type such as
     * String, Integer, or Boolean.
     */
    public static class WrappedBoolean {

        private final Boolean result;
        private final String reason;

        public WrappedBoolean(Boolean result) {
            this.result = result;
            this.reason = "";
        }

        public WrappedBoolean(Boolean result, String reason) {
            this.result = result;
            this.reason = reason;
        }

        public Boolean getResult() {
            return result;
        }

        public String getReason() {
            return reason;
        }
    }

    /**
     * Register to attend the specified Conference.
     *
     * @param user A user who invokes this method, null when the user is not signed in.
     * @param websafeConferenceKey The String representation of the Conference Key.
     * @return Boolean true when success, otherwise false
     * @throws UnauthorizedException when the user is not signed in.
     * @throws NotFoundException when there is no Conference with the given conferenceId.
     */
    @ApiMethod(
            name = "registerForConference",
            path = "conference/{websafeConferenceKey}/registration",
            httpMethod = HttpMethod.POST
    )
    public WrappedBoolean registerForConference(final User user,
            @Named("websafeConferenceKey") final String websafeConferenceKey)
            throws UnauthorizedException, NotFoundException,
            ForbiddenException, ConflictException {
        // If not signed in, throw a 401 error.
        if (user == null) {
            throw new UnauthorizedException("Authorization required");
        }

        // Run the whole check-and-book sequence in a single transaction.
        WrappedBoolean result = ofy().transact(new Work<WrappedBoolean>() {
            @Override
            public WrappedBoolean run() {
                try {
                    // Get the conference key.
                    Key<Conference> conferenceKey = Key.create(websafeConferenceKey);

                    // Get the Conference entity from the datastore.
                    Conference conference = ofy().load().key(conferenceKey).now();

                    // 404 when there is no Conference with the given conferenceId.
                    if (conference == null) {
                        return new WrappedBoolean(false,
                                "No Conference found with key: " + websafeConferenceKey);
                    }

                    // Get the user's Profile entity.
                    Profile profile = getProfileFromUser(user);

                    // Has the user already registered to attend this conference?
                    if (profile.getConferenceKeysToAttend().contains(websafeConferenceKey)) {
                        return new WrappedBoolean(false, "Already registered");
                    } else if (conference.getSeatsAvailable() <= 0) {
                        return new WrappedBoolean(false, "No seats available");
                    } else {
                        // All looks good, go ahead and book the seat.
                        profile.addToConferenceKeysToAttend(websafeConferenceKey);
                        conference.bookSeats(1);

                        // Save the Conference and Profile entities.
                        ofy().save().entities(profile, conference).now();

                        // We are booked!
                        return new WrappedBoolean(true);
                    }
                } catch (Exception e) {
                    return new WrappedBoolean(false, "Unknown exception");
                }
            }
        });

        // Turn a failed booking into the appropriate HTTP error.
        if (!result.getResult()) {
            if ("Already registered".equals(result.getReason())) {
                throw new ConflictException("You have already registered");
            } else if ("No seats available".equals(result.getReason())) {
                throw new ConflictException("There are no seats available");
            } else {
                throw new ForbiddenException("Unknown exception");
            }
        }
        return result;
    }

    /**
     * Returns a collection of Conference objects that the user is going to attend.
     *
     * @param user A user who invokes this method, null when the user is not signed in.
     * @return a Collection of Conferences that the user is going to attend.
     * @throws UnauthorizedException when the User object is null.
     */
    @ApiMethod(
            name = "getConferencesToAttend",
            path = "getConferencesToAttend",
            httpMethod = HttpMethod.GET
    )
    public Collection<Conference> getConferencesToAttend(final User user)
            throws UnauthorizedException, NotFoundException {
        // If not signed in, throw a 401 error.
        if (user == null) {
            throw new UnauthorizedException("Authorization required");
        }

        // Get the Profile entity for the user.
        Profile profile = getProfileFromUser(user);
        if (profile == null) {
            throw new NotFoundException("Profile doesn't exist.");
        }

        // Get the value of the profile's conferenceKeysToAttend property
        // and batch-load the corresponding Conference entities.
        List<String> keyStringsToAttend = profile.getConferenceKeysToAttend();
        List<Key<Conference>> keysToAttend = new ArrayList<>();
        for (String keyString : keyStringsToAttend) {
            keysToAttend.add(Key.<Conference>create(keyString));
        }
        return ofy().load().keys(keysToAttend).values();
    }

    /**
     * Unregister from the specified Conference.
     *
     * @param user A user who invokes this method, null when the user is not signed in.
     * @param websafeConferenceKey The String representation of the Conference Key to unregister from.
     * @return Boolean true when success, otherwise false.
     * @throws UnauthorizedException when the user is not signed in.
     * @throws NotFoundException when there is no Conference with the given conferenceId.
     */
    @ApiMethod(
            name = "unregisterFromConference",
            path = "conference/{websafeConferenceKey}/registration",
            httpMethod = HttpMethod.DELETE)
    public WrappedBoolean unregisterFromConference(
            final User user,
            @Named("websafeConferenceKey") final String websafeConferenceKey
    ) throws UnauthorizedException, NotFoundException, ForbiddenException, ConflictException {
        // If not signed in, throw a 401 error.
        if (user == null) {
            throw new UnauthorizedException("Authorization required");
        }

        // Sketch of the inverse of registerForConference (the original body was
        // an unimplemented stub returning null). It assumes the domain classes
        // expose Profile.unregisterFromConference(String) and
        // Conference.giveBackSeats(int) as the inverses of
        // addToConferenceKeysToAttend(String) and bookSeats(int).
        WrappedBoolean result = ofy().transact(new Work<WrappedBoolean>() {
            @Override
            public WrappedBoolean run() {
                Key<Conference> conferenceKey = Key.create(websafeConferenceKey);
                Conference conference = ofy().load().key(conferenceKey).now();
                if (conference == null) {
                    return new WrappedBoolean(false,
                            "No Conference found with key: " + websafeConferenceKey);
                }
                Profile profile = getProfileFromUser(user);
                if (!profile.getConferenceKeysToAttend().contains(websafeConferenceKey)) {
                    return new WrappedBoolean(false, "Not registered");
                }
                // Assumed helpers on the domain classes (see comment above).
                profile.unregisterFromConference(websafeConferenceKey);
                conference.giveBackSeats(1);
                ofy().save().entities(profile, conference).now();
                return new WrappedBoolean(true);
            }
        });
        if (!result.getResult()) {
            if ("Not registered".equals(result.getReason())) {
                throw new ConflictException("You are not registered for this conference");
            }
            throw new NotFoundException(result.getReason());
        }
        return result;
    }

    @ApiMethod(
            name = "getAnnouncement",
            path = "announcement",
            httpMethod = HttpMethod.GET
    )
    public Announcement getAnnouncement() {
        MemcacheService memcacheService = MemcacheServiceFactory.getMemcacheService();
        Object message = memcacheService.get(Constants.MEMCACHE_ANNOUNCEMENTS_KEY);
        if (message != null) {
            return new Announcement(message.toString());
        }
        return null;
    }
}
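// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the original sources): getAnnouncement()
// above only reads Memcache, so something else must write the announcement
// under Constants.MEMCACHE_ANNOUNCEMENTS_KEY (in the course app a cron-driven
// handler does this). This hypothetical package-private helper reuses the
// imports of ConferenceApi.java; only the MemcacheService calls and the
// Constants key come from the code above.
final class AnnouncementWriterSketch {

    private AnnouncementWriterSketch() {
        // static helper only
    }

    static void putAnnouncement(String message) {
        MemcacheService memcacheService = MemcacheServiceFactory.getMemcacheService();
        // Overwrites any previous value; getAnnouncement() will return it
        // until the entry is replaced or evicted.
        memcacheService.put(Constants.MEMCACHE_ANNOUNCEMENTS_KEY, message);
    }
}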
package org.reclipse.structure.generator.util; import org.eclipse.emf.ecore.EParameter; import org.reclipse.structure.generator.util.more.SDMUtil; import org.reclipse.structure.specification.OperatorType; import org.reclipse.structure.specification.PSAttributeConstraint; import org.reclipse.structure.specification.PSLink; import org.reclipse.structure.specification.PSMetricConstraint; import org.storydriven.core.expressions.Expression; import org.storydriven.core.expressions.ExpressionsFactory; import org.storydriven.core.expressions.TextualExpression; import org.storydriven.core.expressions.common.CommonExpressionsFactory; import org.storydriven.core.expressions.common.LiteralExpression; import org.storydriven.storydiagrams.activities.Activity; import org.storydriven.storydiagrams.calls.CallsFactory; import org.storydriven.storydiagrams.calls.ParameterExtension; import org.storydriven.storydiagrams.calls.expressions.CallsExpressionsFactory; import org.storydriven.storydiagrams.calls.expressions.ParameterExpression; import org.storydriven.storydiagrams.patterns.ObjectVariable; import org.storydriven.storydiagrams.patterns.PrimitiveVariable; import org.storydriven.storydiagrams.patterns.expressions.AttributeValueExpression; import org.storydriven.storydiagrams.patterns.expressions.PatternsExpressionsFactory; import org.storydriven.storydiagrams.patterns.expressions.PrimitiveVariableExpression; public final class ExpressionsUtil { private static final PatternsExpressionsFactory PATTERNS_FACTORY = PatternsExpressionsFactory.eINSTANCE; private static final CallsExpressionsFactory CALLS_FACTORY = CallsExpressionsFactory.eINSTANCE; private static final ExpressionsFactory BASE_FACTORY = ExpressionsFactory.eINSTANCE; private static final CommonExpressionsFactory COMMON_FACTORY = CommonExpressionsFactory.eINSTANCE; /** * This variable determines if only OCL expressions are generated by the util or if expressions * using the specialized expressions metamodel of story diagrams are generated. As of the release * of the techreport "Story Diagrams - Syntax and Semantics v0.2" only OCL expressions are * supported by the SD Interpreter. 
*/ private static final boolean USE_ONLY_OCL_EXPRESSIONS = true; private ExpressionsUtil() { // hide constructor } public static Expression getContextBindingExpression(Activity activity) { for (EParameter param : activity.getInParameters()) { if (Constants.VAR_CONTEXT.equals(param.getName())) { ParameterExtension extension = CallsFactory.eINSTANCE.createParameterExtension(); extension.setParameter(param); ParameterExpression element = CallsExpressionsFactory.eINSTANCE.createParameterExpression(); element.setParameter(extension); return element; } } return null; } public static TextualExpression createOCLExpression(String expressionText) { TextualExpression expr = BASE_FACTORY.createTextualExpression(); expr.setLanguage("OCL"); expr.setLanguageVersion("1.0"); expr.setExpressionText(expressionText); return expr; } public static Expression createParameterExpression(EParameter parameter) { if (USE_ONLY_OCL_EXPRESSIONS) { return createOCLParameterExpression(parameter); } else { ParameterExtension extension = CallsFactory.eINSTANCE.createParameterExtension(); extension.setParameter(parameter); ParameterExpression element = CALLS_FACTORY.createParameterExpression(); element.setParameter(extension); return element; } } public static TextualExpression createOCLParameterExpression(EParameter parameter) { return createOCLExpression(parameter.getName()); } public static Expression createAttributeExpression(ObjectVariable variable, PSAttributeConstraint constraint) { if (USE_ONLY_OCL_EXPRESSIONS) { return createOCLAttributeExpression(variable, constraint); } else { AttributeValueExpression attributeValueExpression = PATTERNS_FACTORY.createAttributeValueExpression(); attributeValueExpression.setObject(variable); attributeValueExpression.setAttribute(constraint.getAttribute()); LiteralExpression literalExpression = COMMON_FACTORY.createLiteralExpression(); literalExpression.setValue(constraint.getValueExpression()); return SDMUtil .createComparingExpression(attributeValueExpression, constraint.getOperator(), literalExpression); } } public static TextualExpression createOCLAttributeExpression(ObjectVariable variable, PSAttributeConstraint constraint) { String attribute = variable.getName() + "." 
                + constraint.getAttribute().getName();

        String operator = "";
        switch (constraint.getOperator().getValue()) {
        case OperatorType.LESS_VALUE:
            operator = "<";
            break;
        case OperatorType.LESS_OR_EQUAL_VALUE:
            operator = "<=";
            break;
        case OperatorType.GREATER_VALUE:
            operator = ">";
            break;
        case OperatorType.GREATER_OR_EQUAL_VALUE:
            operator = ">=";
            break;
        case OperatorType.EQUAL_VALUE:
            operator = "=";
            break;
        case OperatorType.UNEQUAL_VALUE:
            operator = "<>";
            break;
        default:
            assert false : "Unexpected operator type: " + constraint.getOperator().getName();
        }

        String value = constraint.getValueExpression();
        return createOCLExpression(attribute + operator + value);
    }

    public static Expression createMetricExpression(PSMetricConstraint constraint) {
        // FIXME: implement metric expression
        throw new UnsupportedOperationException("Metric Expressions are not yet implemented!");
    }

    public static Expression createQualifierExpression(PSLink link) {
        if (USE_ONLY_OCL_EXPRESSIONS) {
            return createOCLQualifierExpression(link);
        } else {
            LiteralExpression expression = COMMON_FACTORY.createLiteralExpression();
            expression.setValue(link.getQualifier());
            return expression;
        }
    }

    public static Expression createQualifierExpression(String qualifier) {
        if (USE_ONLY_OCL_EXPRESSIONS) {
            return createOCLQualifierExpression(qualifier);
        } else {
            LiteralExpression expression = COMMON_FACTORY.createLiteralExpression();
            expression.setValue(qualifier);
            return expression;
        }
    }

    public static TextualExpression createOCLQualifierExpression(PSLink link) {
        return createOCLExpression("'" + link.getQualifier() + "'");
    }

    public static TextualExpression createOCLQualifierExpression(String qualifier) {
        return createOCLExpression("'" + qualifier + "'");
    }

    public static Expression createEBooleanExpression(boolean value) {
        if (USE_ONLY_OCL_EXPRESSIONS) {
            return createOCLBooleanExpression(value);
        } else {
            LiteralExpression expression = COMMON_FACTORY.createLiteralExpression();
            expression.setValue(String.valueOf(value));
            return expression;
        }
    }

    public static TextualExpression createOCLBooleanExpression(boolean value) {
        return createOCLExpression(String.valueOf(value));
    }

    public static Expression createPrimitiveVariableExpression(PrimitiveVariable variable) {
        if (USE_ONLY_OCL_EXPRESSIONS) {
            return createOCLPrimitiveVariableExpression(variable);
        } else {
            PrimitiveVariableExpression expression = PATTERNS_FACTORY.createPrimitiveVariableExpression();
            expression.setPrimitiveVariable(variable);
            return expression;
        }
    }

    public static TextualExpression createOCLPrimitiveVariableExpression(PrimitiveVariable variable) {
        return createOCLExpression(variable.getVariableName());
    }
}
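// Illustrative sketch (not part of the original file): shows the shape of the
// OCL text that createOCLAttributeExpression(...) above assembles, here for a
// pattern variable "var" with an UNEQUAL constraint on "name" against 'foo'.
// The class and the literal expression text are demonstration-only assumptions.
final class ExpressionsUtilExampleSketch {

    private ExpressionsUtilExampleSketch() {
        // hide constructor, mirroring ExpressionsUtil
    }

    static TextualExpression nameConstraintExample() {
        // Equivalent to the "<variable>.<attribute><operator><value>" string
        // built above; the result is a TextualExpression with language "OCL"
        // and language version "1.0".
        return ExpressionsUtil.createOCLExpression("var.name<>'foo'");
    }
}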
package net.minecraft.network; import com.google.gson.JsonArray; import com.google.gson.JsonDeserializationContext; import com.google.gson.JsonDeserializer; import com.google.gson.JsonElement; import com.google.gson.JsonObject; import com.google.gson.JsonSerializationContext; import com.google.gson.JsonSerializer; import com.mojang.authlib.GameProfile; import java.lang.reflect.Type; import java.util.UUID; import net.minecraft.util.IChatComponent; import net.minecraft.util.JsonUtils; public class ServerStatusResponse { private IChatComponent serverMotd; private ServerStatusResponse.PlayerCountData playerCount; private ServerStatusResponse.MinecraftProtocolVersionIdentifier protocolVersion; private String favicon; private static final String __OBFID = "CL_00001385"; public IChatComponent getServerDescription() { return this.serverMotd; } public void setServerDescription(IChatComponent motd) { this.serverMotd = motd; } public ServerStatusResponse.PlayerCountData getPlayerCountData() { return this.playerCount; } public void setPlayerCountData(ServerStatusResponse.PlayerCountData countData) { this.playerCount = countData; } public ServerStatusResponse.MinecraftProtocolVersionIdentifier getProtocolVersionInfo() { return this.protocolVersion; } public void setProtocolVersionInfo(ServerStatusResponse.MinecraftProtocolVersionIdentifier protocolVersionData) { this.protocolVersion = protocolVersionData; } public void setFavicon(String faviconBlob) { this.favicon = faviconBlob; } public String getFavicon() { return this.favicon; } public static class MinecraftProtocolVersionIdentifier { private final String name; private final int protocol; private static final String __OBFID = "CL_00001389"; public MinecraftProtocolVersionIdentifier(String nameIn, int protocolIn) { this.name = nameIn; this.protocol = protocolIn; } public String getName() { return this.name; } public int getProtocol() { return this.protocol; } public static class Serializer implements JsonDeserializer, JsonSerializer { private static final String __OBFID = "CL_00001390"; public ServerStatusResponse.MinecraftProtocolVersionIdentifier deserialize(JsonElement p_deserialize_1_, Type p_deserialize_2_, JsonDeserializationContext p_deserialize_3_) { JsonObject var4 = JsonUtils.getElementAsJsonObject(p_deserialize_1_, "version"); return new ServerStatusResponse.MinecraftProtocolVersionIdentifier(JsonUtils.getJsonObjectStringFieldValue(var4, "name"), JsonUtils.getJsonObjectIntegerFieldValue(var4, "protocol")); } public JsonElement serialize(ServerStatusResponse.MinecraftProtocolVersionIdentifier p_serialize_1_, Type p_serialize_2_, JsonSerializationContext p_serialize_3_) { JsonObject var4 = new JsonObject(); var4.addProperty("name", p_serialize_1_.getName()); var4.addProperty("protocol", Integer.valueOf(p_serialize_1_.getProtocol())); return var4; } public JsonElement serialize(Object p_serialize_1_, Type p_serialize_2_, JsonSerializationContext p_serialize_3_) { return this.serialize((ServerStatusResponse.MinecraftProtocolVersionIdentifier)p_serialize_1_, p_serialize_2_, p_serialize_3_); } } } public static class PlayerCountData { private final int maxPlayers; private final int onlinePlayerCount; private GameProfile[] players; private static final String __OBFID = "CL_00001386"; public PlayerCountData(int p_i45274_1_, int p_i45274_2_) { this.maxPlayers = p_i45274_1_; this.onlinePlayerCount = p_i45274_2_; } public int getMaxPlayers() { return this.maxPlayers; } public int getOnlinePlayerCount() { return this.onlinePlayerCount; } public 
GameProfile[] getPlayers() { return this.players; } public void setPlayers(GameProfile[] playersIn) { this.players = playersIn; } public static class Serializer implements JsonDeserializer, JsonSerializer { private static final String __OBFID = "CL_00001387"; public ServerStatusResponse.PlayerCountData deserialize(JsonElement p_deserialize_1_, Type p_deserialize_2_, JsonDeserializationContext p_deserialize_3_) { JsonObject var4 = JsonUtils.getElementAsJsonObject(p_deserialize_1_, "players"); ServerStatusResponse.PlayerCountData var5 = new ServerStatusResponse.PlayerCountData(JsonUtils.getJsonObjectIntegerFieldValue(var4, "max"), JsonUtils.getJsonObjectIntegerFieldValue(var4, "online")); if (JsonUtils.jsonObjectFieldTypeIsArray(var4, "sample")) { JsonArray var6 = JsonUtils.getJsonObjectJsonArrayField(var4, "sample"); if (var6.size() > 0) { GameProfile[] var7 = new GameProfile[var6.size()]; for (int var8 = 0; var8 < var7.length; ++var8) { JsonObject var9 = JsonUtils.getElementAsJsonObject(var6.get(var8), "player[" + var8 + "]"); String var10 = JsonUtils.getJsonObjectStringFieldValue(var9, "id"); var7[var8] = new GameProfile(UUID.fromString(var10), JsonUtils.getJsonObjectStringFieldValue(var9, "name")); } var5.setPlayers(var7); } } return var5; } public JsonElement serialize(ServerStatusResponse.PlayerCountData p_serialize_1_, Type p_serialize_2_, JsonSerializationContext p_serialize_3_) { JsonObject var4 = new JsonObject(); var4.addProperty("max", Integer.valueOf(p_serialize_1_.getMaxPlayers())); var4.addProperty("online", Integer.valueOf(p_serialize_1_.getOnlinePlayerCount())); if (p_serialize_1_.getPlayers() != null && p_serialize_1_.getPlayers().length > 0) { JsonArray var5 = new JsonArray(); for (int var6 = 0; var6 < p_serialize_1_.getPlayers().length; ++var6) { JsonObject var7 = new JsonObject(); UUID var8 = p_serialize_1_.getPlayers()[var6].getId(); var7.addProperty("id", var8 == null ? 
"" : var8.toString()); var7.addProperty("name", p_serialize_1_.getPlayers()[var6].getName()); var5.add(var7); } var4.add("sample", var5); } return var4; } public JsonElement serialize(Object p_serialize_1_, Type p_serialize_2_, JsonSerializationContext p_serialize_3_) { return this.serialize((ServerStatusResponse.PlayerCountData)p_serialize_1_, p_serialize_2_, p_serialize_3_); } } } public static class Serializer implements JsonDeserializer, JsonSerializer { private static final String __OBFID = "CL_00001388"; public ServerStatusResponse deserialize(JsonElement p_deserialize_1_, Type p_deserialize_2_, JsonDeserializationContext p_deserialize_3_) { JsonObject var4 = JsonUtils.getElementAsJsonObject(p_deserialize_1_, "status"); ServerStatusResponse var5 = new ServerStatusResponse(); if (var4.has("description")) { var5.setServerDescription((IChatComponent)p_deserialize_3_.deserialize(var4.get("description"), IChatComponent.class)); } if (var4.has("players")) { var5.setPlayerCountData((ServerStatusResponse.PlayerCountData)p_deserialize_3_.deserialize(var4.get("players"), ServerStatusResponse.PlayerCountData.class)); } if (var4.has("version")) { var5.setProtocolVersionInfo((ServerStatusResponse.MinecraftProtocolVersionIdentifier)p_deserialize_3_.deserialize(var4.get("version"), ServerStatusResponse.MinecraftProtocolVersionIdentifier.class)); } if (var4.has("favicon")) { var5.setFavicon(JsonUtils.getJsonObjectStringFieldValue(var4, "favicon")); } return var5; } public JsonElement serialize(ServerStatusResponse p_serialize_1_, Type p_serialize_2_, JsonSerializationContext p_serialize_3_) { JsonObject var4 = new JsonObject(); if (p_serialize_1_.getServerDescription() != null) { var4.add("description", p_serialize_3_.serialize(p_serialize_1_.getServerDescription())); } if (p_serialize_1_.getPlayerCountData() != null) { var4.add("players", p_serialize_3_.serialize(p_serialize_1_.getPlayerCountData())); } if (p_serialize_1_.getProtocolVersionInfo() != null) { var4.add("version", p_serialize_3_.serialize(p_serialize_1_.getProtocolVersionInfo())); } if (p_serialize_1_.getFavicon() != null) { var4.addProperty("favicon", p_serialize_1_.getFavicon()); } return var4; } public JsonElement serialize(Object p_serialize_1_, Type p_serialize_2_, JsonSerializationContext p_serialize_3_) { return this.serialize((ServerStatusResponse)p_serialize_1_, p_serialize_2_, p_serialize_3_); } } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.accumulo.hadoopImpl.mapreduce; import java.util.Collection; import java.util.Collections; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Properties; import org.apache.accumulo.core.client.Accumulo; import org.apache.accumulo.core.client.AccumuloClient; import org.apache.accumulo.core.client.AccumuloException; import org.apache.accumulo.core.client.AccumuloSecurityException; import org.apache.accumulo.core.client.IteratorSetting; import org.apache.accumulo.core.client.sample.SamplerConfiguration; import org.apache.accumulo.core.conf.ClientProperty; import org.apache.accumulo.core.data.Range; import org.apache.accumulo.core.security.Authorizations; import org.apache.accumulo.hadoop.mapreduce.InputFormatBuilder; import org.apache.accumulo.hadoopImpl.mapreduce.lib.InputConfigurator; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.mapred.JobConf; import org.apache.hadoop.mapreduce.Job; public class InputFormatBuilderImpl<T> implements InputFormatBuilder, InputFormatBuilder.ClientParams<T>, InputFormatBuilder.TableParams<T>, InputFormatBuilder.InputFormatOptions<T> { private Class<?> callingClass; private Properties clientProps; private String clientPropsPath; private String currentTable; private Map<String,InputTableConfig> tableConfigMap = Collections.emptyMap(); public InputFormatBuilderImpl(Class<?> callingClass) { this.callingClass = callingClass; } @Override public InputFormatBuilder.TableParams<T> clientProperties(Properties clientProperties) { this.clientProps = Objects.requireNonNull(clientProperties, "clientProperties must not be null"); return this; } @Override public TableParams<T> clientPropertiesPath(String clientPropsPath) { this.clientPropsPath = Objects.requireNonNull(clientPropsPath, "clientPropsPath must not be null"); return this; } @Override public InputFormatBuilder.InputFormatOptions<T> table(String tableName) { this.currentTable = Objects.requireNonNull(tableName, "Table name must not be null"); if (tableConfigMap.isEmpty()) { tableConfigMap = new LinkedHashMap<>(); } tableConfigMap.put(currentTable, new InputTableConfig()); return this; } @Override public InputFormatBuilder.InputFormatOptions<T> auths(Authorizations auths) { tableConfigMap.get(currentTable) .setScanAuths(Objects.requireNonNull(auths, "Authorizations must not be null")); return this; } @Override public InputFormatBuilder.InputFormatOptions<T> classLoaderContext(String context) { tableConfigMap.get(currentTable).setContext(context); return this; } @Override public InputFormatBuilder.InputFormatOptions<T> ranges(Collection<Range> ranges) { List<Range> newRanges = List.copyOf(Objects.requireNonNull(ranges, "Collection of 
ranges is null")); if (newRanges.isEmpty()) { throw new IllegalArgumentException("Specified collection of ranges is empty."); } tableConfigMap.get(currentTable).setRanges(newRanges); return this; } @Override public InputFormatBuilder.InputFormatOptions<T> fetchColumns(Collection<IteratorSetting.Column> fetchColumns) { Collection<IteratorSetting.Column> newFetchColumns = List.copyOf(Objects.requireNonNull(fetchColumns, "Collection of fetch columns is null")); if (newFetchColumns.isEmpty()) { throw new IllegalArgumentException("Specified collection of fetch columns is empty."); } tableConfigMap.get(currentTable).fetchColumns(newFetchColumns); return this; } @Override public InputFormatBuilder.InputFormatOptions<T> addIterator(IteratorSetting cfg) { // store iterators by name to prevent duplicates Objects.requireNonNull(cfg, "IteratorSetting must not be null."); tableConfigMap.get(currentTable).addIterator(cfg); return this; } @Override public InputFormatBuilder.InputFormatOptions<T> executionHints(Map<String,String> hints) { Map<String,String> newHints = Map.copyOf(Objects.requireNonNull(hints, "Map of execution hints must not be null.")); if (newHints.isEmpty()) { throw new IllegalArgumentException("Specified map of execution hints is empty."); } tableConfigMap.get(currentTable).setExecutionHints(newHints); return this; } @Override public InputFormatBuilder.InputFormatOptions<T> samplerConfiguration(SamplerConfiguration samplerConfig) { tableConfigMap.get(currentTable).setSamplerConfiguration(samplerConfig); return this; } @Override public InputFormatOptions<T> autoAdjustRanges(boolean value) { tableConfigMap.get(currentTable).setAutoAdjustRanges(value); return this; } @Override public InputFormatOptions<T> scanIsolation(boolean value) { tableConfigMap.get(currentTable).setUseIsolatedScanners(value); return this; } @Override public InputFormatOptions<T> localIterators(boolean value) { tableConfigMap.get(currentTable).setUseLocalIterators(value); return this; } @Override public InputFormatOptions<T> offlineScan(boolean value) { tableConfigMap.get(currentTable).setOfflineScan(value); return this; } @Override public InputFormatOptions<T> batchScan(boolean value) { tableConfigMap.get(currentTable).setUseBatchScan(value); if (value) { tableConfigMap.get(currentTable).setAutoAdjustRanges(true); } return this; } @Override public void store(T j) throws AccumuloException, AccumuloSecurityException { if (j instanceof Job) { store((Job) j); } else if (j instanceof JobConf) { store((JobConf) j); } else { throw new IllegalArgumentException("Unexpected type " + j.getClass().getName()); } } /** * Final builder method for mapreduce configuration */ private void store(Job job) throws AccumuloException, AccumuloSecurityException { _store(job.getConfiguration()); } private void _store(Configuration conf) throws AccumuloException, AccumuloSecurityException { InputConfigurator.setClientProperties(callingClass, conf, clientProps, clientPropsPath); if (tableConfigMap.isEmpty()) { throw new IllegalArgumentException("At least one Table must be configured for job."); } // if only one table use the single table configuration method if (tableConfigMap.size() == 1) { Map.Entry<String,InputTableConfig> entry = tableConfigMap.entrySet().iterator().next(); InputConfigurator.setInputTableName(callingClass, conf, entry.getKey()); InputTableConfig config = entry.getValue(); if (!config.getScanAuths().isPresent()) { Properties props = InputConfigurator.getClientProperties(callingClass, conf); try (AccumuloClient c = 
Accumulo.newClient().from(props).build()) { String principal = ClientProperty.AUTH_PRINCIPAL.getValue(props); config.setScanAuths(c.securityOperations().getUserAuthorizations(principal)); } } InputConfigurator.setScanAuthorizations(callingClass, conf, config.getScanAuths().get()); // all optional values if (config.getContext().isPresent()) { InputConfigurator.setClassLoaderContext(callingClass, conf, config.getContext().get()); } if (!config.getRanges().isEmpty()) { InputConfigurator.setRanges(callingClass, conf, config.getRanges()); } if (!config.getIterators().isEmpty()) { InputConfigurator.writeIteratorsToConf(callingClass, conf, config.getIterators()); } if (!config.getFetchedColumns().isEmpty()) { InputConfigurator.fetchColumns(callingClass, conf, config.getFetchedColumns()); } if (config.getSamplerConfiguration() != null) { InputConfigurator.setSamplerConfiguration(callingClass, conf, config.getSamplerConfiguration()); } if (!config.getExecutionHints().isEmpty()) { InputConfigurator.setExecutionHints(callingClass, conf, config.getExecutionHints()); } InputConfigurator.setAutoAdjustRanges(callingClass, conf, config.shouldAutoAdjustRanges()); InputConfigurator.setScanIsolation(callingClass, conf, config.shouldUseIsolatedScanners()); InputConfigurator.setLocalIterators(callingClass, conf, config.shouldUseLocalIterators()); InputConfigurator.setOfflineTableScan(callingClass, conf, config.isOfflineScan()); InputConfigurator.setBatchScan(callingClass, conf, config.shouldBatchScan()); } else { InputConfigurator.setInputTableConfigs(callingClass, conf, tableConfigMap); } InputConfigurator.setJobStored(callingClass, conf); } /** * Final builder method for legacy mapred configuration */ private void store(JobConf jobConf) throws AccumuloException, AccumuloSecurityException { _store(jobConf); } }
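// Illustrative sketch (not part of the original file): typical use of this
// builder through the public mapreduce entry point. AccumuloInputFormat
// .configure() returning the ClientParams stage is assumed from the public
// accumulo-hadoop API; the chained calls are the methods implemented above.
class InputFormatBuilderUsageSketch {

    static void configureJob(Job job, Properties clientProps)
            throws AccumuloException, AccumuloSecurityException {
        org.apache.accumulo.hadoop.mapreduce.AccumuloInputFormat.configure()
                .clientProperties(clientProps)                  // ClientParams stage
                .table("example_table")                         // TableParams stage
                .auths(Authorizations.EMPTY)                    // per-table option
                .ranges(Collections.singletonList(new Range())) // whole-table range
                .store(job);                                    // validate and write to the job config
    }
}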
/*
 * $Id$
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *  http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.struts2.views.jsp;

import com.mockobjects.dynamic.Mock;
import com.opensymphony.xwork2.Action;
import com.opensymphony.xwork2.ActionContext;
import com.opensymphony.xwork2.ActionInvocation;
import com.opensymphony.xwork2.ActionProxy;
import com.opensymphony.xwork2.config.entities.ActionConfig;
import org.apache.struts2.ServletActionContext;
import org.apache.struts2.StrutsException;
import org.apache.struts2.TestAction;
import org.apache.struts2.TestActionTagResult;
import org.apache.struts2.TestConfigurationProvider;
import org.apache.struts2.components.ActionComponent;
import org.apache.struts2.dispatcher.mapper.ActionMapper;
import org.apache.struts2.dispatcher.mapper.DefaultActionMapper;

import javax.servlet.jsp.JspException;
import javax.servlet.jsp.PageContext;
import java.util.HashMap;

/**
 * Unit test for {@link ActionTag}.
 */
public class ActionTagTest extends AbstractTagTest {

    public void testActionTagWithNamespace() {
        request.setupGetServletPath(TestConfigurationProvider.TEST_NAMESPACE + "/" + "foo.action");

        ActionTag tag = new ActionTag();
        tag.setPageContext(pageContext);
        tag.setName(TestConfigurationProvider.TEST_NAMESPACE_ACTION);
        tag.setId(TestConfigurationProvider.TEST_NAMESPACE_ACTION);

        try {
            tag.doStartTag();
            ActionComponent ac = ((ActionComponent) tag.component);
            tag.doEndTag();
            ActionProxy proxy = ac.getProxy();

            Object o = pageContext.findAttribute(TestConfigurationProvider.TEST_NAMESPACE_ACTION);
            assertTrue(o instanceof TestAction);

            assertEquals(TestConfigurationProvider.TEST_NAMESPACE, proxy.getNamespace());
        } catch (JspException ex) {
            ex.printStackTrace();
            fail();
        }
    }

    public void testSimple() {
        request.setupGetServletPath("/foo.action");

        ActionConfig config = configuration.getRuntimeConfiguration().getActionConfig("", "testAction");
        container.inject(config.getInterceptors().get(0).getInterceptor());

        ActionTag tag = new ActionTag();
        tag.setPageContext(pageContext);
        tag.setName("testAction");
        tag.setId("testAction");

        int stackSize = stack.size();

        try {
            tag.doStartTag();
            tag.addParameter("foo", "myFoo");
            tag.doEndTag();

            assertEquals(stack.size(), ActionContext.getContext().getValueStack().size());
            assertEquals("myFoo", stack.findValue("#testAction.foo"));
            assertEquals(stackSize, stack.size());

            Object o = pageContext.findAttribute("testAction");
            assertTrue(o instanceof TestAction);
            assertEquals("myFoo", ((TestAction) o).getFoo());
            assertEquals(Action.SUCCESS, ((TestAction) o).getResult());
        } catch (JspException ex) {
            ex.printStackTrace();
            fail();
        }
    }

    public void testSimpleWithoutServletActionContext() {
        ServletActionContext.setRequest(null);
        ServletActionContext.setResponse(null);
        this.testSimple();
    }

    public void testSimpleWithActionMethodInOriginalURI() {
request.setupGetServletPath("/foo!foo.action"); ActionConfig config = configuration.getRuntimeConfiguration().getActionConfig("", "testAction"); container.inject(config.getInterceptors().get(0).getInterceptor()); ActionTag tag = new ActionTag(); tag.setPageContext(pageContext); tag.setName("testAction"); tag.setId("testAction"); int stackSize = stack.size(); try { tag.doStartTag(); tag.addParameter("foo", "myFoo"); tag.doEndTag(); assertEquals(stack.size(), ActionContext.getContext().getValueStack().size()); assertEquals("myFoo", stack.findValue("#testAction.foo")); assertEquals(stackSize, stack.size()); Object o = pageContext.findAttribute("testAction"); assertTrue(o instanceof TestAction); assertEquals("myFoo", ((TestAction) o).getFoo()); assertEquals(Action.SUCCESS, ((TestAction) o).getResult()); } catch (JspException ex) { ex.printStackTrace(); fail(); } } public void testActionWithExecuteResult() throws Exception { ActionTag tag = new ActionTag(); tag.setPageContext(pageContext); tag.setNamespace(""); tag.setName("testActionTagAction"); tag.setExecuteResult(true); tag.doStartTag(); // tag clear components on doEndTag ActionComponent component = (ActionComponent) tag.getComponent(); tag.doEndTag(); TestActionTagResult result = (TestActionTagResult) component.getProxy().getInvocation().getResult(); assertTrue(stack.getContext().containsKey(ServletActionContext.PAGE_CONTEXT)); assertTrue(stack.getContext().get(ServletActionContext.PAGE_CONTEXT)instanceof PageContext); assertTrue(result.isExecuted()); } public void testActionWithoutExecuteResult() throws Exception { ActionTag tag = new ActionTag(); tag.setPageContext(pageContext); tag.setNamespace(""); tag.setName("testActionTagAction"); tag.setExecuteResult(false); tag.doStartTag(); // tag clear components on doEndTag, so we need to get it here ActionComponent component = (ActionComponent) tag.getComponent(); tag.doEndTag(); TestActionTagResult result = (TestActionTagResult) component.getProxy().getInvocation().getResult(); assertTrue(stack.getContext().containsKey(ServletActionContext.PAGE_CONTEXT)); assertTrue(stack.getContext().get(ServletActionContext.PAGE_CONTEXT)instanceof PageContext); assertNull(result); // result is never executed, hence never set into invocation } public void testExecuteButResetReturnSameInvocation() throws Exception { Mock mockActionInv = new Mock(ActionInvocation.class); ActionTag tag = new ActionTag(); tag.setPageContext(pageContext); tag.setNamespace(""); tag.setName("testActionTagAction"); tag.setExecuteResult(true); ActionContext.getContext().setActionInvocation((ActionInvocation) mockActionInv.proxy()); ActionInvocation oldInvocation = ActionContext.getContext().getActionInvocation(); assertNotNull(oldInvocation); tag.doStartTag(); // tag clear components on doEndTag ActionComponent component = (ActionComponent) tag.getComponent(); tag.doEndTag(); assertTrue(oldInvocation == ActionContext.getContext().getActionInvocation()); } public void testIngoreContextParamsFalse() throws Exception { ActionTag tag = new ActionTag(); tag.setPageContext(pageContext); tag.setNamespace(""); tag.setName("testActionTagAction"); tag.setExecuteResult(false); tag.setIgnoreContextParams(false); ActionContext.getContext().getParameters().put("user", "Santa Claus"); tag.doStartTag(); // tag clear components on doEndTag, so we need to get it here ActionComponent component = (ActionComponent) tag.getComponent(); tag.doEndTag(); // check parameters, there should be one ActionInvocation ai = component.getProxy().getInvocation(); 
        ActionContext ac = ai.getInvocationContext();
        assertEquals(1, ac.getParameters().size());
    }

    public void testIgnoreContextParamsTrue() throws Exception {
        ActionTag tag = new ActionTag();
        tag.setPageContext(pageContext);
        tag.setNamespace("");
        tag.setName("testActionTagAction");
        tag.setExecuteResult(false);
        tag.setIgnoreContextParams(true);

        ActionContext.getContext().getParameters().put("user", "Santa Claus");

        tag.doStartTag();
        // tag clears components on doEndTag, so we need to get it here
        ActionComponent component = (ActionComponent) tag.getComponent();
        tag.doEndTag();

        // check parameters, there should be none
        ActionInvocation ai = component.getProxy().getInvocation();
        ActionContext ac = ai.getInvocationContext();
        assertEquals(0, ac.getParameters().size());
    }

    public void testNoNameDefined() throws Exception {
        ActionTag tag = new ActionTag();
        tag.setPageContext(pageContext);
        tag.setNamespace("");
        tag.setName(null);
        tag.setExecuteResult(false);
        try {
            tag.doStartTag();
            tag.doEndTag();
            fail("Should have thrown RuntimeException");
        } catch (StrutsException e) {
            assertEquals("tag 'actioncomponent', field 'name': Action name is required. Example: updatePerson", e.getMessage());
        }
    }

    // FIXME: Logging the error seems to cause the standard Maven build to fail
    public void testUnknownNameDefined() throws Exception {
        ActionTag tag = new ActionTag();
        tag.setPageContext(pageContext);
        tag.setNamespace("");
        tag.setName("UNKNOWN_NAME");
        tag.setExecuteResult(false);
        tag.doStartTag();
        tag.doEndTag();
        // will just log it to ERROR but we run the code to test that it works somehow
    }

    public void testActionMethodWithExecuteResult() throws Exception {
        ActionTag tag = new ActionTag();
        tag.setPageContext(pageContext);
        tag.setNamespace("");
        tag.setName("testActionTagAction!input");
        tag.setExecuteResult(true);
        ((DefaultActionMapper) container.getInstance(ActionMapper.class)).setAllowDynamicMethodCalls("true");

        tag.doStartTag();
        // tag clears components on doEndTag
        ActionComponent component = (ActionComponent) tag.getComponent();
        tag.doEndTag();

        TestActionTagResult result = (TestActionTagResult) component.getProxy().getInvocation().getResult();

        assertTrue(stack.getContext().containsKey(ServletActionContext.PAGE_CONTEXT));
        assertTrue(stack.getContext().get(ServletActionContext.PAGE_CONTEXT) instanceof PageContext);
        assertTrue(result.isExecuted());
    }

    protected void setUp() throws Exception {
        super.setUp();
        initDispatcher(new HashMap<String, String>() {{
            put("configProviders", TestConfigurationProvider.class.getName());
        }});
        createMocks();
    }

    protected void tearDown() throws Exception {
        super.tearDown();
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.phoenix.end2end;

import static org.apache.phoenix.util.TestUtil.TEST_PROPERTIES;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;

import java.math.BigDecimal;
import java.sql.Connection;
import java.sql.Date;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.Time;
import java.sql.Timestamp;
import java.util.Locale;
import java.util.Properties;

import org.apache.phoenix.expression.function.ToNumberFunction;
import org.apache.phoenix.schema.types.PDecimal;
import org.apache.phoenix.util.PhoenixRuntime;
import org.apache.phoenix.util.PropertiesUtil;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;

/**
 * Tests for the TO_NUMBER built-in function.
 *
 * @see ToNumberFunction
 *
 * @since 0.1
 */
public class ToNumberFunctionIT extends BaseClientManagedTimeIT {

    // This test changes the locale to en_US, and saves the previous locale here
    private static Locale saveLocale;

    public static final String TO_NUMBER_TABLE_NAME = "TO_NUMBER_TABLE";

    public static final String TO_NUMBER_TABLE_DDL = "create table " + TO_NUMBER_TABLE_NAME +
            "(a_id integer not null, \n" +
            "a_string char(4) not null, \n" +
            "b_string char(4), \n" +
            "a_date date, \n" +
            "a_time date, \n" +
            "a_timestamp timestamp \n" +
            "CONSTRAINT my_pk PRIMARY KEY (a_id, a_string))";

    private Date row1Date;
    private Date row2Date;
    private Date row3Date;
    private Time row1Time;
    private Time row2Time;
    private Time row3Time;
    private Timestamp row1Timestamp;
    private Timestamp row2Timestamp;
    private Timestamp row3Timestamp;

    @BeforeClass
    public static void setUpBeforeClass() {
        saveLocale = Locale.getDefault();
        Locale.setDefault(Locale.US);
    }

    @AfterClass
    public static void tearDownAfterClass() {
        Locale.setDefault(saveLocale);
    }

    @Before
    public void initTable() throws Exception {
        long ts = nextTimestamp();
        createTestTable(getUrl(), TO_NUMBER_TABLE_DDL, null, ts-2);
        String url = getUrl() + ";" + PhoenixRuntime.CURRENT_SCN_ATTRIB + "=" + ts;
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        Connection conn = DriverManager.getConnection(url, props);
        conn.setAutoCommit(false);

        PreparedStatement stmt = conn.prepareStatement(
                "upsert into " + TO_NUMBER_TABLE_NAME +
                " (a_id, " +
                " a_string," +
                " b_string," +
                " a_date," +
                " a_time," +
                " a_timestamp)" +
                "VALUES (?, ?, ?, ?, ?, ?)");

        stmt.setInt(1, 1);
        stmt.setString(2, " 1");
        stmt.setString(3, " 1");
        row1Date = new Date(System.currentTimeMillis() - 1000);
        row1Time = new Time(System.currentTimeMillis() - 1000);
        row1Timestamp = new Timestamp(System.currentTimeMillis() + 10000);
        stmt.setDate(4, row1Date);
        stmt.setTime(5, row1Time);
stmt.setTimestamp(6, row1Timestamp); stmt.execute(); stmt.setInt(1, 2); stmt.setString(2, " 2.2"); stmt.setString(3, " 2.2"); row2Date = new Date(System.currentTimeMillis() - 10000); row2Time = new Time(System.currentTimeMillis() - 1234); row2Timestamp = new Timestamp(System.currentTimeMillis() + 1234567); stmt.setDate(4, row2Date); stmt.setTime(5, row2Time); stmt.setTimestamp(6, row2Timestamp); stmt.execute(); stmt.setInt(1, 3); stmt.setString(2, "$3.3"); stmt.setString(3, "$3.3"); row3Date = new Date(System.currentTimeMillis() - 100); row3Time = new Time(System.currentTimeMillis() - 789); row3Timestamp = new Timestamp(System.currentTimeMillis() + 78901); stmt.setDate(4, row3Date); stmt.setTime(5, row3Time); stmt.setTimestamp(6, row3Timestamp); stmt.execute(); conn.commit(); conn.close(); } @Test public void testKeyFilterWithIntegerValue() throws Exception { String query = "SELECT a_id FROM " + TO_NUMBER_TABLE_NAME + " WHERE to_number(a_string) = 1"; int expectedId = 1; runOneRowQueryTest(query, expectedId); } @Test public void testKeyFilterWithDoubleValue() throws Exception { String query = "SELECT a_id FROM " + TO_NUMBER_TABLE_NAME + " WHERE to_number(a_string) = 2.2"; int expectedId = 2; runOneRowQueryTest(query, expectedId); } @Test public void testNonKeyFilterWithIntegerValue() throws Exception { String query = "SELECT a_id FROM " + TO_NUMBER_TABLE_NAME + " WHERE to_number(b_string) = 1"; int expectedId = 1; runOneRowQueryTest(query, expectedId); } @Test public void testNonKeyFilterWithDoubleValue() throws Exception { String query = "SELECT a_id FROM " + TO_NUMBER_TABLE_NAME + " WHERE to_number(b_string) = 2.2"; int expectedId = 2; runOneRowQueryTest(query, expectedId); } @Test public void testKeyProjectionWithIntegerValue() throws Exception { String query = "select to_number(a_string) from " + TO_NUMBER_TABLE_NAME + " where a_id = 1"; int expectedIntValue = 1; runOneRowQueryTest(query, expectedIntValue); } @Test public void testKeyProjectionWithDecimalValue() throws Exception { String query = "select to_number(a_string) from " + TO_NUMBER_TABLE_NAME + " where a_id = 2"; BigDecimal expectedDecimalValue = (BigDecimal) PDecimal.INSTANCE.toObject("2.2"); runOneRowQueryTest(query, expectedDecimalValue); } @Test public void testNonKeyProjectionWithIntegerValue() throws Exception { String query = "select to_number(b_string) from " + TO_NUMBER_TABLE_NAME + " where a_id = 1"; int expectedIntValue = 1; runOneRowQueryTest(query, expectedIntValue); } @Test public void testNonKeyProjectionWithDecimalValue() throws Exception { String query = "select to_number(b_string) from " + TO_NUMBER_TABLE_NAME + " where a_id = 2"; BigDecimal expectedDecimalValue = (BigDecimal) PDecimal.INSTANCE.toObject("2.2"); runOneRowQueryTest(query, expectedDecimalValue); } @Test public void testKeyFilterWithPatternParam() throws Exception { String query = "SELECT a_id FROM " + TO_NUMBER_TABLE_NAME + " WHERE to_number(a_string, '\u00A4###.####') = 3.3"; int expectedId = 3; runOneRowQueryTest(query, expectedId); } @Test public void testNonKeyFilterWithPatternParam() throws Exception { String query = "SELECT a_id FROM " + TO_NUMBER_TABLE_NAME + " WHERE to_number(b_string, '\u00A4#.#') = 3.3"; int expectedId = 3; runOneRowQueryTest(query, expectedId); } @Test public void testDateFilter() throws Exception { String pattern = "yyyyMMddHHmmssZ"; String query = "SELECT a_id FROM " + TO_NUMBER_TABLE_NAME + " WHERE to_number(a_date, '" + pattern + "') = " + row1Date.getTime() ; int expectedId = 1; runOneRowQueryTest(query, 
expectedId); } @Test public void testTimeFilter() throws Exception { String pattern = "HH:mm:ss z"; String query = "SELECT a_id FROM " + TO_NUMBER_TABLE_NAME + " WHERE to_number(a_time, '" + pattern + "') = " + row1Time.getTime() ; int expectedId = 1; runOneRowQueryTest(query, expectedId); } @Test public void testDateFilterWithoutPattern() throws Exception { String query = "SELECT a_id FROM " + TO_NUMBER_TABLE_NAME + " WHERE to_number(a_date) = " + row2Date.getTime() ; int expectedId = 2; runOneRowQueryTest(query, expectedId); } @Test public void testTimeFilterWithoutPattern() throws Exception { String query = "SELECT a_id FROM " + TO_NUMBER_TABLE_NAME + " WHERE to_number(a_time) = " + row2Time.getTime() ; int expectedId = 2; runOneRowQueryTest(query, expectedId); } @Test public void testTimeStampFilter() throws Exception { String pattern = "yyMMddHHmmssZ"; String query = "SELECT a_id FROM " + TO_NUMBER_TABLE_NAME + " WHERE to_number(a_timestamp, '" + pattern + "') = " + row1Timestamp.getTime() ; int expectedId = 1; runOneRowQueryTest(query, expectedId); } @Test public void testDateProjection() throws Exception { String query = "select to_number(a_date) from " + TO_NUMBER_TABLE_NAME + " where a_id = 1"; BigDecimal expectedDecimalValue = new BigDecimal(row1Date.getTime()); runOneRowQueryTest(query, expectedDecimalValue); } @Test public void testTimeProjection() throws Exception { String query = "select to_number(a_time) from " + TO_NUMBER_TABLE_NAME + " where a_id = 2"; BigDecimal expectedDecimalValue = new BigDecimal(row2Time.getTime()); runOneRowQueryTest(query, expectedDecimalValue); } @Test public void testTimeStampProjection() throws Exception { String query = "select to_number(a_timestamp) from " + TO_NUMBER_TABLE_NAME + " where a_id = 3"; BigDecimal expectedDecimalValue = new BigDecimal(row3Timestamp.getTime()); runOneRowQueryTest(query, expectedDecimalValue); } private void runOneRowQueryTest(String oneRowQuery, BigDecimal expectedDecimalValue) throws Exception { runOneRowQueryTest(oneRowQuery, false, null, expectedDecimalValue); } private void runOneRowQueryTest(String oneRowQuery, int expectedIntValue) throws Exception { runOneRowQueryTest(oneRowQuery, true, expectedIntValue, null); } private void runOneRowQueryTest(String oneRowQuery, boolean isIntegerColumn, Integer expectedIntValue, BigDecimal expectedDecimalValue) throws Exception { long ts = nextTimestamp(); String url = getUrl() + ";" + PhoenixRuntime.CURRENT_SCN_ATTRIB + "=" + ts; Connection conn = DriverManager.getConnection(url); try { PreparedStatement statement = conn.prepareStatement(oneRowQuery); ResultSet rs = statement.executeQuery(); assertTrue (rs.next()); if (isIntegerColumn) assertEquals(expectedIntValue.intValue(), rs.getInt(1)); else assertTrue(expectedDecimalValue == rs.getBigDecimal(1) || (expectedDecimalValue != null && expectedDecimalValue.compareTo(rs.getBigDecimal(1)) == 0)); assertFalse(rs.next()); } finally { conn.close(); } } }
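// Note (not part of the original file): the '\u00A4' character used in the
// pattern tests above is java.text.DecimalFormat's currency placeholder, so a
// format like '\u00A4###.####' lets TO_NUMBER parse "$3.3" as the number 3.3
// under the en_US locale forced in setUpBeforeClass().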
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.client.ccr;

import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.client.AbstractResponseTestCase;
import org.elasticsearch.client.ccr.IndicesFollowStats.ShardFollowStats;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.xpack.core.ccr.ShardFollowNodeTaskStatus;
import org.elasticsearch.xpack.core.ccr.action.CcrStatsAction;
import org.elasticsearch.xpack.core.ccr.action.FollowStatsAction;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.NavigableMap;
import java.util.TreeMap;

import static org.hamcrest.Matchers.anyOf;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;

public class CcrStatsResponseTests extends AbstractResponseTestCase<CcrStatsAction.Response, CcrStatsResponse> {

    @Override
    protected CcrStatsAction.Response createServerTestInstance(XContentType xContentType) {
        org.elasticsearch.xpack.core.ccr.AutoFollowStats autoFollowStats = new org.elasticsearch.xpack.core.ccr.AutoFollowStats(
            randomNonNegativeLong(),
            randomNonNegativeLong(),
            randomNonNegativeLong(),
            randomReadExceptions(),
            randomTrackingClusters()
        );
        FollowStatsAction.StatsResponses statsResponse = createStatsResponse();
        return new CcrStatsAction.Response(autoFollowStats, statsResponse);
    }

    static NavigableMap<String, Tuple<Long, ElasticsearchException>> randomReadExceptions() {
        final int count = randomIntBetween(0, 16);
        final NavigableMap<String, Tuple<Long, ElasticsearchException>> readExceptions = new TreeMap<>();
        for (int i = 0; i < count; i++) {
            readExceptions.put("" + i, Tuple.tuple(randomNonNegativeLong(),
                new ElasticsearchException(new IllegalStateException("index [" + i + "]"))));
        }
        return readExceptions;
    }

    static NavigableMap<String, org.elasticsearch.xpack.core.ccr.AutoFollowStats.AutoFollowedCluster> randomTrackingClusters() {
        final int count = randomIntBetween(0, 16);
        final NavigableMap<String, org.elasticsearch.xpack.core.ccr.AutoFollowStats.AutoFollowedCluster> trackingClusters = new TreeMap<>();
        for (int i = 0; i < count; i++) {
            trackingClusters.put("" + i,
                new org.elasticsearch.xpack.core.ccr.AutoFollowStats.AutoFollowedCluster(randomLong(), randomNonNegativeLong()));
        }
        return trackingClusters;
    }

    static FollowStatsAction.StatsResponses createStatsResponse() {
        int numResponses = randomIntBetween(0, 8);
        List<FollowStatsAction.StatsResponse> responses = new ArrayList<>(numResponses);
        for (int i = 0; i < numResponses; i++) {
            ShardFollowNodeTaskStatus status = new ShardFollowNodeTaskStatus(
randomAlphaOfLength(4), randomAlphaOfLength(4), randomAlphaOfLength(4), randomInt(), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), randomIntBetween(0, Integer.MAX_VALUE), randomIntBetween(0, Integer.MAX_VALUE), randomIntBetween(0, Integer.MAX_VALUE), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), Collections.emptyNavigableMap(), randomNonNegativeLong(), randomBoolean() ? new ElasticsearchException("fatal error") : null); responses.add(new FollowStatsAction.StatsResponse(status)); } return new FollowStatsAction.StatsResponses(Collections.emptyList(), Collections.emptyList(), responses); } @Override protected CcrStatsResponse doParseToClientInstance(XContentParser parser) throws IOException { return CcrStatsResponse.fromXContent(parser); } @Override protected void assertInstances(CcrStatsAction.Response serverTestInstance, CcrStatsResponse clientInstance) { { AutoFollowStats newAutoFollowStats = clientInstance.getAutoFollowStats(); org.elasticsearch.xpack.core.ccr.AutoFollowStats expectedAutoFollowStats = serverTestInstance.getAutoFollowStats(); assertThat(newAutoFollowStats.getNumberOfSuccessfulFollowIndices(), equalTo(expectedAutoFollowStats.getNumberOfSuccessfulFollowIndices())); assertThat(newAutoFollowStats.getNumberOfFailedRemoteClusterStateRequests(), equalTo(expectedAutoFollowStats.getNumberOfFailedRemoteClusterStateRequests())); assertThat(newAutoFollowStats.getNumberOfFailedFollowIndices(), equalTo(expectedAutoFollowStats.getNumberOfFailedFollowIndices())); assertThat(newAutoFollowStats.getRecentAutoFollowErrors().size(), equalTo(expectedAutoFollowStats.getRecentAutoFollowErrors().size())); assertThat(newAutoFollowStats.getRecentAutoFollowErrors().keySet(), equalTo(expectedAutoFollowStats.getRecentAutoFollowErrors().keySet())); for (final Map.Entry<String, Tuple<Long, ElasticsearchException>> entry : newAutoFollowStats.getRecentAutoFollowErrors().entrySet()) { // x-content loses the exception final Tuple<Long, ElasticsearchException> expected = expectedAutoFollowStats.getRecentAutoFollowErrors().get(entry.getKey()); assertThat(entry.getValue().v2().getMessage(), containsString(expected.v2().getMessage())); assertThat(entry.getValue().v1(), equalTo(expected.v1())); assertNotNull(entry.getValue().v2().getCause()); assertThat( entry.getValue().v2().getCause(), anyOf(instanceOf(ElasticsearchException.class), instanceOf(IllegalStateException.class))); assertThat(entry.getValue().v2().getCause().getMessage(), containsString(expected.v2().getCause().getMessage())); } } { IndicesFollowStats newIndicesFollowStats = clientInstance.getIndicesFollowStats(); // sort by index name, then shard ID final Map<String, Map<Integer, FollowStatsAction.StatsResponse>> expectedIndicesFollowStats = new TreeMap<>(); for (final FollowStatsAction.StatsResponse statsResponse : serverTestInstance.getFollowStats().getStatsResponses()) { expectedIndicesFollowStats.computeIfAbsent( statsResponse.status().followerIndex(), k -> new TreeMap<>()).put(statsResponse.status().getShardId(), statsResponse); } assertThat(newIndicesFollowStats.getShardFollowStats().size(), equalTo(expectedIndicesFollowStats.size())); 
assertThat(newIndicesFollowStats.getShardFollowStats().keySet(), equalTo(expectedIndicesFollowStats.keySet())); for (Map.Entry<String, List<ShardFollowStats>> indexEntry : newIndicesFollowStats.getShardFollowStats().entrySet()) { List<ShardFollowStats> newStats = indexEntry.getValue(); Map<Integer, FollowStatsAction.StatsResponse> expectedStats = expectedIndicesFollowStats.get(indexEntry.getKey()); assertThat(newStats.size(), equalTo(expectedStats.size())); for (int i = 0; i < newStats.size(); i++) { ShardFollowStats actualShardFollowStats = newStats.get(i); ShardFollowNodeTaskStatus expectedShardFollowStats = expectedStats.get(actualShardFollowStats.getShardId()).status(); assertThat(actualShardFollowStats.getRemoteCluster(), equalTo(expectedShardFollowStats.getRemoteCluster())); assertThat(actualShardFollowStats.getLeaderIndex(), equalTo(expectedShardFollowStats.leaderIndex())); assertThat(actualShardFollowStats.getFollowerIndex(), equalTo(expectedShardFollowStats.followerIndex())); assertThat(actualShardFollowStats.getShardId(), equalTo(expectedShardFollowStats.getShardId())); assertThat(actualShardFollowStats.getLeaderGlobalCheckpoint(), equalTo(expectedShardFollowStats.leaderGlobalCheckpoint())); assertThat(actualShardFollowStats.getLeaderMaxSeqNo(), equalTo(expectedShardFollowStats.leaderMaxSeqNo())); assertThat(actualShardFollowStats.getFollowerGlobalCheckpoint(), equalTo(expectedShardFollowStats.followerGlobalCheckpoint())); assertThat(actualShardFollowStats.getLastRequestedSeqNo(), equalTo(expectedShardFollowStats.lastRequestedSeqNo())); assertThat(actualShardFollowStats.getOutstandingReadRequests(), equalTo(expectedShardFollowStats.outstandingReadRequests())); assertThat(actualShardFollowStats.getOutstandingWriteRequests(), equalTo(expectedShardFollowStats.outstandingWriteRequests())); assertThat(actualShardFollowStats.getWriteBufferOperationCount(), equalTo(expectedShardFollowStats.writeBufferOperationCount())); assertThat(actualShardFollowStats.getFollowerMappingVersion(), equalTo(expectedShardFollowStats.followerMappingVersion())); assertThat(actualShardFollowStats.getFollowerSettingsVersion(), equalTo(expectedShardFollowStats.followerSettingsVersion())); assertThat(actualShardFollowStats.getFollowerAliasesVersion(), equalTo(expectedShardFollowStats.followerAliasesVersion())); assertThat(actualShardFollowStats.getTotalReadTimeMillis(), equalTo(expectedShardFollowStats.totalReadTimeMillis())); assertThat(actualShardFollowStats.getSuccessfulReadRequests(), equalTo(expectedShardFollowStats.successfulReadRequests())); assertThat(actualShardFollowStats.getFailedReadRequests(), equalTo(expectedShardFollowStats.failedReadRequests())); assertThat(actualShardFollowStats.getOperationsReads(), equalTo(expectedShardFollowStats.operationsReads())); assertThat(actualShardFollowStats.getBytesRead(), equalTo(expectedShardFollowStats.bytesRead())); assertThat(actualShardFollowStats.getTotalWriteTimeMillis(), equalTo(expectedShardFollowStats.totalWriteTimeMillis())); assertThat(actualShardFollowStats.getSuccessfulWriteRequests(), equalTo(expectedShardFollowStats.successfulWriteRequests())); assertThat(actualShardFollowStats.getFailedWriteRequests(), equalTo(expectedShardFollowStats.failedWriteRequests())); assertThat(actualShardFollowStats.getOperationWritten(), equalTo(expectedShardFollowStats.operationWritten())); assertThat(actualShardFollowStats.getReadExceptions().size(), equalTo(expectedShardFollowStats.readExceptions().size())); 
assertThat(actualShardFollowStats.getReadExceptions().keySet(), equalTo(expectedShardFollowStats.readExceptions().keySet())); for (final Map.Entry<Long, Tuple<Integer, ElasticsearchException>> entry : actualShardFollowStats.getReadExceptions().entrySet()) { final Tuple<Integer, ElasticsearchException> expectedTuple = expectedShardFollowStats.readExceptions().get(entry.getKey()); assertThat(entry.getValue().v1(), equalTo(expectedTuple.v1())); // x-content loses the exception final ElasticsearchException expected = expectedTuple.v2(); assertThat(entry.getValue().v2().getMessage(), containsString(expected.getMessage())); assertNotNull(entry.getValue().v2().getCause()); assertThat( entry.getValue().v2().getCause(), anyOf(instanceOf(ElasticsearchException.class), instanceOf(IllegalStateException.class))); assertThat(entry.getValue().v2().getCause().getMessage(), containsString(expected.getCause().getMessage())); } assertThat(actualShardFollowStats.getTimeSinceLastReadMillis(), equalTo(expectedShardFollowStats.timeSinceLastReadMillis())); } } } } }
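// ---------------------------------------------------------------------------------------------
// A minimal sketch of the round trip the test above is assumed to exercise (the intermediate
// serialization step is paraphrased; AbstractResponseTestCase wires it up in the real framework):
//
//   CcrStatsAction.Response server = createServerTestInstance(xContentType); // random server response
//   // framework: server instance -> x-content bytes -> XContentParser
//   CcrStatsResponse client = doParseToClientInstance(parser);               // CcrStatsResponse.fromXContent
//   assertInstances(server, client);                                         // field-by-field comparison
//
// Note how assertInstances compensates for lossy x-content: exceptions are compared by message
// (containsString) rather than by identity, because exact types and stack traces do not survive
// serialization.
// ---------------------------------------------------------------------------------------------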
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.storm.daemon; import com.google.common.collect.Ordering; import com.google.common.collect.Sets; import org.apache.storm.Config; import org.apache.storm.Thrift; import org.apache.storm.generated.GlobalStreamId; import org.apache.storm.generated.Grouping; import org.apache.storm.grouping.CustomStreamGrouping; import org.apache.storm.grouping.LoadAwareCustomStreamGrouping; import org.apache.storm.grouping.LoadAwareShuffleGrouping; import org.apache.storm.grouping.LoadMapping; import org.apache.storm.grouping.ShuffleGrouping; import org.apache.storm.task.WorkerTopologyContext; import org.apache.storm.tuple.Fields; import org.apache.storm.utils.Utils; import org.apache.storm.utils.TupleUtils; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Random; import java.util.Set; public class GrouperFactory { public static LoadAwareCustomStreamGrouping mkGrouper(WorkerTopologyContext context, String componentId, String streamId, Fields outFields, Grouping thriftGrouping, List<Integer> unsortedTargetTasks, Map<String, Object> topoConf) { List<Integer> targetTasks = Ordering.natural().sortedCopy(unsortedTargetTasks); final boolean isNotLoadAware = (null != topoConf.get(Config.TOPOLOGY_DISABLE_LOADAWARE_MESSAGING) && (boolean) topoConf .get(Config.TOPOLOGY_DISABLE_LOADAWARE_MESSAGING)); CustomStreamGrouping result = null; switch (Thrift.groupingType(thriftGrouping)) { case FIELDS: if (Thrift.isGlobalGrouping(thriftGrouping)) { result = new GlobalGrouper(); } else { result = new FieldsGrouper(outFields, thriftGrouping); } break; case SHUFFLE: if (isNotLoadAware) { result = new ShuffleGrouping(); } else { result = new LoadAwareShuffleGrouping(); } break; case ALL: result = new AllGrouper(); break; case LOCAL_OR_SHUFFLE: // Prefer local tasks as target tasks if possible Set<Integer> sameTasks = Sets.intersection(Sets.newHashSet(targetTasks), Sets.newHashSet(context.getThisWorkerTasks())); targetTasks = (sameTasks.isEmpty()) ? 
targetTasks : new ArrayList<>(sameTasks); if (isNotLoadAware) { result = new ShuffleGrouping(); } else { result = new LoadAwareShuffleGrouping(); } break; case NONE: result = new NoneGrouper(); break; case CUSTOM_OBJECT: result = (CustomStreamGrouping) Thrift.instantiateJavaObject(thriftGrouping.get_custom_object()); break; case CUSTOM_SERIALIZED: result = Utils.javaDeserialize(thriftGrouping.get_custom_serialized(), CustomStreamGrouping.class); break; case DIRECT: result = DIRECT; break; default: result = null; break; } if (null != result) { result.prepare(context, new GlobalStreamId(componentId, streamId), targetTasks); } if (result instanceof LoadAwareCustomStreamGrouping) { return (LoadAwareCustomStreamGrouping) result; } else { return new BasicLoadAwareCustomStreamGrouping (result); } } /** * A bridge between CustomStreamGrouping and LoadAwareCustomStreamGrouping */ public static class BasicLoadAwareCustomStreamGrouping implements LoadAwareCustomStreamGrouping { private final CustomStreamGrouping customStreamGrouping; public BasicLoadAwareCustomStreamGrouping(CustomStreamGrouping customStreamGrouping) { this.customStreamGrouping = customStreamGrouping; } @Override public void refreshLoad(LoadMapping loadMapping) { } @Override public void prepare(WorkerTopologyContext context, GlobalStreamId stream, List<Integer> targetTasks) { customStreamGrouping.prepare(context, stream, targetTasks); } @Override public List<Integer> chooseTasks(int taskId, List<Object> values) { return customStreamGrouping.chooseTasks(taskId, values); } } public static class FieldsGrouper implements CustomStreamGrouping { private Fields outFields; private List<Integer> targetTasks; private Fields groupFields; private int numTasks; public FieldsGrouper(Fields outFields, Grouping thriftGrouping) { this.outFields = outFields; this.groupFields = new Fields(Thrift.fieldGrouping(thriftGrouping)); } @Override public void prepare(WorkerTopologyContext context, GlobalStreamId stream, List<Integer> targetTasks) { this.targetTasks = targetTasks; this.numTasks = targetTasks.size(); } @Override public List<Integer> chooseTasks(int taskId, List<Object> values) { int targetTaskIndex = TupleUtils.chooseTaskIndex(outFields.select(groupFields, values), numTasks); return Collections.singletonList(targetTasks.get(targetTaskIndex)); } } public static class GlobalGrouper implements CustomStreamGrouping { private List<Integer> targetTasks; public GlobalGrouper() { } @Override public void prepare(WorkerTopologyContext context, GlobalStreamId stream, List<Integer> targetTasks) { this.targetTasks = targetTasks; } @Override public List<Integer> chooseTasks(int taskId, List<Object> values) { if (targetTasks.isEmpty()) { return null; } // It's possible for target to have multiple tasks if it reads multiple sources return Collections.singletonList(targetTasks.get(0)); } } public static class NoneGrouper implements CustomStreamGrouping { private List<Integer> targetTasks; private int numTasks; private final Random random; public NoneGrouper() { random = new Random(); } @Override public void prepare(WorkerTopologyContext context, GlobalStreamId stream, List<Integer> targetTasks) { this.targetTasks = targetTasks; this.numTasks = targetTasks.size(); } @Override public List<Integer> chooseTasks(int taskId, List<Object> values) { int index = random.nextInt(numTasks); return Collections.singletonList(targetTasks.get(index)); } } public static class AllGrouper implements CustomStreamGrouping { private List<Integer> targetTasks; @Override public void 
prepare(WorkerTopologyContext context, GlobalStreamId stream, List<Integer> targetTasks) { this.targetTasks = targetTasks; } @Override public List<Integer> chooseTasks(int taskId, List<Object> values) { return targetTasks; } } // A no-op grouper public static final LoadAwareCustomStreamGrouping DIRECT = new LoadAwareCustomStreamGrouping() { @Override public void refreshLoad(LoadMapping loadMapping) { } @Override public void prepare(WorkerTopologyContext context, GlobalStreamId stream, List<Integer> targetTasks) { } @Override public List<Integer> chooseTasks(int taskId, List<Object> values) { return null; } }; }
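// A hedged usage sketch (not from the Storm sources): wrapping a plain CustomStreamGrouping in
// BasicLoadAwareCustomStreamGrouping, as mkGrouper does for CUSTOM_OBJECT/CUSTOM_SERIALIZED
// groupings that are not load-aware. The round-robin grouping below is illustrative only.
class RoundRobinGroupingExample {
    static LoadAwareCustomStreamGrouping wrapExample() {
        CustomStreamGrouping roundRobin = new CustomStreamGrouping() {
            private List<Integer> tasks;
            private int next;

            @Override
            public void prepare(WorkerTopologyContext context, GlobalStreamId stream, List<Integer> targetTasks) {
                this.tasks = targetTasks;
            }

            @Override
            public List<Integer> chooseTasks(int taskId, List<Object> values) {
                return Collections.singletonList(tasks.get(next++ % tasks.size()));
            }
        };
        // The bridge delegates prepare/chooseTasks and turns refreshLoad into a no-op.
        return new GrouperFactory.BasicLoadAwareCustomStreamGrouping(roundRobin);
    }
}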
package edu.umkc.permitme.domain; import com.fasterxml.jackson.annotation.JsonIgnore; import org.hibernate.annotations.Cache; import org.hibernate.annotations.CacheConcurrencyStrategy; import javax.persistence.*; import java.io.Serializable; import java.util.HashSet; import java.util.Set; import java.util.Objects; /** * A Contractor. */ @Entity @Table(name = "contractor") @Cache(usage = CacheConcurrencyStrategy.NONSTRICT_READ_WRITE) public class Contractor implements Serializable { private static final long serialVersionUID = 1L; @Id @GeneratedValue(strategy = GenerationType.AUTO) private Long id; @Column(name = "business_name") private String businessName; @Column(name = "business_license_number") private String businessLicenseNumber; @Column(name = "phone_number") private String phoneNumber; @Column(name = "street_address") private String streetAddress; @Column(name = "zip_code") private String zipCode; @Column(name = "city") private String city; @Column(name = "state") private String state; @Column(name = "agent_first_name") private String agentFirstName; @Column(name = "agent_middle_initial") private String agentMiddleInitial; @Column(name = "agent_last_name") private String agentLastName; @Column(name = "agent_job_title") private String agentJobTitle; @Column(name = "email") private String email; @Column(name = "agent_phone_number") private String agentPhoneNumber; @Column(name = "contract_license_number") private String contractLicenseNumber; @Column(name = "occupational_license_number") private String occupationalLicenseNumber; @Column(name = "master_plumber_license_number") private String masterPlumberLicenseNumber; @Column(name = "has_general_liability_coverage") private Boolean hasGeneralLiabilityCoverage; @Column(name = "requirements_met") private Boolean requirementsMet; @Column(name = "carrier") private String carrier; @Column(name = "policy_number") private String policyNumber; @OneToMany(mappedBy = "contractor") @JsonIgnore @Cache(usage = CacheConcurrencyStrategy.NONSTRICT_READ_WRITE) private Set<Project> projects = new HashSet<>(); @ManyToOne private User user; public Long getId() { return id; } public void setId(Long id) { this.id = id; } public String getBusinessName() { return businessName; } public void setBusinessName(String businessName) { this.businessName = businessName; } public String getBusinessLicenseNumber() { return businessLicenseNumber; } public void setBusinessLicenseNumber(String businessLicenseNumber) { this.businessLicenseNumber = businessLicenseNumber; } public String getPhoneNumber() { return phoneNumber; } public void setPhoneNumber(String phoneNumber) { this.phoneNumber = phoneNumber; } public String getStreetAddress() { return streetAddress; } public void setStreetAddress(String streetAddress) { this.streetAddress = streetAddress; } public String getZipCode() { return zipCode; } public void setZipCode(String zipCode) { this.zipCode = zipCode; } public String getCity() { return city; } public void setCity(String city) { this.city = city; } public String getState() { return state; } public void setState(String state) { this.state = state; } public String getAgentFirstName() { return agentFirstName; } public void setAgentFirstName(String agentFirstName) { this.agentFirstName = agentFirstName; } public String getAgentMiddleInitial() { return agentMiddleInitial; } public void setAgentMiddleInitial(String agentMiddleInitial) { this.agentMiddleInitial = agentMiddleInitial; } public String getAgentLastName() { return agentLastName; } public void 
setAgentLastName(String agentLastName) { this.agentLastName = agentLastName; } public String getAgentJobTitle() { return agentJobTitle; } public void setAgentJobTitle(String agentJobTitle) { this.agentJobTitle = agentJobTitle; } public String getEmail() { return email; } public void setEmail(String email) { this.email = email; } public String getAgentPhoneNumber() { return agentPhoneNumber; } public void setAgentPhoneNumber(String agentPhoneNumber) { this.agentPhoneNumber = agentPhoneNumber; } public String getContractLicenseNumber() { return contractLicenseNumber; } public void setContractLicenseNumber(String contractLicenseNumber) { this.contractLicenseNumber = contractLicenseNumber; } public String getOccupationalLicenseNumber() { return occupationalLicenseNumber; } public void setOccupationalLicenseNumber(String occupationalLicenseNumber) { this.occupationalLicenseNumber = occupationalLicenseNumber; } public String getMasterPlumberLicenseNumber() { return masterPlumberLicenseNumber; } public void setMasterPlumberLicenseNumber(String masterPlumberLicenseNumber) { this.masterPlumberLicenseNumber = masterPlumberLicenseNumber; } public Boolean isHasGeneralLiabilityCoverage() { return hasGeneralLiabilityCoverage; } public void setHasGeneralLiabilityCoverage(Boolean hasGeneralLiabilityCoverage) { this.hasGeneralLiabilityCoverage = hasGeneralLiabilityCoverage; } public Boolean isRequirementsMet() { return requirementsMet; } public void setRequirementsMet(Boolean requirementsMet) { this.requirementsMet = requirementsMet; } public String getCarrier() { return carrier; } public void setCarrier(String carrier) { this.carrier = carrier; } public String getPolicyNumber() { return policyNumber; } public void setPolicyNumber(String policyNumber) { this.policyNumber = policyNumber; } public Set<Project> getProjects() { return projects; } public void setProjects(Set<Project> projects) { this.projects = projects; } public User getUser() { return user; } public void setUser(User user) { this.user = user; } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } Contractor contractor = (Contractor) o; if(contractor.id == null || id == null) { return false; } return Objects.equals(id, contractor.id); } @Override public int hashCode() { return Objects.hashCode(id); } @Override public String toString() { return "Contractor{" + "id=" + id + ", businessName='" + businessName + "'" + ", businessLicenseNumber='" + businessLicenseNumber + "'" + ", phoneNumber='" + phoneNumber + "'" + ", streetAddress='" + streetAddress + "'" + ", zipCode='" + zipCode + "'" + ", city='" + city + "'" + ", state='" + state + "'" + ", agentFirstName='" + agentFirstName + "'" + ", agentMiddleInitial='" + agentMiddleInitial + "'" + ", agentLastName='" + agentLastName + "'" + ", agentJobTitle='" + agentJobTitle + "'" + ", email='" + email + "'" + ", agentPhoneNumber='" + agentPhoneNumber + "'" + ", contractLicenseNumber='" + contractLicenseNumber + "'" + ", occupationalLicenseNumber='" + occupationalLicenseNumber + "'" + ", masterPlumberLicenseNumber='" + masterPlumberLicenseNumber + "'" + ", hasGeneralLiabilityCoverage='" + hasGeneralLiabilityCoverage + "'" + ", requirementsMet='" + requirementsMet + "'" + ", carrier='" + carrier + "'" + ", policyNumber='" + policyNumber + "'" + '}'; } }
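// A short illustration (not part of the entity) of the id-based equality implemented above:
// two Contractor instances compare equal only when both carry the same non-null id, so unsaved
// entities (id == null) are never equal to any other instance.
class ContractorEqualityExample {
    static void demo() {
        Contractor a = new Contractor();
        Contractor b = new Contractor();
        assert !a.equals(b); // both ids are null -> not equal
        a.setId(1L);
        b.setId(1L);
        assert a.equals(b);  // same persistent identity -> equal
    }
}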
package org.apache.lucene.analysis;

/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import java.io.IOException;
import java.io.Reader;

import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.util.AttributeSource;
import org.apache.lucene.util.CharacterUtils;
import org.apache.lucene.util.Version;
import org.apache.lucene.util.VirtualMethod;
import org.apache.lucene.util.CharacterUtils.CharacterBuffer;

/**
 * An abstract base class for simple, character-oriented tokenizers.
 * <p>
 * <a name="version">You must specify the required {@link Version} compatibility
 * when creating {@link CharTokenizer}:
 * <ul>
 * <li>As of 3.1, {@link CharTokenizer} uses an int based API to normalize and
 * detect token codepoints. See {@link #isTokenChar(int)} and
 * {@link #normalize(int)} for details.</li>
 * </ul>
 * <p>
 * A new {@link CharTokenizer} API has been introduced with Lucene 3.1. This API
 * moved from UTF-16 code units to UTF-32 codepoints to eventually add support
 * for <a href=
 * "http://java.sun.com/j2se/1.5.0/docs/api/java/lang/Character.html#supplementary"
 * >supplementary characters</a>. The old <i>char</i> based API has been
 * deprecated and should be replaced with the <i>int</i> based methods
 * {@link #isTokenChar(int)} and {@link #normalize(int)}.
 * </p>
 * <p>
 * As of Lucene 3.1 each {@link CharTokenizer} constructor expects a
 * {@link Version} argument. Based on the given {@link Version} either the new
 * API or a backwards compatibility layer is used at runtime. For
 * {@link Version} < 3.1 the backwards compatibility layer ensures correct
 * behavior even for indexes built with previous versions of Lucene. If a
 * {@link Version} >= 3.1 is used {@link CharTokenizer} requires the new API to
 * be implemented by the instantiated class. Yet, the old <i>char</i> based API
 * is not required anymore even if backwards compatibility must be preserved.
 * {@link CharTokenizer} subclasses implementing the new API are fully backwards
 * compatible if instantiated with {@link Version} < 3.1.
 * </p>
 * <p>
 * <strong>Note:</strong> If you use a subclass of {@link CharTokenizer} with {@link Version} >=
 * 3.1 on an index built with a version < 3.1, created tokens might not be
 * compatible with the terms in your index.
 * </p>
 **/
public abstract class CharTokenizer extends Tokenizer {

  /**
   * Creates a new {@link CharTokenizer} instance
   *
   * @param matchVersion
   *          Lucene version to match. See <a href="#version">above</a>.
   * @param input
   *          the input to split up into tokens
   */
  public CharTokenizer(Version matchVersion, Reader input) {
    super(input);
    charUtils = CharacterUtils.getInstance(matchVersion);
    useOldAPI = useOldAPI(matchVersion);
  }

  /**
   * Creates a new {@link CharTokenizer} instance
   *
   * @param matchVersion
   *          Lucene version to match. See <a href="#version">above</a>.
   * @param source
   *          the attribute source to use for this {@link Tokenizer}
   * @param input
   *          the input to split up into tokens
   */
  public CharTokenizer(Version matchVersion, AttributeSource source, Reader input) {
    super(source, input);
    charUtils = CharacterUtils.getInstance(matchVersion);
    useOldAPI = useOldAPI(matchVersion);
  }

  /**
   * Creates a new {@link CharTokenizer} instance
   *
   * @param matchVersion
   *          Lucene version to match. See <a href="#version">above</a>.
   * @param factory
   *          the attribute factory to use for this {@link Tokenizer}
   * @param input
   *          the input to split up into tokens
   */
  public CharTokenizer(Version matchVersion, AttributeFactory factory, Reader input) {
    super(factory, input);
    charUtils = CharacterUtils.getInstance(matchVersion);
    useOldAPI = useOldAPI(matchVersion);
  }

  /**
   * Creates a new {@link CharTokenizer} instance
   * @param input the input to split up into tokens
   * @deprecated use {@link #CharTokenizer(Version, Reader)} instead. This will be
   *             removed in Lucene 4.0.
   */
  @Deprecated
  public CharTokenizer(Reader input) {
    this(Version.LUCENE_30, input);
  }

  /**
   * Creates a new {@link CharTokenizer} instance
   * @param input the input to split up into tokens
   * @param source the attribute source to use for this {@link Tokenizer}
   * @deprecated use {@link #CharTokenizer(Version, AttributeSource, Reader)} instead. This will be
   *             removed in Lucene 4.0.
   */
  @Deprecated
  public CharTokenizer(AttributeSource source, Reader input) {
    this(Version.LUCENE_30, source, input);
  }

  /**
   * Creates a new {@link CharTokenizer} instance
   * @param input the input to split up into tokens
   * @param factory the attribute factory to use for this {@link Tokenizer}
   * @deprecated use {@link #CharTokenizer(Version, AttributeSource.AttributeFactory, Reader)} instead. This will be
   *             removed in Lucene 4.0.
   */
  @Deprecated
  public CharTokenizer(AttributeFactory factory, Reader input) {
    this(Version.LUCENE_30, factory, input);
  }

  private int offset = 0, bufferIndex = 0, dataLen = 0, finalOffset = 0;
  private static final int MAX_WORD_LEN = 255;
  private static final int IO_BUFFER_SIZE = 4096;

  private final CharTermAttribute termAtt = addAttribute(CharTermAttribute.class);
  private final OffsetAttribute offsetAtt = addAttribute(OffsetAttribute.class);

  private final CharacterUtils charUtils;
  private final CharacterBuffer ioBuffer = CharacterUtils.newCharacterBuffer(IO_BUFFER_SIZE);

  /**
   * @deprecated this will be removed in lucene 4.0
   */
  @Deprecated
  private final boolean useOldAPI;

  /**
   * @deprecated this will be removed in lucene 4.0
   */
  @Deprecated
  private static final VirtualMethod<CharTokenizer> isTokenCharMethod =
    new VirtualMethod<CharTokenizer>(CharTokenizer.class, "isTokenChar", char.class);

  /**
   * @deprecated this will be removed in lucene 4.0
   */
  @Deprecated
  private static final VirtualMethod<CharTokenizer> normalizeMethod =
    new VirtualMethod<CharTokenizer>(CharTokenizer.class, "normalize", char.class);

  /**
   * Returns true iff a UTF-16 code unit should be included in a token. This
   * tokenizer generates as tokens adjacent sequences of characters which
   * satisfy this predicate. Characters for which this is <code>false</code> are
   * used to define token boundaries and are not included in tokens.
   * <p>
   * Note: This method cannot handle <a href=
   * "http://java.sun.com/j2se/1.5.0/docs/api/java/lang/Character.html#supplementary"
   * >supplementary characters</a>. To support all Unicode characters, including
   * supplementary characters, use the {@link #isTokenChar(int)} method.
   * </p>
   *
   * @deprecated use {@link #isTokenChar(int)} instead. This method will be
   *             removed in Lucene 4.0.
   */
  @Deprecated
  protected boolean isTokenChar(char c) {
    return isTokenChar((int) c);
  }

  /**
   * Called on each token UTF-16 code unit to normalize it before it is added to the
   * token. The default implementation does nothing. Subclasses may use this to,
   * e.g., lowercase tokens.
   * <p>
   * Note: This method cannot handle <a href=
   * "http://java.sun.com/j2se/1.5.0/docs/api/java/lang/Character.html#supplementary"
   * >supplementary characters</a>. To support all Unicode characters, including
   * supplementary characters, use the {@link #normalize(int)} method.
   * </p>
   *
   * @deprecated use {@link #normalize(int)} instead. This method will be
   *             removed in Lucene 4.0.
   */
  @Deprecated
  protected char normalize(char c) {
    return (char) normalize((int) c);
  }

  /**
   * Returns true iff a codepoint should be included in a token. This tokenizer
   * generates as tokens adjacent sequences of codepoints which satisfy this
   * predicate. Codepoints for which this is false are used to define token
   * boundaries and are not included in tokens.
   * <p>
   * As of Lucene 3.1 the char based API ({@link #isTokenChar(char)} and
   * {@link #normalize(char)}) has been deprecated in favor of a Unicode 4.0
   * compatible int based API to support codepoints instead of UTF-16 code
   * units. Subclasses of {@link CharTokenizer} must not override the char based
   * methods if a {@link Version} >= 3.1 is passed to the constructor.
   * <p>
   * NOTE: This method will be marked <i>abstract</i> in Lucene 4.0.
   * </p>
   */
  protected boolean isTokenChar(int c) {
    throw new UnsupportedOperationException("since LUCENE_31 subclasses of CharTokenizer must implement isTokenChar(int)");
  }

  /**
   * Called on each token character to normalize it before it is added to the
   * token.
   * The default implementation does nothing. Subclasses may use this to,
   * e.g., lowercase tokens.
   * <p>
   * As of Lucene 3.1 the char based API ({@link #isTokenChar(char)} and
   * {@link #normalize(char)}) has been deprecated in favor of a Unicode 4.0
   * compatible int based API to support codepoints instead of UTF-16 code
   * units. Subclasses of {@link CharTokenizer} must not override the char based
   * methods if a {@link Version} >= 3.1 is passed to the constructor.
   * <p>
   * NOTE: This method will be marked <i>abstract</i> in Lucene 4.0.
   * </p>
   */
  protected int normalize(int c) {
    return c;
  }

  @Override
  public final boolean incrementToken() throws IOException {
    clearAttributes();
    if (useOldAPI) // TODO remove this in LUCENE 4.0
      return incrementTokenOld();
    int length = 0;
    int start = -1; // this variable is always initialized
    char[] buffer = termAtt.buffer();
    while (true) {
      if (bufferIndex >= dataLen) {
        offset += dataLen;
        if (!charUtils.fill(ioBuffer, input)) { // read supplementary char aware with CharacterUtils
          dataLen = 0; // so next offset += dataLen won't decrement offset
          if (length > 0) {
            break;
          } else {
            finalOffset = correctOffset(offset);
            return false;
          }
        }
        dataLen = ioBuffer.getLength();
        bufferIndex = 0;
      }
      // use CharacterUtils here to support < 3.1 UTF-16 code unit behavior if the char based methods are gone
      final int c = charUtils.codePointAt(ioBuffer.getBuffer(), bufferIndex);
      bufferIndex += Character.charCount(c);

      if (isTokenChar(c)) {               // if it's a token char
        if (length == 0) {                // start of token
          assert start == -1;
          start = offset + bufferIndex - 1;
        } else if (length >= buffer.length - 1) { // check if a supplementary could run out of bounds
          buffer = termAtt.resizeBuffer(2 + length); // make sure a supplementary fits in the buffer
        }
        length += Character.toChars(normalize(c), buffer, length); // buffer it, normalized
        if (length >= MAX_WORD_LEN) // buffer overflow! make sure to check for >= surrogate pair could break == test
          break;
      } else if (length > 0)             // at non-Letter w/ chars
        break;                           // return 'em
    }

    termAtt.setLength(length);
    assert start != -1;
    offsetAtt.setOffset(correctOffset(start), finalOffset = correctOffset(start + length));
    return true;
  }

  /**
   * The <= 3.0 version of incrementToken. This is a backwards compat implementation used
   * if a version <= 3.0 is provided to the ctor.
   * @deprecated remove in 4.0
   */
  @Deprecated
  private boolean incrementTokenOld() throws IOException {
    int length = 0;
    int start = -1; // this variable is always initialized
    char[] buffer = termAtt.buffer();
    final char[] oldIoBuffer = ioBuffer.getBuffer();
    while (true) {
      if (bufferIndex >= dataLen) {
        offset += dataLen;
        dataLen = input.read(oldIoBuffer);
        if (dataLen == -1) {
          dataLen = 0; // so next offset += dataLen won't decrement offset
          if (length > 0) {
            break;
          } else {
            finalOffset = correctOffset(offset);
            return false;
          }
        }
        bufferIndex = 0;
      }

      final char c = oldIoBuffer[bufferIndex++];

      if (isTokenChar(c)) {               // if it's a token char
        if (length == 0) {                // start of token
          assert start == -1;
          start = offset + bufferIndex - 1;
        } else if (length == buffer.length) {
          buffer = termAtt.resizeBuffer(1 + length);
        }
        buffer[length++] = normalize(c); // buffer it, normalized
        if (length == MAX_WORD_LEN)      // buffer overflow!
break; } else if (length > 0) // at non-Letter w/ chars break; // return 'em } termAtt.setLength(length); assert start != -1; offsetAtt.setOffset(correctOffset(start), correctOffset(start+length)); return true; } @Override public final void end() { // set final offset offsetAtt.setOffset(finalOffset, finalOffset); } @Override public void reset(Reader input) throws IOException { super.reset(input); bufferIndex = 0; offset = 0; dataLen = 0; finalOffset = 0; ioBuffer.reset(); // make sure to reset the IO buffer!! } /** * @deprecated this will be removed in lucene 4.0 */ @Deprecated private boolean useOldAPI(Version matchVersion) { final Class<? extends CharTokenizer> clazz = this.getClass(); if (matchVersion.onOrAfter(Version.LUCENE_31) && (isTokenCharMethod.isOverriddenAsOf(clazz) || normalizeMethod .isOverriddenAsOf(clazz))) throw new IllegalArgumentException( "For matchVersion >= LUCENE_31, CharTokenizer subclasses must not override isTokenChar(char) or normalize(char)."); return !matchVersion.onOrAfter(Version.LUCENE_31); } }
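// A minimal sketch of a post-3.1 subclass, assuming the int-based API described above; the class
// name is illustrative, not from the Lucene sources. Only isTokenChar(int) must be implemented;
// normalize(int) is optional.
final class ExampleLetterTokenizer extends CharTokenizer {

  ExampleLetterTokenizer(Version matchVersion, Reader in) {
    super(matchVersion, in); // matchVersion >= LUCENE_31 selects the codepoint-based path
  }

  @Override
  protected boolean isTokenChar(int c) {
    return Character.isLetter(c); // codepoint-aware: also covers supplementary characters
  }

  @Override
  protected int normalize(int c) {
    return Character.toLowerCase(c); // codepoint-aware lowercasing
  }
}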
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.action.bulk.byscroll;

import org.elasticsearch.Version;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.support.ActiveShardCount;
import org.elasticsearch.action.support.replication.ReplicationRequest;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.tasks.TaskId;

import java.io.IOException;
import java.util.Arrays;

import static org.elasticsearch.action.ValidateActions.addValidationError;
import static org.elasticsearch.common.unit.TimeValue.timeValueMillis;
import static org.elasticsearch.common.unit.TimeValue.timeValueMinutes;

public abstract class AbstractBulkByScrollRequest<Self extends AbstractBulkByScrollRequest<Self>> extends ActionRequest {
    public static final int SIZE_ALL_MATCHES = -1;
    private static final TimeValue DEFAULT_SCROLL_TIMEOUT = timeValueMinutes(5);
    private static final int DEFAULT_SCROLL_SIZE = 1000;

    /**
     * The search to be executed.
     */
    private SearchRequest searchRequest;

    /**
     * Maximum number of processed documents. Defaults to -1 meaning process all
     * documents.
     */
    private int size = SIZE_ALL_MATCHES;

    /**
     * Should version conflicts cause aborts? Defaults to true.
     */
    private boolean abortOnVersionConflict = true;

    /**
     * Call refresh on the indexes we've written to after the request ends?
     */
    private boolean refresh = false;

    /**
     * Timeout to wait for the shards to be available for each bulk request.
     */
    private TimeValue timeout = ReplicationRequest.DEFAULT_TIMEOUT;

    /**
     * The number of shard copies that must be active before proceeding with the write.
     */
    private ActiveShardCount activeShardCount = ActiveShardCount.DEFAULT;

    /**
     * Initial delay after a rejection before retrying a bulk request. With the default maxRetries the total backoff for retrying rejections
     * is about one minute per bulk request. Once the entire bulk request is successful the retry counter resets.
     */
    private TimeValue retryBackoffInitialTime = timeValueMillis(500);

    /**
     * Total number of retries attempted for rejections. There is no way to ask for unlimited retries.
     */
    private int maxRetries = 11;

    /**
     * The throttle for this request in sub-requests per second. {@link Float#POSITIVE_INFINITY} means set no throttle and that is the
     * default. Throttling is done between batches, as we start the next scroll requests. That way we can increase the scroll's timeout to
     * make sure that it contains any time that we might wait.
     */
    private float requestsPerSecond = Float.POSITIVE_INFINITY;

    /**
     * Should this task store its result?
     */
    private boolean shouldStoreResult;

    /**
     * The number of slices this task should be divided into. Defaults to 1 meaning the task isn't sliced into subtasks.
     */
    private int slices = 1;

    /**
     * Constructor for deserialization.
     */
    public AbstractBulkByScrollRequest() {
    }

    /**
     * Constructor for actual use.
     *
     * @param searchRequest the search request to execute to get the documents to process
     * @param setDefaults should this request set the defaults on the search request? Usually set to true but leave it false to support
     *        request slicing
     */
    public AbstractBulkByScrollRequest(SearchRequest searchRequest, boolean setDefaults) {
        this.searchRequest = searchRequest;
        // Set the defaults which differ from SearchRequest's defaults.
        if (setDefaults) {
            searchRequest.scroll(DEFAULT_SCROLL_TIMEOUT);
            searchRequest.source(new SearchSourceBuilder());
            searchRequest.source().size(DEFAULT_SCROLL_SIZE);
        }
    }

    /**
     * `this` cast to Self. Used for building fluent methods without cast
     * warnings.
     */
    protected abstract Self self();

    @Override
    public ActionRequestValidationException validate() {
        ActionRequestValidationException e = searchRequest.validate();
        if (searchRequest.source().from() != -1) {
            e = addValidationError("from is not supported in this context", e);
        }
        if (searchRequest.source().storedFields() != null) {
            e = addValidationError("stored_fields is not supported in this context", e);
        }
        if (maxRetries < 0) {
            e = addValidationError("retries cannot be negative", e);
        }
        if (false == (size == -1 || size > 0)) {
            e = addValidationError(
                    "size should be greater than 0 if the request is limited to some number of documents or -1 if it isn't but it was ["
                            + size + "]", e);
        }
        if (searchRequest.source().slice() != null && slices != 1) {
            e = addValidationError("can't specify both slice and workers", e);
        }
        return e;
    }

    /**
     * Maximum number of processed documents. Defaults to -1 meaning process all
     * documents.
     */
    public int getSize() {
        return size;
    }

    /**
     * Maximum number of processed documents. Defaults to -1 meaning process all
     * documents.
     */
    public Self setSize(int size) {
        this.size = size;
        return self();
    }

    /**
     * Should version conflicts cause aborts? Defaults to true.
     */
    public boolean isAbortOnVersionConflict() {
        return abortOnVersionConflict;
    }

    /**
     * Should version conflicts cause aborts? Defaults to true.
     */
    public Self setAbortOnVersionConflict(boolean abortOnVersionConflict) {
        this.abortOnVersionConflict = abortOnVersionConflict;
        return self();
    }

    /**
     * Sets abortOnVersionConflict based on REST-friendly names.
     */
    public void setConflicts(String conflicts) {
        switch (conflicts) {
        case "proceed":
            setAbortOnVersionConflict(false);
            return;
        case "abort":
            setAbortOnVersionConflict(true);
            return;
        default:
            throw new IllegalArgumentException("conflicts may only be \"proceed\" or \"abort\" but was [" + conflicts + "]");
        }
    }

    /**
     * The search request that matches the documents to process.
     */
    public SearchRequest getSearchRequest() {
        return searchRequest;
    }

    /**
     * Call refresh on the indexes we've written to after the request ends?
     */
    public boolean isRefresh() {
        return refresh;
    }

    /**
     * Call refresh on the indexes we've written to after the request ends?
     */
    public Self setRefresh(boolean refresh) {
        this.refresh = refresh;
        return self();
    }

    /**
     * Timeout to wait for the shards to be available for each bulk request.
     */
    public TimeValue getTimeout() {
        return timeout;
    }

    /**
     * Timeout to wait for the shards to be available for each bulk request.
     */
    public Self setTimeout(TimeValue timeout) {
        this.timeout = timeout;
        return self();
    }

    /**
     * The number of shard copies that must be active before proceeding with the write.
     */
    public ActiveShardCount getWaitForActiveShards() {
        return activeShardCount;
    }

    /**
     * Sets the number of shard copies that must be active before proceeding with the write.
     * See {@link ReplicationRequest#waitForActiveShards(ActiveShardCount)} for details.
     */
    public Self setWaitForActiveShards(ActiveShardCount activeShardCount) {
        this.activeShardCount = activeShardCount;
        return self();
    }

    /**
     * A shortcut for {@link #setWaitForActiveShards(ActiveShardCount)} where the numerical
     * shard count is passed in, instead of having to first call {@link ActiveShardCount#from(int)}
     * to get the ActiveShardCount.
     */
    public Self setWaitForActiveShards(final int waitForActiveShards) {
        return setWaitForActiveShards(ActiveShardCount.from(waitForActiveShards));
    }

    /**
     * Initial delay after a rejection before retrying a request.
     */
    public TimeValue getRetryBackoffInitialTime() {
        return retryBackoffInitialTime;
    }

    /**
     * Set the initial delay after a rejection before retrying a request.
     */
    public Self setRetryBackoffInitialTime(TimeValue retryBackoffInitialTime) {
        this.retryBackoffInitialTime = retryBackoffInitialTime;
        return self();
    }

    /**
     * Total number of retries attempted for rejections.
     */
    public int getMaxRetries() {
        return maxRetries;
    }

    /**
     * Set the total number of retries attempted for rejections. There is no way to ask for unlimited retries.
     */
    public Self setMaxRetries(int maxRetries) {
        this.maxRetries = maxRetries;
        return self();
    }

    /**
     * The throttle for this request in sub-requests per second. {@link Float#POSITIVE_INFINITY} means set no throttle and that is the
     * default. Throttling is done between batches, as we start the next scroll requests. That way we can increase the scroll's timeout to
     * make sure that it contains any time that we might wait.
     */
    public float getRequestsPerSecond() {
        return requestsPerSecond;
    }

    /**
     * Set the throttle for this request in sub-requests per second. {@link Float#POSITIVE_INFINITY} means set no throttle and that is the
     * default. Throttling is done between batches, as we start the next scroll requests. That way we can increase the scroll's timeout to
     * make sure that it contains any time that we might wait.
     */
    public Self setRequestsPerSecond(float requestsPerSecond) {
        if (requestsPerSecond <= 0) {
            throw new IllegalArgumentException(
                    "[requests_per_second] must be greater than 0. Use Float.POSITIVE_INFINITY to disable throttling.");
        }
        this.requestsPerSecond = requestsPerSecond;
        return self();
    }

    /**
     * Should this task store its result after it has finished?
     */
    public Self setShouldStoreResult(boolean shouldStoreResult) {
        this.shouldStoreResult = shouldStoreResult;
        return self();
    }

    @Override
    public boolean getShouldStoreResult() {
        return shouldStoreResult;
    }

    /**
     * The number of slices this task should be divided into. Defaults to 1 meaning the task isn't sliced into subtasks.
     */
    public Self setSlices(int slices) {
        if (slices < 1) {
            throw new IllegalArgumentException("[slices] must be at least 1");
        }
        this.slices = slices;
        return self();
    }

    /**
     * The number of slices this task should be divided into. Defaults to 1 meaning the task isn't sliced into subtasks.
     */
    public int getSlices() {
        return slices;
    }

    /**
     * Build a new request for a slice of the parent request.
*/ protected abstract Self forSlice(TaskId slicingTask, SearchRequest slice); /** * Setup a clone of this request with the information needed to process a slice of it. */ protected Self doForSlice(Self request, TaskId slicingTask) { request.setAbortOnVersionConflict(abortOnVersionConflict).setRefresh(refresh).setTimeout(timeout) .setWaitForActiveShards(activeShardCount).setRetryBackoffInitialTime(retryBackoffInitialTime).setMaxRetries(maxRetries) // Parent task will store result .setShouldStoreResult(false) // Split requests per second between all slices .setRequestsPerSecond(requestsPerSecond / slices) // Size is split between workers. This means the size might round down! .setSize(size == SIZE_ALL_MATCHES ? SIZE_ALL_MATCHES : size / slices) // Sub requests don't have workers .setSlices(1); // Set the parent task so this task is cancelled if we cancel the parent request.setParentTask(slicingTask); // TODO It'd be nice not to refresh on every slice. Instead we should refresh after the sub requests finish. return request; } @Override public Task createTask(long id, String type, String action, TaskId parentTaskId) { if (slices > 1) { return new ParentBulkByScrollTask(id, type, action, getDescription(), parentTaskId, slices); } /* Extract the slice from the search request so it'll be available in the status. This is potentially useful for users that manually * slice their search requests so they can keep track of it and **absolutely** useful for automatically sliced reindex requests so * they can properly track the responses. */ Integer sliceId = searchRequest.source().slice() == null ? null : searchRequest.source().slice().getId(); return new WorkingBulkByScrollTask(id, type, action, getDescription(), parentTaskId, sliceId, requestsPerSecond); } @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); searchRequest = new SearchRequest(); searchRequest.readFrom(in); abortOnVersionConflict = in.readBoolean(); size = in.readVInt(); refresh = in.readBoolean(); timeout = new TimeValue(in); activeShardCount = ActiveShardCount.readFrom(in); retryBackoffInitialTime = new TimeValue(in); maxRetries = in.readVInt(); requestsPerSecond = in.readFloat(); if (in.getVersion().onOrAfter(Version.V_5_1_1_UNRELEASED)) { slices = in.readVInt(); } else { slices = 1; } } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); searchRequest.writeTo(out); out.writeBoolean(abortOnVersionConflict); out.writeVInt(size); out.writeBoolean(refresh); timeout.writeTo(out); activeShardCount.writeTo(out); retryBackoffInitialTime.writeTo(out); out.writeVInt(maxRetries); out.writeFloat(requestsPerSecond); if (out.getVersion().onOrAfter(Version.V_5_1_1_UNRELEASED)) { out.writeVInt(slices); } else { if (slices > 1) { throw new IllegalArgumentException("Attempting to send sliced reindex-style request to a node that doesn't support " + "it. Version is [" + out.getVersion() + "] but must be [" + Version.V_5_1_1_UNRELEASED + "]"); } } } /** * Append a short description of the search request to a StringBuilder. Used * to make toString. */ protected void searchToString(StringBuilder b) { if (searchRequest.indices() != null && searchRequest.indices().length != 0) { b.append(Arrays.toString(searchRequest.indices())); } else { b.append("[all indices]"); } if (searchRequest.types() != null && searchRequest.types().length != 0) { b.append(Arrays.toString(searchRequest.types())); } } @Override public String getDescription() { return this.toString(); } }
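// A hedged sketch of the fluent API above; "ExampleRequest" stands in for a concrete subclass
// (e.g. a reindex-style request), which the abstract class leaves to implementors:
//
//   ExampleRequest request = new ExampleRequest(searchRequest, true)
//       .setSize(10_000)                 // process at most 10k documents
//       .setRequestsPerSecond(500f)      // throttle between scroll batches
//       .setAbortOnVersionConflict(false)
//       .setSlices(5);                   // doForSlice gives each slice size 2_000 and 100f rps
//
// Note the rounding caveat documented in doForSlice: size is divided by slices with integer
// division, so a size of 10_001 across 5 slices processes only 10_000 documents in total.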
/*! ******************************************************************************
 *
 * Pentaho Data Integration
 *
 * Copyright (C) 2002-2019 by Hitachi Vantara : http://www.pentaho.com
 *
 *******************************************************************************
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 ******************************************************************************/

package org.pentaho.di.trans.steps.databasejoin;

import java.sql.ResultSet;

import org.pentaho.di.core.database.Database;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.exception.KettleStepException;
import org.pentaho.di.core.row.RowDataUtil;
import org.pentaho.di.core.row.RowMeta;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.trans.Trans;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.step.BaseStep;
import org.pentaho.di.trans.step.StepDataInterface;
import org.pentaho.di.trans.step.StepInterface;
import org.pentaho.di.trans.step.StepMeta;
import org.pentaho.di.trans.step.StepMetaInterface;

/**
 * Use values from input streams to join with values in a database. Freehand SQL can be used to do this.
 *
 * @author Matt
 * @since 26-apr-2003
 */
public class DatabaseJoin extends BaseStep implements StepInterface {
  private static Class<?> PKG = DatabaseJoinMeta.class; // for i18n purposes, needed by Translator2!!

  private DatabaseJoinMeta meta;
  private DatabaseJoinData data;

  public DatabaseJoin( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta,
    Trans trans ) {
    super( stepMeta, stepDataInterface, copyNr, transMeta, trans );
  }

  private synchronized void lookupValues( RowMetaInterface rowMeta, Object[] rowData ) throws KettleException {
    if ( first ) {
      first = false;

      data.outputRowMeta = rowMeta.clone();
      meta.getFields(
        data.outputRowMeta, getStepname(), new RowMetaInterface[] { meta.getTableFields(), }, null, this,
        repository, metaStore );

      data.lookupRowMeta = new RowMeta();

      if ( log.isDetailed() ) {
        logDetailed( BaseMessages.getString( PKG, "DatabaseJoin.Log.CheckingRow" ) + rowMeta.getString( rowData ) );
      }

      data.keynrs = new int[meta.getParameterField().length];

      for ( int i = 0; i < meta.getParameterField().length; i++ ) {
        data.keynrs[i] = rowMeta.indexOfValue( meta.getParameterField()[i] );
        if ( data.keynrs[i] < 0 ) {
          throw new KettleStepException( BaseMessages.getString(
            PKG, "DatabaseJoin.Exception.FieldNotFound", meta.getParameterField()[i] ) );
        }

        data.lookupRowMeta.addValueMeta( rowMeta.getValueMeta( data.keynrs[i] ).clone() );
      }
    }

    // Construct the parameters row...
    Object[] lookupRowData = new Object[data.lookupRowMeta.size()];
    for ( int i = 0; i < data.keynrs.length; i++ ) {
      lookupRowData[i] = rowData[data.keynrs[i]];
    }

    // Set the values on the prepared statement (for faster exec.)
    ResultSet rs = data.db.openQuery( data.pstmt, data.lookupRowMeta, lookupRowData );

    // Get a row from the database...
    //
    Object[] add = data.db.getRow( rs );
    RowMetaInterface addMeta = data.db.getReturnRowMeta();
    incrementLinesInput();

    int counter = 0;
    while ( add != null && ( meta.getRowLimit() == 0 || counter < meta.getRowLimit() ) ) {
      counter++;

      Object[] newRow = RowDataUtil.resizeArray( rowData, data.outputRowMeta.size() );
      int newIndex = rowMeta.size();
      for ( int i = 0; i < addMeta.size(); i++ ) {
        newRow[newIndex++] = add[i];
      }
      // we have to clone, otherwise we only get the last new value
      putRow( data.outputRowMeta, data.outputRowMeta.cloneRow( newRow ) );

      if ( log.isRowLevel() ) {
        logRowlevel( BaseMessages.getString( PKG, "DatabaseJoin.Log.PutoutRow" )
          + data.outputRowMeta.getString( newRow ) );
      }

      // Get a new row
      if ( meta.getRowLimit() == 0 || counter < meta.getRowLimit() ) {
        add = data.db.getRow( rs );
        incrementLinesInput();
      }
    }

    // Nothing found? Perhaps we have to put something out after all?
    if ( counter == 0 && meta.isOuterJoin() ) {
      if ( data.notfound == null ) {
        // Just return null values for all values...
        //
        data.notfound = new Object[data.db.getReturnRowMeta().size()];
      }
      Object[] newRow = RowDataUtil.resizeArray( rowData, data.outputRowMeta.size() );
      int newIndex = rowMeta.size();
      for ( int i = 0; i < data.notfound.length; i++ ) {
        newRow[newIndex++] = data.notfound[i];
      }
      putRow( data.outputRowMeta, newRow );
    }

    data.db.closeQuery( rs );
  }

  public boolean processRow( StepMetaInterface smi, StepDataInterface sdi ) throws KettleException {
    meta = (DatabaseJoinMeta) smi;
    data = (DatabaseJoinData) sdi;

    boolean sendToErrorRow = false;
    String errorMessage = null;

    Object[] r = getRow(); // Get row from input rowset & set row busy!
    if ( r == null ) { // no more input to be expected...
      setOutputDone();
      return false;
    }

    try {
      lookupValues( getInputRowMeta(), r ); // add new values to the row in rowset[0].

      if ( checkFeedback( getLinesRead() ) ) {
        if ( log.isBasic() ) {
          logBasic( BaseMessages.getString( PKG, "DatabaseJoin.Log.LineNumber" ) + getLinesRead() );
        }
      }
    } catch ( KettleException e ) {
      if ( getStepMeta().isDoingErrorHandling() ) {
        sendToErrorRow = true;
        errorMessage = e.toString();
      } else {
        logError( BaseMessages.getString( PKG, "DatabaseJoin.Log.ErrorInStepRunning" ) + e.getMessage(), e );
        setErrors( 1 );
        stopAll();
        setOutputDone(); // signal end to receiver(s)
        return false;
      }

      if ( sendToErrorRow ) {
        // Simply add this row to the error row
        putError( getInputRowMeta(), r, 1, errorMessage, null, "DBJOIN001" );
      }
    }

    return true;
  }

  /**
   * Stop the running query.
   *
   * [PDI-17820] - In the Database Join step data.isCanceled is checked before synchronization and set after
   * synchronization is completed.
   *
   * To cancel a prepared statement we need a valid database connection, which we do not have if dispose has already
   * been called.
   */
  public synchronized void stopRunning( StepMetaInterface smi, StepDataInterface sdi ) throws KettleException {
    if ( this.isStopped() || sdi.isDisposed() ) {
      return;
    }
    meta = (DatabaseJoinMeta) smi;
    data = (DatabaseJoinData) sdi;

    if ( data.db != null && data.db.getConnection() != null && !data.isCanceled ) {
      data.db.cancelStatement( data.pstmt );
      setStopped( true );
      data.isCanceled = true;
    }
  }

  public boolean init( StepMetaInterface smi, StepDataInterface sdi ) {
    meta = (DatabaseJoinMeta) smi;
    data = (DatabaseJoinData) sdi;

    if ( super.init( smi, sdi ) ) {
      if ( meta.getDatabaseMeta() == null ) {
        logError( BaseMessages.getString( PKG, "DatabaseJoin.Init.ConnectionMissing", getStepname() ) );
        return false;
      }
      data.db = new Database( this, meta.getDatabaseMeta() );
      data.db.shareVariablesWith( this );

      try {
        if ( getTransMeta().isUsingUniqueConnections() ) {
          synchronized ( getTrans() ) {
            data.db.connect( getTrans().getTransactionId(), getPartitionID() );
          }
        } else {
          data.db.connect( getPartitionID() );
        }

        if ( log.isDetailed() ) {
          logDetailed( BaseMessages.getString( PKG, "DatabaseJoin.Log.ConnectedToDB" ) );
        }

        String sql = meta.getSql();
        if ( meta.isVariableReplace() ) {
          sql = environmentSubstitute( sql );
        }

        // Prepare the SQL statement
        data.pstmt = data.db.prepareSQL( sql );
        if ( log.isDebug() ) {
          logDebug( BaseMessages.getString( PKG, "DatabaseJoin.Log.SQLStatement", sql ) );
        }
        data.db.setQueryLimit( meta.getRowLimit() );

        return true;
      } catch ( KettleException e ) {
        logError( BaseMessages.getString( PKG, "DatabaseJoin.Log.DatabaseError" ) + e.getMessage(), e );
        if ( data.db != null ) {
          data.db.disconnect();
        }
      }
    }

    return false;
  }

  public void dispose( StepMetaInterface smi, StepDataInterface sdi ) {
    meta = (DatabaseJoinMeta) smi;
    data = (DatabaseJoinData) sdi;

    if ( data.db != null ) {
      data.db.disconnect();
    }

    super.dispose( smi, sdi );
  }
}
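// A plain-Java illustration (outside Kettle) of the outer-join padding performed above, assuming
// RowDataUtil.resizeArray behaves like Arrays.copyOf when growing a row:
class OuterJoinPaddingExample {
    static Object[] padExample() {
        Object[] rowData = { "key", 42L };  // incoming row (rowMeta.size() == 2)
        int outputSize = 4;                 // data.outputRowMeta.size()
        Object[] newRow = java.util.Arrays.copyOf(rowData, outputSize);
        // positions 2..3 remain null, mirroring the null placeholders in data.notfound
        return newRow;
    }
}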
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.processor; import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.concurrent.ExecutorService; import java.util.concurrent.atomic.LongAdder; import org.apache.camel.AsyncCallback; import org.apache.camel.AsyncProcessor; import org.apache.camel.CamelContext; import org.apache.camel.CamelContextAware; import org.apache.camel.Exchange; import org.apache.camel.ExchangePattern; import org.apache.camel.ExchangePropertyKey; import org.apache.camel.Expression; import org.apache.camel.ExtendedCamelContext; import org.apache.camel.Message; import org.apache.camel.Processor; import org.apache.camel.RuntimeCamelException; import org.apache.camel.ShutdownRunningTask; import org.apache.camel.StreamCache; import org.apache.camel.Traceable; import org.apache.camel.spi.EndpointUtilizationStatistics; import org.apache.camel.spi.IdAware; import org.apache.camel.spi.ProcessorExchangeFactory; import org.apache.camel.spi.RouteIdAware; import org.apache.camel.spi.ShutdownAware; import org.apache.camel.support.AsyncProcessorConverterHelper; import org.apache.camel.support.AsyncProcessorSupport; import org.apache.camel.support.service.ServiceHelper; import org.apache.camel.util.ObjectHelper; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Processor for wire tapping exchanges to an endpoint destination. 
*/ public class WireTapProcessor extends AsyncProcessorSupport implements Traceable, ShutdownAware, IdAware, RouteIdAware, CamelContextAware { private static final Logger LOG = LoggerFactory.getLogger(WireTapProcessor.class); private String id; private String routeId; private CamelContext camelContext; private final SendDynamicProcessor dynamicSendProcessor; // is only used for reporting statistics private final String uri; private final boolean dynamicUri; private final Processor processor; private final AsyncProcessor asyncProcessor; private final ExchangePattern exchangePattern; private final boolean copy; private final ExecutorService executorService; private volatile boolean shutdownExecutorService; private final LongAdder taskCount = new LongAdder(); private ProcessorExchangeFactory processorExchangeFactory; private PooledExchangeTaskFactory taskFactory; // expression or processor used for populating a new exchange to send // as opposed to traditional wiretap that sends a copy of the original exchange private Expression newExchangeExpression; private List<Processor> newExchangeProcessors; private Processor onPrepare; public WireTapProcessor(SendDynamicProcessor dynamicSendProcessor, Processor processor, String uri, ExchangePattern exchangePattern, boolean copy, ExecutorService executorService, boolean shutdownExecutorService, boolean dynamicUri) { this.dynamicSendProcessor = dynamicSendProcessor; this.uri = uri; this.processor = processor; this.asyncProcessor = AsyncProcessorConverterHelper.convert(processor); this.exchangePattern = exchangePattern; this.copy = copy; ObjectHelper.notNull(executorService, "executorService"); this.executorService = executorService; this.shutdownExecutorService = shutdownExecutorService; this.dynamicUri = dynamicUri; } private final class WireTapTask implements PooledExchangeTask, Runnable { private Exchange exchange; private final AsyncCallback callback = new AsyncCallback() { @Override public void done(boolean doneSync) { taskCount.decrement(); if (processorExchangeFactory != null) { processorExchangeFactory.release(exchange); } taskFactory.release(WireTapTask.this); } }; @Override public void prepare(Exchange exchange, AsyncCallback callback) { this.exchange = exchange; // we use our own callback } @Override public void reset() { this.exchange = null; } @Override public void run() { taskCount.increment(); LOG.debug(">>>> (wiretap) {} {}", uri, exchange); asyncProcessor.process(exchange, callback); } } @Override public String toString() { return id; } @Override public String getTraceLabel() { return "wireTap(" + uri + ")"; } @Override public String getId() { return id; } @Override public void setId(String id) { this.id = id; } @Override public String getRouteId() { return routeId; } @Override public void setRouteId(String routeId) { this.routeId = routeId; } @Override public CamelContext getCamelContext() { return camelContext; } @Override public void setCamelContext(CamelContext camelContext) { this.camelContext = camelContext; } @Override public boolean deferShutdown(ShutdownRunningTask shutdownRunningTask) { // not in use return true; } @Override public int getPendingExchangesSize() { return taskCount.intValue(); } @Override public void prepareShutdown(boolean suspendOnly, boolean forced) { // noop } public EndpointUtilizationStatistics getEndpointUtilizationStatistics() { if (dynamicSendProcessor != null) { return dynamicSendProcessor.getEndpointUtilizationStatistics(); } else { return null; } } @Override public boolean process(final Exchange 
exchange, final AsyncCallback callback) {
        if (!isStarted()) {
            throw new IllegalStateException("WireTapProcessor has not been started: " + this);
        }

        // must configure the wire tap beforehand
        Exchange target;
        try {
            target = configureExchange(exchange, exchangePattern);
        } catch (Exception e) {
            exchange.setException(e);
            callback.done(true);
            return true;
        }

        // send the exchange to the destination using an executor service
        try {
            // create task which has state used during routing
            PooledExchangeTask task = taskFactory.acquire(target, null);
            executorService.submit(task);
        } catch (Throwable e) {
            // in case the thread pool rejects or cannot submit the task then we need to catch
            // so the Camel error handler can react
            exchange.setException(e);
        }

        // continue routing this synchronously
        callback.done(true);
        return true;
    }

    protected Exchange configureExchange(Exchange exchange, ExchangePattern pattern) throws IOException {
        Exchange answer;
        if (copy) {
            // use a copy of the original exchange
            answer = configureCopyExchange(exchange);
        } else {
            // use a new exchange
            answer = configureNewExchange(exchange);
        }

        // prepare the exchange
        if (newExchangeExpression != null) {
            Object body = newExchangeExpression.evaluate(answer, Object.class);
            if (body != null) {
                answer.getIn().setBody(body);
            }
        }

        if (newExchangeProcessors != null) {
            for (Processor processor : newExchangeProcessors) {
                try {
                    processor.process(answer);
                } catch (Exception e) {
                    throw RuntimeCamelException.wrapRuntimeCamelException(e);
                }
            }
        }

        // if the body is a stream cache we must use a copy of the stream in the wire tapped exchange
        Message msg = answer.getMessage();
        if (msg.getBody() instanceof StreamCache) {
            // in the parallel processing case the stream must be copied, therefore get the stream
            StreamCache cache = (StreamCache) msg.getBody();
            StreamCache copied = cache.copy(answer);
            if (copied != null) {
                msg.setBody(copied);
            }
        }

        // invoke on prepare on the exchange if specified
        if (onPrepare != null) {
            try {
                onPrepare.process(answer);
            } catch (Exception e) {
                throw RuntimeCamelException.wrapRuntimeCamelException(e);
            }
        }

        return answer;
    }

    private Exchange configureCopyExchange(Exchange exchange) {
        // must use a copy as we don't want to cause side effects on the original exchange
        Exchange copy = processorExchangeFactory.createCorrelatedCopy(exchange, false);
        // set MEP to InOnly as this wire tap is fire-and-forget
        copy.setPattern(ExchangePattern.InOnly);
        // move OUT to IN if needed
        if (copy.hasOut()) {
            copy.setIn(copy.getOut());
            copy.setOut(null);
        }
        // remove STREAM_CACHE_UNIT_OF_WORK property because this wire tap will
        // close its own created stream cache(s)
        copy.removeProperty(ExchangePropertyKey.STREAM_CACHE_UNIT_OF_WORK);
        return copy;
    }

    private Exchange configureNewExchange(Exchange exchange) {
        return processorExchangeFactory.create(exchange.getFromEndpoint(), ExchangePattern.InOnly);
    }

    public List<Processor> getNewExchangeProcessors() {
        return newExchangeProcessors;
    }

    public void setNewExchangeProcessors(List<Processor> newExchangeProcessors) {
        this.newExchangeProcessors = newExchangeProcessors;
    }

    public Expression getNewExchangeExpression() {
        return newExchangeExpression;
    }

    public void setNewExchangeExpression(Expression newExchangeExpression) {
        this.newExchangeExpression = newExchangeExpression;
    }

    public void addNewExchangeProcessor(Processor processor) {
        if (newExchangeProcessors == null) {
            newExchangeProcessors = new ArrayList<>();
        }
        newExchangeProcessors.add(processor);
    }

    public boolean isCopy() {
        return copy;
    }

    public Processor getOnPrepare() {
        return
onPrepare; } public void setOnPrepare(Processor onPrepare) { this.onPrepare = onPrepare; } public String getUri() { return uri; } public int getCacheSize() { if (dynamicSendProcessor != null) { return dynamicSendProcessor.getCacheSize(); } else { return 0; } } public boolean isIgnoreInvalidEndpoint() { if (dynamicSendProcessor != null) { return dynamicSendProcessor.isIgnoreInvalidEndpoint(); } else { return false; } } public boolean isDynamicUri() { return dynamicUri; } @Override protected void doBuild() throws Exception { // create a per processor exchange factory this.processorExchangeFactory = getCamelContext().adapt(ExtendedCamelContext.class) .getProcessorExchangeFactory().newProcessorExchangeFactory(this); this.processorExchangeFactory.setRouteId(getRouteId()); this.processorExchangeFactory.setId(getId()); boolean pooled = camelContext.adapt(ExtendedCamelContext.class).getExchangeFactory().isPooled(); if (pooled) { taskFactory = new PooledTaskFactory(getId()) { @Override public PooledExchangeTask create(Exchange exchange, AsyncCallback callback) { return new WireTapTask(); } }; int capacity = camelContext.adapt(ExtendedCamelContext.class).getExchangeFactory().getCapacity(); taskFactory.setCapacity(capacity); } else { taskFactory = new PrototypeTaskFactory() { @Override public PooledExchangeTask create(Exchange exchange, AsyncCallback callback) { return new WireTapTask(); } }; } LOG.trace("Using TaskFactory: {}", taskFactory); ServiceHelper.buildService(processorExchangeFactory, taskFactory, processor); } @Override protected void doInit() throws Exception { ServiceHelper.initService(processorExchangeFactory, taskFactory, processor); } @Override protected void doStart() throws Exception { ServiceHelper.startService(processorExchangeFactory, taskFactory, processor); } @Override protected void doStop() throws Exception { ServiceHelper.stopService(processorExchangeFactory, taskFactory, processor); } @Override protected void doShutdown() throws Exception { ServiceHelper.stopAndShutdownServices(processorExchangeFactory, taskFactory, processor); if (shutdownExecutorService) { getCamelContext().getExecutorServiceManager().shutdownNow(executorService); } } }
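For context, a minimal sketch, assuming Camel's Java DSL, of a route that would create this processor; the endpoint URIs "direct:start", "direct:tap" and "mock:result" are placeholder assumptions, not taken from the file above:

import org.apache.camel.builder.RouteBuilder;

// Hedged sketch: wireTap(...) in the DSL produces a WireTapProcessor that sends a
// correlated copy to the tap endpoint on the configured executor service, while the
// original exchange continues down the route unchanged.
public class WireTapRouteSketch extends RouteBuilder {
    @Override
    public void configure() throws Exception {
        from("direct:start")
            .wireTap("direct:tap")      // fire-and-forget copy (ExchangePattern.InOnly)
            .to("mock:result");         // original exchange continues synchronously

        from("direct:tap")
            .log("tapped body: ${body}");
    }
}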
/** * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for * license information. * * Code generated by Microsoft (R) AutoRest Code Generator. */ package com.microsoft.azure.management.network.v2018_06_01.implementation; import com.microsoft.azure.management.network.v2018_06_01.VpnConnection; import com.microsoft.azure.arm.model.implementation.CreatableUpdatableImpl; import rx.Observable; import java.util.Map; import com.microsoft.azure.SubResource; import com.microsoft.azure.management.network.v2018_06_01.VpnConnectionStatus; import java.util.List; import com.microsoft.azure.management.network.v2018_06_01.IpsecPolicy; import com.microsoft.azure.management.network.v2018_06_01.ProvisioningState; class VpnConnectionImpl extends CreatableUpdatableImpl<VpnConnection, VpnConnectionInner, VpnConnectionImpl> implements VpnConnection, VpnConnection.Definition, VpnConnection.Update { private final NetworkManager manager; private String resourceGroupName; private String gatewayName; private String connectionName; VpnConnectionImpl(String name, NetworkManager manager) { super(name, new VpnConnectionInner()); this.manager = manager; // Set resource name this.connectionName = name; // } VpnConnectionImpl(VpnConnectionInner inner, NetworkManager manager) { super(inner.name(), inner); this.manager = manager; // Set resource name this.connectionName = inner.name(); // set resource ancestor and positional variables this.resourceGroupName = IdParsingUtils.getValueFromIdByName(inner.id(), "resourceGroups"); this.gatewayName = IdParsingUtils.getValueFromIdByName(inner.id(), "vpnGateways"); this.connectionName = IdParsingUtils.getValueFromIdByName(inner.id(), "vpnConnections"); // } @Override public NetworkManager manager() { return this.manager; } @Override public Observable<VpnConnection> createResourceAsync() { VpnConnectionsInner client = this.manager().inner().vpnConnections(); return client.createOrUpdateAsync(this.resourceGroupName, this.gatewayName, this.connectionName, this.inner()) .map(innerToFluentMap(this)); } @Override public Observable<VpnConnection> updateResourceAsync() { VpnConnectionsInner client = this.manager().inner().vpnConnections(); return client.createOrUpdateAsync(this.resourceGroupName, this.gatewayName, this.connectionName, this.inner()) .map(innerToFluentMap(this)); } @Override protected Observable<VpnConnectionInner> getInnerAsync() { VpnConnectionsInner client = this.manager().inner().vpnConnections(); return client.getAsync(this.resourceGroupName, this.gatewayName, this.connectionName); } @Override public boolean isInCreateMode() { return this.inner().id() == null; } @Override public Integer connectionBandwidthInMbps() { return this.inner().connectionBandwidthInMbps(); } @Override public VpnConnectionStatus connectionStatus() { return this.inner().connectionStatus(); } @Override public Long egressBytesTransferred() { return this.inner().egressBytesTransferred(); } @Override public Boolean enableBgp() { return this.inner().enableBgp(); } @Override public String etag() { return this.inner().etag(); } @Override public String id() { return this.inner().id(); } @Override public Long ingressBytesTransferred() { return this.inner().ingressBytesTransferred(); } @Override public List<IpsecPolicy> ipsecPolicies() { return this.inner().ipsecPolicies(); } @Override public String location() { return this.inner().location(); } @Override public String name() { return this.inner().name(); } @Override public 
ProvisioningState provisioningState() { return this.inner().provisioningState(); } @Override public SubResource remoteVpnSite() { return this.inner().remoteVpnSite(); } @Override public Integer routingWeight() { return this.inner().routingWeight(); } @Override public String sharedKey() { return this.inner().sharedKey(); } @Override public Map<String, String> tags() { return this.inner().getTags(); } @Override public String type() { return this.inner().type(); } @Override public VpnConnectionImpl withExistingVpnGateway(String resourceGroupName, String gatewayName) { this.resourceGroupName = resourceGroupName; this.gatewayName = gatewayName; return this; } @Override public VpnConnectionImpl withConnectionStatus(VpnConnectionStatus connectionStatus) { this.inner().withConnectionStatus(connectionStatus); return this; } @Override public VpnConnectionImpl withEnableBgp(Boolean enableBgp) { this.inner().withEnableBgp(enableBgp); return this; } @Override public VpnConnectionImpl withId(String id) { this.inner().withId(id); return this; } @Override public VpnConnectionImpl withIpsecPolicies(List<IpsecPolicy> ipsecPolicies) { this.inner().withIpsecPolicies(ipsecPolicies); return this; } @Override public VpnConnectionImpl withLocation(String location) { this.inner().withLocation(location); return this; } @Override public VpnConnectionImpl withProvisioningState(ProvisioningState provisioningState) { this.inner().withProvisioningState(provisioningState); return this; } @Override public VpnConnectionImpl withRemoteVpnSite(SubResource remoteVpnSite) { this.inner().withRemoteVpnSite(remoteVpnSite); return this; } @Override public VpnConnectionImpl withRoutingWeight(Integer routingWeight) { this.inner().withRoutingWeight(routingWeight); return this; } @Override public VpnConnectionImpl withSharedKey(String sharedKey) { this.inner().withSharedKey(sharedKey); return this; } @Override public VpnConnectionImpl withTags(Map<String, String> tags) { this.inner().withTags(tags); return this; } }
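A hedged usage sketch of the fluent Definition stages implemented above. It assumes the generated NetworkManager exposes a vpnConnections() entry point with define(name), per the usual AutoRest fluent pattern, and that create() blocks on createResourceAsync(); "myResourceGroup", "myGateway" and "connection1" are placeholder names:

// Hedged sketch, not from the generated sources:
VpnConnection connection = networkManager.vpnConnections()
        .define("connection1")                                   // sets connectionName
        .withExistingVpnGateway("myResourceGroup", "myGateway")  // sets the ancestor variables
        .withRoutingWeight(10)
        .withEnableBgp(false)
        .create();                                               // drives createResourceAsync()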
// // ======================================================================== // Copyright (c) 1995-2015 Mort Bay Consulting Pty. Ltd. // ------------------------------------------------------------------------ // All rights reserved. This program and the accompanying materials // are made available under the terms of the Eclipse Public License v1.0 // and Apache License v2.0 which accompanies this distribution. // // The Eclipse Public License is available at // http://www.eclipse.org/legal/epl-v10.html // // The Apache License v2.0 is available at // http://www.opensource.org/licenses/apache2.0.php // // You may elect to redistribute this code under either of these licenses. // ======================================================================== // package org.eclipse.jetty.deploy.graph; import java.io.File; import java.io.FileWriter; import java.io.IOException; import java.io.PrintWriter; import java.text.CollationKey; import java.text.Collator; import java.util.Comparator; import java.util.Set; import java.util.TreeSet; import org.eclipse.jetty.util.IO; /** * Output the Graph in GraphViz Dot format. */ public class GraphOutputDot { private GraphOutputDot() { } private static final String TOPNODE = "undeployed"; /** * Comparator that makes the 'undeployed' node the first node in the sort list. * * This makes the 'undeployed' node show up at the top of the generated graph. */ private static class TopNodeSort implements Comparator<Node> { private Collator collator = Collator.getInstance(); public int compare(Node o1, Node o2) { if (o1.getName().equals(TOPNODE)) { return -1; } if (o2.getName().equals(TOPNODE)) { return 1; } CollationKey key1 = toKey(o1); CollationKey key2 = toKey(o2); return key1.compareTo(key2); } private CollationKey toKey(Node node) { return collator.getCollationKey(node.getName()); } } public static void write(Graph graph, File outputFile) throws IOException { FileWriter writer = null; PrintWriter out = null; try { writer = new FileWriter(outputFile); out = new PrintWriter(writer); out.println("// Autogenerated by " + GraphOutputDot.class.getName()); out.println("digraph Graf {"); writeGraphDefaults(out); writeNodeDefaults(out); writeEdgeDefaults(out); Set<Node> nodes = new TreeSet<Node>(new TopNodeSort()); nodes.addAll(graph.getNodes()); for (Node node : nodes) { writeNode(out,node); } for (Edge edge : graph.getEdges()) { writeEdge(out,edge); } out.println("}"); } finally { IO.close(out); IO.close(writer); } } private static void writeEdge(PrintWriter out, Edge edge) { out.println(); out.println(" // Edge"); out.printf(" \"%s\" -> \"%s\" [%n",toId(edge.getFrom()),toId(edge.getTo())); out.println(" arrowtail=none,"); out.println(" arrowhead=normal"); out.println(" ];"); } private static void writeNode(PrintWriter out, Node node) { out.println(); out.println(" // Node"); out.printf(" \"%s\" [%n",toId(node)); out.printf(" label=\"%s\",%n",node.getName()); if (node.getName().endsWith("ed")) { out.println(" color=\"#ddddff\","); out.println(" style=filled,"); } out.println(" shape=box"); out.println(" ];"); } private static CharSequence toId(Node node) { StringBuilder buf = new StringBuilder(); for (char c : node.getName().toCharArray()) { if (Character.isLetter(c)) { buf.append(c); continue; } if (Character.isDigit(c)) { buf.append(c); continue; } if ((c == ' ') || (c == '-') || (c == '_')) { buf.append(c); continue; } } return buf; } private static void writeEdgeDefaults(PrintWriter out) { out.println(); out.println(" // Edge Defaults "); out.println(" edge ["); 
out.println(" arrowsize=\"0.8\",");
        out.println(" fontsize=\"11\"");
        out.println(" ];");
    }

    private static void writeGraphDefaults(PrintWriter out)
    {
        out.println();
        out.println(" // Graph Defaults ");
        out.println(" graph [");
        out.println(" bgcolor=\"#ffffff\",");
        out.println(" fontname=\"Helvetica\",");
        out.println(" fontsize=\"11\",");
        out.println(" label=\"Graph\",");
        out.println(" labeljust=\"l\",");
        // dot only recognizes rankdir values TB, LR, BT and RL
        out.println(" rankdir=\"TB\"");
        out.println(" ];");
    }

    private static void writeNodeDefaults(PrintWriter out)
    {
        out.println();
        out.println(" // Node Defaults ");
        out.println(" node [");
        out.println(" fontname=\"Helvetica\",");
        out.println(" fontsize=\"11\",");
        out.println(" shape=\"box\"");
        out.println(" ];");
    }
}
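A hedged usage sketch: AppLifeCycle, in the same jetty-deploy module, is a Graph, so its deployment state machine can be dumped with this class; the output file name is arbitrary:

import java.io.File;
import org.eclipse.jetty.deploy.AppLifeCycle;
import org.eclipse.jetty.deploy.graph.GraphOutputDot;

public class DumpAppLifeCycle
{
    public static void main(String[] args) throws Exception
    {
        // AppLifeCycle node names ("undeployed", "deployed", "started", ...) match the
        // TOPNODE sorting and the "*ed" coloring rules applied by writeNode(..) above
        GraphOutputDot.write(new AppLifeCycle(), new File("app-lifecycle.dot"));
        // render with: dot -Tpng app-lifecycle.dot -o app-lifecycle.png
    }
}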
/*! ****************************************************************************** * * Pentaho Data Integration * * Copyright (C) 2002-2016 by Pentaho : http://www.pentaho.com * ******************************************************************************* * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ******************************************************************************/ package org.pentaho.di.repository; import org.apache.commons.io.IOUtils; import org.junit.BeforeClass; import org.junit.Test; import org.mockito.Mockito; import org.pentaho.di.core.Const; import org.pentaho.di.core.KettleEnvironment; import org.pentaho.di.core.database.DatabaseMeta; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.logging.LogChannel; import java.io.File; import java.io.InputStream; import static org.junit.Assert.*; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; public class RepositoriesMetaTest { @BeforeClass public static void setUpClass() throws Exception { if ( !KettleEnvironment.isInitialized() ) { KettleEnvironment.init(); } } @Test public void testToString() throws Exception { RepositoriesMeta repositoriesMeta = new RepositoriesMeta(); assertEquals( "RepositoriesMeta", repositoriesMeta.toString() ); } @Test public void testReadData() throws Exception { RepositoriesMeta meta = new RepositoriesMeta(); RepositoriesMeta spy = Mockito.spy( meta ); LogChannel log = mock( LogChannel.class ); when( spy.getKettleUserRepositoriesFile() ).thenReturn( getClass().getResource( "repositories.xml" ).getPath() ); when( spy.newLogChannel() ).thenReturn( log ); spy.readData(); String repositoriesXml = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>" + Const.CR + "<repositories>" + Const.CR + " <connection>" + Const.CR + " <name>local postgres</name>" + Const.CR + " <server>localhost</server>" + Const.CR + " <type>POSTGRESQL</type>" + Const.CR + " <access>Native</access>" + Const.CR + " <database>hibernate</database>" + Const.CR + " <port>5432</port>" + Const.CR + " <username>auser</username>" + Const.CR + " <password>Encrypted 2be98afc86aa7f285bb18bd63c99dbdde</password>" + Const.CR + " <servername/>" + Const.CR + " <data_tablespace/>" + Const.CR + " <index_tablespace/>" + Const.CR + " <attributes>" + Const.CR + " <attribute><code>FORCE_IDENTIFIERS_TO_LOWERCASE</code><attribute>N</attribute></attribute>" + Const.CR + " <attribute><code>FORCE_IDENTIFIERS_TO_UPPERCASE</code><attribute>N</attribute></attribute>" + Const.CR + " <attribute><code>IS_CLUSTERED</code><attribute>N</attribute></attribute>" + Const.CR + " <attribute><code>PORT_NUMBER</code><attribute>5432</attribute></attribute>" + Const.CR + " <attribute><code>PRESERVE_RESERVED_WORD_CASE</code><attribute>N</attribute></attribute>" + Const.CR + " <attribute><code>QUOTE_ALL_FIELDS</code><attribute>N</attribute></attribute>" + Const.CR + " <attribute><code>SUPPORTS_BOOLEAN_DATA_TYPE</code><attribute>Y</attribute></attribute>" + Const.CR + " 
<attribute><code>SUPPORTS_TIMESTAMP_DATA_TYPE</code><attribute>Y</attribute></attribute>" + Const.CR + " <attribute><code>USE_POOLING</code><attribute>N</attribute></attribute>" + Const.CR + " </attributes>" + Const.CR + " </connection>" + Const.CR + " <repository> <id>KettleFileRepository</id>" + Const.CR + " <name>Test Repository</name>" + Const.CR + " <description>Test Repository Description</description>" + Const.CR + " <is_default>false</is_default>" + Const.CR + " <base_directory>test-repository</base_directory>" + Const.CR + " <read_only>N</read_only>" + Const.CR + " <hides_hidden_files>N</hides_hidden_files>" + Const.CR + " </repository> </repositories>" + Const.CR; assertEquals( repositoriesXml, spy.getXML() ); RepositoriesMeta clone = spy.clone(); assertEquals( repositoriesXml, spy.getXML() ); assertNotSame( clone, spy ); assertEquals( 1, spy.nrRepositories() ); RepositoryMeta repository = spy.getRepository( 0 ); assertEquals( "Test Repository", repository.getName() ); assertEquals( "Test Repository Description", repository.getDescription() ); assertEquals( " <repository> <id>KettleFileRepository</id>" + Const.CR + " <name>Test Repository</name>" + Const.CR + " <description>Test Repository Description</description>" + Const.CR + " <is_default>false</is_default>" + Const.CR + " <base_directory>test-repository</base_directory>" + Const.CR + " <read_only>N</read_only>" + Const.CR + " <hides_hidden_files>N</hides_hidden_files>" + Const.CR + " </repository>", repository.getXML() ); assertSame( repository, spy.searchRepository( "Test Repository" ) ); assertSame( repository, spy.findRepositoryById( "KettleFileRepository" ) ); assertSame( repository, spy.findRepository( "Test Repository" ) ); assertNull( spy.findRepository( "not found" ) ); assertNull( spy.findRepositoryById( "not found" ) ); assertEquals( 0, spy.indexOfRepository( repository ) ); spy.removeRepository( 0 ); assertEquals( 0, spy.nrRepositories() ); assertNull( spy.searchRepository( "Test Repository" ) ); spy.addRepository( 0, repository ); assertEquals( 1, spy.nrRepositories() ); spy.removeRepository( 1 ); assertEquals( 1, spy.nrRepositories() ); assertEquals( 1, spy.nrDatabases() ); assertEquals( "local postgres", spy.getDatabase( 0 ).getName() ); DatabaseMeta searchDatabase = spy.searchDatabase( "local postgres" ); assertSame( searchDatabase, spy.getDatabase( 0 ) ); assertEquals( 0, spy.indexOfDatabase( searchDatabase ) ); spy.removeDatabase( 0 ); assertEquals( 0, spy.nrDatabases() ); assertNull( spy.searchDatabase( "local postgres" ) ); spy.addDatabase( 0, searchDatabase ); assertEquals( 1, spy.nrDatabases() ); spy.removeDatabase( 1 ); assertEquals( 1, spy.nrDatabases() ); assertEquals( "Unable to read repository with id [junk]. 
RepositoryMeta is not available.", spy.getErrorMessage() ); } @Test public void testNothingToRead() throws Exception { RepositoriesMeta meta = new RepositoriesMeta(); RepositoriesMeta spy = Mockito.spy( meta ); LogChannel log = mock( LogChannel.class ); when( spy.getKettleUserRepositoriesFile() ).thenReturn( "filedoesnotexist.xml" ); when( spy.newLogChannel() ).thenReturn( log ); assertTrue( spy.readData() ); assertEquals( 0, spy.nrDatabases() ); assertEquals( 0, spy.nrRepositories() ); } @Test public void testReadDataFromInputStream() throws Exception { RepositoriesMeta meta = new RepositoriesMeta(); RepositoriesMeta spy = Mockito.spy( meta ); LogChannel log = mock( LogChannel.class ); when( spy.newLogChannel() ).thenReturn( log ); InputStream inputStream = getClass().getResourceAsStream( "repositories.xml" ); spy.readDataFromInputStream( inputStream ); assertEquals( 1, spy.nrDatabases() ); assertEquals( 1, spy.nrRepositories() ); } @Test public void testErrorReadingInputStream() throws Exception { RepositoriesMeta meta = new RepositoriesMeta(); RepositoriesMeta spy = Mockito.spy( meta ); LogChannel log = mock( LogChannel.class ); when( spy.newLogChannel() ).thenReturn( log ); try { spy.readDataFromInputStream( getClass().getResourceAsStream( "filedoesnotexist.xml" ) ); } catch ( KettleException e ) { assertEquals( Const.CR + "Error reading information from file:" + Const.CR + "InputStream cannot be null" + Const.CR, e.getMessage() ); } } @Test public void testErrorReadingFile() throws Exception { RepositoriesMeta meta = new RepositoriesMeta(); RepositoriesMeta spy = Mockito.spy( meta ); LogChannel log = mock( LogChannel.class ); when( spy.newLogChannel() ).thenReturn( log ); when( spy.getKettleUserRepositoriesFile() ).thenReturn( getClass().getResource( "bad-repositories.xml" ).getPath() ); try { spy.readData(); } catch ( KettleException e ) { assertEquals( Const.CR + "Error reading information from file:" + Const.CR + "The element type \"repositories\" must be terminated by the matching end-tag \"</repositories>\"." + Const.CR, e.getMessage() ); } } @Test public void testWriteFile() throws Exception { RepositoriesMeta meta = new RepositoriesMeta(); RepositoriesMeta spy = Mockito.spy( meta ); LogChannel log = mock( LogChannel.class ); when( spy.newLogChannel() ).thenReturn( log ); String path = getClass().getResource( "repositories.xml" ).getPath().replace( "repositories.xml", "new-repositories.xml" ); when( spy.getKettleUserRepositoriesFile() ).thenReturn( path ); spy.writeData(); InputStream resourceAsStream = getClass().getResourceAsStream( "new-repositories.xml" ); assertEquals( "<?xml version=\"1.0\" encoding=\"UTF-8\"?>" + Const.CR + "<repositories>" + Const.CR + " </repositories>" + Const.CR, IOUtils.toString( resourceAsStream ) ); new File( path ).delete(); } @Test public void testErrorWritingFile() throws Exception { RepositoriesMeta meta = new RepositoriesMeta(); RepositoriesMeta spy = Mockito.spy( meta ); LogChannel log = mock( LogChannel.class ); when( spy.newLogChannel() ).thenReturn( log ); when( spy.getKettleUserRepositoriesFile() ).thenReturn( null ); try { spy.writeData(); } catch ( KettleException e ) { assertTrue( e.getMessage().startsWith( Const.CR + "Error writing repositories metadata" ) ); } } }
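A hedged sketch of the non-test call sequence these cases exercise, using only methods invoked in the tests above (readData() falling back gracefully, findRepository(), getXML()); the repository name is a placeholder:

import org.pentaho.di.core.KettleEnvironment;
import org.pentaho.di.repository.RepositoriesMeta;
import org.pentaho.di.repository.RepositoryMeta;

public class RepositoriesMetaUsageSketch {
  public static void main( String[] args ) throws Exception {
    KettleEnvironment.init();
    RepositoriesMeta repositoriesMeta = new RepositoriesMeta();
    // readData() succeeds with an empty model when the user file is missing,
    // as testNothingToRead() demonstrates
    if ( repositoriesMeta.readData() ) {
      RepositoryMeta repository = repositoriesMeta.findRepository( "Test Repository" );
      if ( repository != null ) {
        System.out.println( repository.getXML() );
      }
    }
  }
}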
/* * Copyright 2014-2015 Open Networking Laboratory * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.onosproject.bgpio.protocol; import org.jboss.netty.buffer.ChannelBuffer; import org.jboss.netty.buffer.ChannelBuffers; import org.junit.Test; import org.onosproject.bgpio.exceptions.BgpParseException; import org.onosproject.bgpio.types.BgpHeader; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.core.Is.is; /** * Test cases for BGP Open Message. */ public class BgpOpenMsgTest { /** * This test case checks open message without optional parameter. */ @Test public void openMessageTest1() throws BgpParseException { //Open message without optional parameter byte[] openMsg = new byte[] {(byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, 0x00, 0x1d, 0x01, 0X04, (byte) 0xfe, 0x09, 0x00, (byte) 0xb4, (byte) 0xc0, (byte) 0xa8, 0x00, 0x0f, 0x00}; byte[] testOpenMsg; ChannelBuffer buffer = ChannelBuffers.dynamicBuffer(); buffer.writeBytes(openMsg); BgpMessageReader<BgpMessage> reader = BgpFactories.getGenericReader(); BgpMessage message; BgpHeader bgpHeader = new BgpHeader(); message = reader.readFrom(buffer, bgpHeader); assertThat(message, instanceOf(BgpOpenMsg.class)); ChannelBuffer buf = ChannelBuffers.dynamicBuffer(); message.writeTo(buf); int readLen = buf.writerIndex(); testOpenMsg = new byte[readLen]; buf.readBytes(testOpenMsg, 0, readLen); assertThat(testOpenMsg, is(openMsg)); } /** * This test case checks open message with Multiprotocol extension * capability. */ @Test public void openMessageTest2() throws BgpParseException { // OPEN Message (MultiProtocolExtension-CAPABILITY). byte[] openMsg = new byte[] {(byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, 0x00, 0x25, 0x01, //BGP Header 0X04, //Version (byte) 0x00, (byte) 0xc8, // AS Number 0x00, (byte) 0xb4, // Hold time (byte) 0xb6, (byte) 0x02, 0x5d, (byte) 0xc8, // BGP Identifier 0x08, 0x02, 0x06, // Opt Parameter length 0x01, 0x04, 0x00, 0x00, 0x00, (byte) 0xc8}; // Multiprotocol CAPABILITY byte[] testOpenMsg; ChannelBuffer buffer = ChannelBuffers.dynamicBuffer(); buffer.writeBytes(openMsg); BgpMessageReader<BgpMessage> reader = BgpFactories.getGenericReader(); BgpMessage message; BgpHeader bgpHeader = new BgpHeader(); message = reader.readFrom(buffer, bgpHeader); assertThat(message, instanceOf(BgpOpenMsg.class)); ChannelBuffer buf = ChannelBuffers.dynamicBuffer(); message.writeTo(buf); int readLen = buf.writerIndex(); testOpenMsg = new byte[readLen]; buf.readBytes(testOpenMsg, 0, readLen); assertThat(testOpenMsg, is(openMsg)); } /** * This test case checks open message with Four-octet AS number * capability. 
*/ @Test public void openMessageTest3() throws BgpParseException { // OPEN Message (Four-Octet AS number capability). byte[] openMsg = new byte[] {(byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, 0x00, 0x25, 0x01, //BGPHeader 0X04, //Version (byte) 0x00, (byte) 0xc8, //AS Number 0x00, (byte) 0xb4, //Hold Time (byte) 0xb6, (byte) 0x02, 0x5d, (byte) 0xc8, //BGP Identifier 0x08, 0x02, 0x06, //Opt Parameter Length 0x41, 0x04, 0x00, 0x01, 0x00, 0x01}; //Four Octet AS Number-CAPABILITY-TLV byte[] testOpenMsg; ChannelBuffer buffer = ChannelBuffers.dynamicBuffer(); buffer.writeBytes(openMsg); BgpMessageReader<BgpMessage> reader = BgpFactories.getGenericReader(); BgpMessage message; BgpHeader bgpHeader = new BgpHeader(); message = reader.readFrom(buffer, bgpHeader); assertThat(message, instanceOf(BgpOpenMsg.class)); ChannelBuffer buf = ChannelBuffers.dynamicBuffer(); message.writeTo(buf); int readLen = buf.writerIndex(); testOpenMsg = new byte[readLen]; buf.readBytes(testOpenMsg, 0, readLen); assertThat(testOpenMsg, is(openMsg)); } /** * This test case checks open message with capabilities. */ @Test public void openMessageTest4() throws BgpParseException { // OPEN Message with capabilities. byte[] openMsg = new byte[] {(byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, 0x00, 0x2b, 0x01, //BGPHeader 0X04, //Version (byte) 0x00, (byte) 0xc8, //AS Number 0x00, (byte) 0xb4, //Hold Time (byte) 0xb6, (byte) 0x02, 0x5d, (byte) 0xc8, //BGP Identifier 0x0e, 0x02, 0x0c, //Opt Parameter Length 0x01, 0x04, 0x00, 0x00, 0x00, (byte) 0xc8, // Multiprotocol extension capability 0x41, 0x04, 0x00, 0x01, 0x00, 0x01}; //Four Octet AS Number-CAPABILITY-TLV byte[] testOpenMsg; ChannelBuffer buffer = ChannelBuffers.dynamicBuffer(); buffer.writeBytes(openMsg); BgpMessageReader<BgpMessage> reader = BgpFactories.getGenericReader(); BgpMessage message; BgpHeader bgpHeader = new BgpHeader(); message = reader.readFrom(buffer, bgpHeader); assertThat(message, instanceOf(BgpOpenMsg.class)); ChannelBuffer buf = ChannelBuffers.dynamicBuffer(); message.writeTo(buf); int readLen = buf.writerIndex(); testOpenMsg = new byte[readLen]; buf.readBytes(testOpenMsg, 0, readLen); assertThat(testOpenMsg, is(openMsg)); } /** * In this test case, Invalid version is given as input and expecting * an exception. */ @Test(expected = BgpParseException.class) public void openMessageTest5() throws BgpParseException { // OPEN Message with invalid version number. byte[] openMsg = new byte[] {(byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, 0x00, 0x1d, 0x01, 0X05, (byte) 0xfe, 0x09, 0x00, (byte) 0xb4, (byte) 0xc0, (byte) 0xa8, 0x00, 0x0f, 0x00}; ChannelBuffer buffer = ChannelBuffers.dynamicBuffer(); buffer.writeBytes(openMsg); BgpMessageReader<BgpMessage> reader = BgpFactories.getGenericReader(); BgpMessage message; BgpHeader bgpHeader = new BgpHeader(); message = reader.readFrom(buffer, bgpHeader); assertThat(message, instanceOf(BgpOpenMsg.class)); } /** * In this test case, Marker is set as 0 in input and expecting * an exception. 
*/ @Test(expected = BgpParseException.class) public void openMessageTest6() throws BgpParseException { // OPEN Message with marker set to 0. byte[] openMsg = new byte[] {(byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0x00, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, 0x00, 0x00, 0x1d, 0x01, 0X04, (byte) 0xfe, 0x09, 0x00, (byte) 0xb4, (byte) 0xc0, (byte) 0xa8, 0x00, 0x0f, 0x00}; ChannelBuffer buffer = ChannelBuffers.dynamicBuffer(); buffer.writeBytes(openMsg); BgpMessageReader<BgpMessage> reader = BgpFactories.getGenericReader(); BgpMessage message; BgpHeader bgpHeader = new BgpHeader(); message = reader.readFrom(buffer, bgpHeader); assertThat(message, instanceOf(BgpOpenMsg.class)); } /** * In this test case, Invalid message length is given as input and expecting * an exception. */ @Test(expected = BgpParseException.class) public void openMessageTest7() throws BgpParseException { // OPEN Message with invalid header length. byte[] openMsg = new byte[] {(byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, 0x00, 0x1e, 0x01, 0X04, (byte) 0xfe, 0x09, 0x00, (byte) 0xb4, (byte) 0xc0, (byte) 0xa8, 0x00, 0x0f, 0x00}; ChannelBuffer buffer = ChannelBuffers.dynamicBuffer(); buffer.writeBytes(openMsg); BgpMessageReader<BgpMessage> reader = BgpFactories.getGenericReader(); BgpMessage message; BgpHeader bgpHeader = new BgpHeader(); message = reader.readFrom(buffer, bgpHeader); assertThat(message, instanceOf(BgpOpenMsg.class)); } /** * In this test case, Invalid message type is given as input and expecting * an exception. */ @Test(expected = BgpParseException.class) public void openMessageTest8() throws BgpParseException { // OPEN Message with invalid message type. byte[] openMsg = new byte[] {(byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, 0x00, 0x1d, 0x05, 0X04, (byte) 0xfe, 0x09, 0x00, (byte) 0xb4, (byte) 0xc0, (byte) 0xa8, 0x00, 0x0f, 0x00}; ChannelBuffer buffer = ChannelBuffers.dynamicBuffer(); buffer.writeBytes(openMsg); BgpMessageReader<BgpMessage> reader = BgpFactories.getGenericReader(); BgpMessage message; BgpHeader bgpHeader = new BgpHeader(); message = reader.readFrom(buffer, bgpHeader); assertThat(message, instanceOf(BgpOpenMsg.class)); } }
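Each positive case above repeats the same decode/re-encode round trip; a hedged helper capturing that shared pattern (the method name roundTrip is an assumption, the calls are exactly those exercised by the tests):

    // could live inside BgpOpenMsgTest; decodes a wire-format message via the generic
    // reader, re-encodes it, and returns the bytes for comparison with the input
    private static byte[] roundTrip(byte[] wire) throws BgpParseException {
        ChannelBuffer in = ChannelBuffers.dynamicBuffer();
        in.writeBytes(wire);
        BgpMessageReader<BgpMessage> reader = BgpFactories.getGenericReader();
        BgpMessage message = reader.readFrom(in, new BgpHeader());
        ChannelBuffer out = ChannelBuffers.dynamicBuffer();
        message.writeTo(out);
        byte[] encoded = new byte[out.writerIndex()];
        out.readBytes(encoded, 0, encoded.length);
        return encoded;
    }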
/* * Copyright 2020 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/billing/v1/cloud_catalog.proto package com.google.cloud.billing.v1; /** * * * <pre> * Request message for `ListServices`. * </pre> * * Protobuf type {@code google.cloud.billing.v1.ListServicesRequest} */ public final class ListServicesRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.billing.v1.ListServicesRequest) ListServicesRequestOrBuilder { private static final long serialVersionUID = 0L; // Use ListServicesRequest.newBuilder() to construct. private ListServicesRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListServicesRequest() { pageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListServicesRequest(); } @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private ListServicesRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 8: { pageSize_ = input.readInt32(); break; } case 18: { java.lang.String s = input.readStringRequireUtf8(); pageToken_ = s; break; } default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.billing.v1.CloudCatalogProto .internal_static_google_cloud_billing_v1_ListServicesRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.billing.v1.CloudCatalogProto .internal_static_google_cloud_billing_v1_ListServicesRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.billing.v1.ListServicesRequest.class, com.google.cloud.billing.v1.ListServicesRequest.Builder.class); } public static final int PAGE_SIZE_FIELD_NUMBER = 1; private int pageSize_; /** * * * <pre> * Requested page size. Defaults to 5000. * </pre> * * <code>int32 page_size = 1;</code> * * @return The pageSize. 
*/ @java.lang.Override public int getPageSize() { return pageSize_; } public static final int PAGE_TOKEN_FIELD_NUMBER = 2; private volatile java.lang.Object pageToken_; /** * * * <pre> * A token identifying a page of results to return. This should be a * `next_page_token` value returned from a previous `ListServices` * call. If unspecified, the first page of results is returned. * </pre> * * <code>string page_token = 2;</code> * * @return The pageToken. */ @java.lang.Override public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } } /** * * * <pre> * A token identifying a page of results to return. This should be a * `next_page_token` value returned from a previous `ListServices` * call. If unspecified, the first page of results is returned. * </pre> * * <code>string page_token = 2;</code> * * @return The bytes for pageToken. */ @java.lang.Override public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (pageSize_ != 0) { output.writeInt32(1, pageSize_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, pageToken_); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (pageSize_ != 0) { size += com.google.protobuf.CodedOutputStream.computeInt32Size(1, pageSize_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, pageToken_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.billing.v1.ListServicesRequest)) { return super.equals(obj); } com.google.cloud.billing.v1.ListServicesRequest other = (com.google.cloud.billing.v1.ListServicesRequest) obj; if (getPageSize() != other.getPageSize()) return false; if (!getPageToken().equals(other.getPageToken())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER; hash = (53 * hash) + getPageSize(); hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getPageToken().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.billing.v1.ListServicesRequest parseFrom(java.nio.ByteBuffer data) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.billing.v1.ListServicesRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.billing.v1.ListServicesRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.billing.v1.ListServicesRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.billing.v1.ListServicesRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.billing.v1.ListServicesRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.billing.v1.ListServicesRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.billing.v1.ListServicesRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.billing.v1.ListServicesRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.billing.v1.ListServicesRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.billing.v1.ListServicesRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.billing.v1.ListServicesRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.billing.v1.ListServicesRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request message for `ListServices`. 
* </pre> * * Protobuf type {@code google.cloud.billing.v1.ListServicesRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.billing.v1.ListServicesRequest) com.google.cloud.billing.v1.ListServicesRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.billing.v1.CloudCatalogProto .internal_static_google_cloud_billing_v1_ListServicesRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.billing.v1.CloudCatalogProto .internal_static_google_cloud_billing_v1_ListServicesRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.billing.v1.ListServicesRequest.class, com.google.cloud.billing.v1.ListServicesRequest.Builder.class); } // Construct using com.google.cloud.billing.v1.ListServicesRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {} } @java.lang.Override public Builder clear() { super.clear(); pageSize_ = 0; pageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.billing.v1.CloudCatalogProto .internal_static_google_cloud_billing_v1_ListServicesRequest_descriptor; } @java.lang.Override public com.google.cloud.billing.v1.ListServicesRequest getDefaultInstanceForType() { return com.google.cloud.billing.v1.ListServicesRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.billing.v1.ListServicesRequest build() { com.google.cloud.billing.v1.ListServicesRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.billing.v1.ListServicesRequest buildPartial() { com.google.cloud.billing.v1.ListServicesRequest result = new com.google.cloud.billing.v1.ListServicesRequest(this); result.pageSize_ = pageSize_; result.pageToken_ = pageToken_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.billing.v1.ListServicesRequest) { return mergeFrom((com.google.cloud.billing.v1.ListServicesRequest) other); } else { 
super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.billing.v1.ListServicesRequest other) { if (other == com.google.cloud.billing.v1.ListServicesRequest.getDefaultInstance()) return this; if (other.getPageSize() != 0) { setPageSize(other.getPageSize()); } if (!other.getPageToken().isEmpty()) { pageToken_ = other.pageToken_; onChanged(); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.cloud.billing.v1.ListServicesRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (com.google.cloud.billing.v1.ListServicesRequest) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int pageSize_; /** * * * <pre> * Requested page size. Defaults to 5000. * </pre> * * <code>int32 page_size = 1;</code> * * @return The pageSize. */ @java.lang.Override public int getPageSize() { return pageSize_; } /** * * * <pre> * Requested page size. Defaults to 5000. * </pre> * * <code>int32 page_size = 1;</code> * * @param value The pageSize to set. * @return This builder for chaining. */ public Builder setPageSize(int value) { pageSize_ = value; onChanged(); return this; } /** * * * <pre> * Requested page size. Defaults to 5000. * </pre> * * <code>int32 page_size = 1;</code> * * @return This builder for chaining. */ public Builder clearPageSize() { pageSize_ = 0; onChanged(); return this; } private java.lang.Object pageToken_ = ""; /** * * * <pre> * A token identifying a page of results to return. This should be a * `next_page_token` value returned from a previous `ListServices` * call. If unspecified, the first page of results is returned. * </pre> * * <code>string page_token = 2;</code> * * @return The pageToken. */ public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * A token identifying a page of results to return. This should be a * `next_page_token` value returned from a previous `ListServices` * call. If unspecified, the first page of results is returned. * </pre> * * <code>string page_token = 2;</code> * * @return The bytes for pageToken. */ public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * A token identifying a page of results to return. This should be a * `next_page_token` value returned from a previous `ListServices` * call. If unspecified, the first page of results is returned. * </pre> * * <code>string page_token = 2;</code> * * @param value The pageToken to set. * @return This builder for chaining. 
*/ public Builder setPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } pageToken_ = value; onChanged(); return this; } /** * * * <pre> * A token identifying a page of results to return. This should be a * `next_page_token` value returned from a previous `ListServices` * call. If unspecified, the first page of results is returned. * </pre> * * <code>string page_token = 2;</code> * * @return This builder for chaining. */ public Builder clearPageToken() { pageToken_ = getDefaultInstance().getPageToken(); onChanged(); return this; } /** * * * <pre> * A token identifying a page of results to return. This should be a * `next_page_token` value returned from a previous `ListServices` * call. If unspecified, the first page of results is returned. * </pre> * * <code>string page_token = 2;</code> * * @param value The bytes for pageToken to set. * @return This builder for chaining. */ public Builder setPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); pageToken_ = value; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.billing.v1.ListServicesRequest) } // @@protoc_insertion_point(class_scope:google.cloud.billing.v1.ListServicesRequest) private static final com.google.cloud.billing.v1.ListServicesRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.billing.v1.ListServicesRequest(); } public static com.google.cloud.billing.v1.ListServicesRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListServicesRequest> PARSER = new com.google.protobuf.AbstractParser<ListServicesRequest>() { @java.lang.Override public ListServicesRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new ListServicesRequest(input, extensionRegistry); } }; public static com.google.protobuf.Parser<ListServicesRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListServicesRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.billing.v1.ListServicesRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
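A hedged usage sketch of the generated surface above: build, serialize, and re-parse a request. The field values are arbitrary; toByteArray() comes from the GeneratedMessageV3 base class, parseFrom(byte[]) is defined in this file:

// Hedged sketch, not part of the generated file:
ListServicesRequest request =
    ListServicesRequest.newBuilder()
        .setPageSize(100)   // page_size = 1; the server defaults to 5000 when unset
        .setPageToken("")   // empty token requests the first page
        .build();

byte[] wire = request.toByteArray();
ListServicesRequest parsed = ListServicesRequest.parseFrom(wire);
assert parsed.equals(request);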
/* * Zed Attack Proxy (ZAP) and its related class files. * * ZAP is an HTTP/HTTPS proxy for assessing web application security. * * Copyright 2010 The ZAP Development Team * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.zaproxy.zap.extension.ascan; import java.awt.EventQueue; import java.util.ArrayList; import java.util.Collections; import java.util.ConcurrentModificationException; import java.util.Date; import java.util.List; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.ScheduledFuture; import java.util.concurrent.TimeUnit; import javax.swing.DefaultListModel; import org.apache.log4j.Logger; import org.parosproxy.paros.core.scanner.Alert; import org.parosproxy.paros.core.scanner.HostProcess; import org.parosproxy.paros.core.scanner.ScannerListener; import org.parosproxy.paros.core.scanner.ScannerParam; import org.parosproxy.paros.db.DatabaseException; import org.parosproxy.paros.model.HistoryReference; import org.parosproxy.paros.model.Model; import org.parosproxy.paros.model.SiteNode; import org.parosproxy.paros.network.ConnectionParam; import org.parosproxy.paros.network.HttpMalformedHeaderException; import org.parosproxy.paros.network.HttpMessage; import org.parosproxy.paros.view.View; import org.zaproxy.zap.extension.ruleconfig.RuleConfigParam; import org.zaproxy.zap.model.GenericScanner2; import org.zaproxy.zap.model.Target; public class ActiveScan extends org.parosproxy.paros.core.scanner.Scanner implements GenericScanner2, ScannerListener { /** The maximum number of statistic history records cached */ private static final int MAX_STATS_HISTORY_SIZE = 240; public static enum State { NOT_STARTED, RUNNING, PAUSED, FINISHED }; private String displayName = null; private int progress = 0; private ActiveScanTableModel messagesTableModel = new ActiveScanTableModel(); private FilterMessageTableModel filterMessageTableModel = new FilterMessageTableModel(); private SiteNode startNode = null; private ResponseCountSnapshot rcTotals = new ResponseCountSnapshot(); private ResponseCountSnapshot rcLastSnapshot = new ResponseCountSnapshot(); private List<ResponseCountSnapshot> rcHistory = new ArrayList<ResponseCountSnapshot>(); private Date timeStarted = null; private Date timeFinished = null; private int maxResultsToList = 0; private final List<Integer> hRefs = Collections.synchronizedList(new ArrayList<Integer>()); private final List<Integer> alerts = Collections.synchronizedList(new ArrayList<Integer>()); private ScheduledExecutorService scheduler; private ScheduledFuture<?> schedHandle; private static final Logger log = Logger.getLogger(ActiveScan.class); @Deprecated public ActiveScan( String displayName, ScannerParam scannerParam, ConnectionParam param, ScanPolicy scanPolicy) { this(displayName, scannerParam, param, scanPolicy, null); } public ActiveScan( String displayName, ScannerParam scannerParam, ConnectionParam param, ScanPolicy scanPolicy, RuleConfigParam ruleConfigParam) { super(scannerParam, param, scanPolicy, 
ruleConfigParam);
        this.displayName = displayName;
        this.maxResultsToList = scannerParam.getMaxResultsToList();
        // Easiest way to get the messages and alerts ;)
        this.addScannerListener(this);
    }

    @Override
    public int getMaximum() {
        return 100;
    }

    @Override
    public int getProgress() {
        return progress;
    }

    @Override
    public boolean isRunning() {
        return !this.isStop();
    }

    @Override
    public boolean isStopped() {
        return super.isStop();
    }

    @Override
    public void pauseScan() {
        if (this.isRunning()) {
            super.pause();
        }
    }

    public int getTotalRequests() {
        int total = 0;
        for (HostProcess process : this.getHostProcesses()) {
            total += process.getRequestCount();
        }
        return total;
    }

    public int getTotalNewAlerts() {
        int totalNewAlerts = 0;
        for (HostProcess process : this.getHostProcesses()) {
            totalNewAlerts += process.getNewAlertCount();
        }
        return totalNewAlerts;
    }

    public ResponseCountSnapshot getRequestHistory() {
        if (this.rcHistory.size() > 0) {
            try {
                return this.rcHistory.remove(0);
            } catch (Exception e) {
                // Ignore - another thread must have just removed the last snapshot
            }
        }
        return null;
    }

    @Override
    public void start(Target target) {
        reset();
        this.timeStarted = new Date();
        this.progress = 0;
        final int period = 2;

        super.start(target);

        if (View.isInitialised()) {
            scheduler = Executors.newScheduledThreadPool(1);
            // For now this is only supported in the desktop UI
            final Runnable requestCounter =
                    new Runnable() {
                        @Override
                        public void run() {
                            if (isStop()) {
                                schedHandle.cancel(true);
                                return;
                            }
                            ResponseCountSnapshot currentSnapshot = rcTotals.clone();
                            rcHistory.add(currentSnapshot.getDifference(rcLastSnapshot));
                            if (rcHistory.size() > MAX_STATS_HISTORY_SIZE) {
                                // Trim it to prevent it from getting too big
                                rcHistory.remove(0);
                            }
                            rcLastSnapshot = currentSnapshot;
                        }
                    };
            schedHandle =
                    scheduler.scheduleWithFixedDelay(
                            requestCounter, period, period, TimeUnit.SECONDS);
        }
    }

    @Override
    public void stopScan() {
        super.stop();
        if (schedHandle != null) {
            schedHandle.cancel(true);
        }
    }

    @Override
    public void resumeScan() {
        if (this.isPaused()) {
            super.resume();
        }
    }

    @Override
    public void alertFound(Alert alert) {
        int alertId = alert.getAlertId();
        if (alertId != -1) {
            alerts.add(alert.getAlertId());
        }
    }

    @Override
    public void hostComplete(int id, String hostAndPort) {}

    @Override
    public void hostNewScan(int id, String hostAndPort, HostProcess hostThread) {}

    @Override
    public void hostProgress(int id, String hostAndPort, String msg, int percentage) {
        // Calculate the percentage based on the average of all of the host processes.
        // This is an approximation, as different host processes can take significantly
        // different amounts of time.
        int tot = 0;
        for (HostProcess process : this.getHostProcesses()) {
            tot += process.getPercentageComplete();
        }
        int latestProgress = tot / this.getHostProcesses().size();
        if (latestProgress != this.progress) {
            this.progress = latestProgress;
            ActiveScanEventPublisher.publishScanProgressEvent(this.getId(), this.progress);
        }
    }

    @Override
    public void filteredMessage(HttpMessage msg, String reason) {
        filterMessageTableModel.addResult(msg.getRequestHeader().getURI().toString(), reason);
    }

    /**
     * @deprecated (2.5.0) No longer used/needed, the request count is automatically
     *     updated/maintained by {@link HostProcess}.
     */
    @Deprecated
    public void updatePluginRequestCounts() {
        // No longer used.
} @Override public void scannerComplete(int id) { this.timeFinished = new Date(); if (scheduler != null) { scheduler.shutdown(); } } // @Override public DefaultListModel<HistoryReference> getList() { return null; } FilterMessageTableModel getFilterMessageTableModel() { return filterMessageTableModel; } public ActiveScanTableModel getMessagesTableModel() { return messagesTableModel; } @Override public void notifyNewMessage(final HttpMessage msg) { HistoryReference hRef = msg.getHistoryRef(); if (hRef == null) { try { hRef = new HistoryReference( Model.getSingleton().getSession(), HistoryReference.TYPE_SCANNER_TEMPORARY, msg); msg.setHistoryRef(null); hRefs.add(hRef.getHistoryId()); } catch (HttpMalformedHeaderException | DatabaseException e) { log.error(e.getMessage(), e); } } else { hRefs.add(hRef.getHistoryId()); } this.rcTotals.incResponseCodeCount(msg.getResponseHeader().getStatusCode()); if (hRef != null && View.isInitialised()) { // Very large lists significantly impact the UI responsiveness // limiting them makes large scans _much_ quicker if (this.rcTotals.getTotal() > this.maxResultsToList) { removeFirstHistoryReferenceInEdt(); } addHistoryReferenceInEdt(hRef); } } private void addHistoryReferenceInEdt(final HistoryReference hRef) { EventQueue.invokeLater( new Runnable() { @Override public void run() { messagesTableModel.addHistoryReference(hRef); } }); } private void removeFirstHistoryReferenceInEdt() { EventQueue.invokeLater( new Runnable() { @Override public void run() { messagesTableModel.removeHistoryReference( getMessagesTableModel().getEntry(0).getHistoryReference()); } }); } @Override public SiteNode getStartNode() { return this.startNode; } @Override public void setStartNode(SiteNode startNode) { this.startNode = startNode; super.setStartNode(startNode); } public void reset() { if (!View.isInitialised() || EventQueue.isDispatchThread()) { this.messagesTableModel.clear(); } else { EventQueue.invokeLater( new Runnable() { @Override public void run() { reset(); } }); } } public Date getTimeStarted() { return timeStarted; } public Date getTimeFinished() { return timeFinished; } /** * Returns the IDs of all messages sent/created during the scan. The message must be recreated * with a HistoryReference. * * <p><strong>Note:</strong> Iterations must be {@code synchronized} on returned object. Failing * to do so might result in {@code ConcurrentModificationException}. * * @return the IDs of all the messages sent/created during the scan * @see HistoryReference * @see ConcurrentModificationException */ public List<Integer> getMessagesIds() { return hRefs; } /** * Returns the IDs of all alerts raised during the scan. * * <p><strong>Note:</strong> Iterations must be {@code synchronized} on returned object. Failing * to do so might result in {@code ConcurrentModificationException}. * * @return the IDs of all the alerts raised during the scan * @see ConcurrentModificationException */ public List<Integer> getAlertsIds() { return alerts; } public State getState() { if (this.timeStarted == null) { return State.NOT_STARTED; } else if (this.isStop()) { return State.FINISHED; } else if (this.isPaused()) { return State.PAUSED; } else { return State.RUNNING; } } @Override public void setDisplayName(String name) { this.displayName = name; } @Override public String getDisplayName() { return this.displayName; } @Override public void setScanId(int id) { this.setId(id); } @Override public int getScanId() { return this.getId(); } }
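/*
 * Illustrative sketch, not part of ZAP: consuming a finished scan's results while
 * honoring the synchronization contract documented on getMessagesIds() and
 * getAlertsIds() above. The "scan" parameter is an assumed, already-completed
 * ActiveScan instance.
 */
class ActiveScanResultsSketch {
    static int countResults(ActiveScan scan) {
        java.util.List<Integer> ids = scan.getMessagesIds();
        int count = 0;
        // Iteration must be synchronized on the returned list, otherwise a
        // ConcurrentModificationException may be thrown (see the javadoc above).
        synchronized (ids) {
            for (Integer id : ids) {
                // Each ID can be turned back into a message via a HistoryReference.
                count++;
            }
        }
        return count;
    }
}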
/* =========================================================================== Copyright 2002-2010 Martin Dvorak Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. =========================================================================== */ package com.emental.mindraider.core.rdf; import java.io.File; import org.apache.log4j.Logger; import com.emental.mindraider.core.MindRaider; import com.hp.hpl.jena.ontology.ObjectProperty; import com.hp.hpl.jena.ontology.OntClass; import com.hp.hpl.jena.ontology.OntModel; import com.mindcognition.mindraider.MindRaiderException; import com.mindcognition.mindraider.commons.MindRaiderConstants; import com.mindcognition.mindraider.utils.Utils; /** * This class is custodian of RDF models and OWL ontologies. * * It allows registering of models to be synchronized and fetched for * off-line processing/reading etc. It also includes helpers and auxiliary * methods. * * @author Martin.Dvorak * @see {@link RdfModel} and {@link OwlOntology} * @version $Revision: 1.4 $ ($Author: mindraider $) */ public class RdfCustodian { /** * Logger for this class. */ private static final Logger logger = Logger.getLogger(RdfCustodian.class); /* * well known ontologies (file names) */ /** * tags ontology */ public static final String FILENAME_TAGS_ONTOLOGY = "tags.owl.xml"; /** * MR resources ontology */ public static final String FILENAME_MR_ONTOLOGY = "mindraider.owl.xml"; /* * custodian */ /** * Location of RDF models. */ private String modelsNest; /** * Location of OWL ontologies. */ private String ontologiesNest; /** * Constructor. */ public RdfCustodian() { // models directory this.modelsNest = Utils.normalizePath(MindRaider.profile .getHomeDirectory() + File.separator + MindRaiderConstants.MR_DIR_MODELS); Utils.createDirectory(modelsNest); // off-line models directory this.ontologiesNest = Utils.normalizePath(MindRaider.profile .getHomeDirectory() + File.separator + MindRaiderConstants.MR_DIR_ONTOLOGIES); Utils.createDirectory(ontologiesNest); // check whether important models and ontologies exist - if not, create them // tag ontology if(!existsOntology(FILENAME_TAGS_ONTOLOGY)) { logger.debug("Creating initial tag ontology..."); OwlOntology tagOntology = createTagOntology(); try { saveOntology(FILENAME_TAGS_ONTOLOGY, tagOntology); } catch (MindRaiderException e) { logger.error("Unable to save tag ontology!",e); } } // MR ontology (types of resources) if(!existsOntology(FILENAME_MR_ONTOLOGY)) { logger.debug("Creating MR ontology..."); OwlOntology mrOntology=createMindRaiderOntology(); try { saveOntology(FILENAME_MR_ONTOLOGY, mrOntology); } catch (MindRaiderException e) { logger.error("Unable to save MindRaider ontology!", e); } } } /** * Save RDF model with the given filename. * * @param filename name of the model file (not the path) */ public void saveModel(String filename, RdfModel model) { } /** * Save ontology with the given filename. 
     *
     * @param filename name of the ontology file (not the path)
     * @param ontology the ontology to save
     * @throws MindRaiderException
     */
    public void saveOntology(String filename, OwlOntology ontology) throws MindRaiderException {
        ontology.setFilename(ontologiesNest + File.separator + filename);
        ontology.save();
    }

    /**
     * Load ontology from the file with given filename.
     *
     * @param filename name of the ontology file (not the path)
     * @return the loaded ontology
     */
    public OwlOntology loadOntology(String filename) {
        OwlOntology owlOntology = new OwlOntology();
        final String ontologyFile = ontologiesNest + File.separator + filename;
        logger.debug("Loading ontology: " + ontologyFile); // {{debug}}
        owlOntology.setFilename(ontologyFile);
        owlOntology.load();
        return owlOntology;
    }

    /**
     * Check whether model exists.
     *
     * @param filename name of the model file (not the path)
     * @return {@code true} if the model file exists
     */
    public boolean existsModel(String filename) {
        File target = new File(modelsNest + File.separator + filename);
        return target.exists();
    }

    /**
     * Check whether ontology exists.
     *
     * @param filename name of the ontology file (not the path)
     * @return {@code true} if the ontology file exists
     */
    public boolean existsOntology(String filename) {
        File target = new File(ontologiesNest + File.separator + filename);
        return target.exists();
    }

    /**
     * Get MR models directory.
     *
     * @return the MR models directory
     */
    public String getModelsDirectory() {
        return modelsNest;
    }

    /**
     * Get MR ontology directory.
     *
     * @return the MR ontology directory
     */
    public String getOntologiesDirectory() {
        return ontologiesNest;
    }

    /*
     * Off-line models caching
     */

    /**
     * Synchronize all registered resources - remote notebooks, RDFs (FOAF), etc.
     */
    public void synchronize() {
        // TODO to be implemented
    }

    /**
     * Register model for off-line caching.
     *
     * @param url
     *            the url to register.
     * @param name
     *            the name under which to register it.
     * @todo to be implemented
     */
    public void register(String url, String name) {
        // TODO to be implemented
    }

    /**
     * Unregister model.
     *
     * @param url
     *            the url to unregister.
     * @todo to be implemented
     */
    public void unregister(String url) {
        // TODO to be implemented
    }

    /*
     * ontology builders
     */

    /**
     * Create initial (empty) version of tags ontology.
     *
     * @return the tag ontology
     */
    private OwlOntology createTagOntology() {
        OwlOntology tagOntology = new OwlOntology();
        OntModel ontology = tagOntology.getOntology();

        // merge in former flags ontology and build tags hierarchy in parallel
        String[] flagUris = {
                MindRaiderConstants.MR_OWL_FLAG_IMPORTANT,
                MindRaiderConstants.MR_OWL_FLAG_COOL,
                MindRaiderConstants.MR_OWL_FLAG_LATER,
                MindRaiderConstants.MR_OWL_FLAG_OBSOLETE,
                MindRaiderConstants.MR_OWL_FLAG_PROBLEM,
                MindRaiderConstants.MR_OWL_FLAG_PERSONAL,
                MindRaiderConstants.MR_OWL_FLAG_TODO
        };

        // make each flag a subclass of both the tag and flag classes, and set its
        // label from the flag URI (it is safe)
        OntClass tag = ontology.createClass(MindRaiderConstants.MR_OWL_TAG);
        OntClass flag = ontology.createClass(MindRaiderConstants.MR_OWL_FLAG);
        for (String flagUri : flagUris) {
            OntClass flagClass = ontology.createClass(flagUri);
            flagClass.addLabel(flagUri, "en");
            flag.addSubClass(flagClass);
            tag.addSubClass(flagClass);
        }

        // properties
        ObjectProperty flagProperty = ontology.createObjectProperty(MindRaiderConstants.MR_OWL_PROPERTY_FLAG);
        flagProperty.addRange(flag);

        return tagOntology;
    }

    /**
     * Build MR's OWL ontology.
     *
     * @return the MindRaider ontology
     */
    private OwlOntology createMindRaiderOntology() {
        OwlOntology mrOntology = new OwlOntology();
        OntModel ontology = mrOntology.getOntology();

        // MR resource type classes
        OntClass mrResource = ontology.createClass(MindRaiderConstants.MR_OWL_CLASS_RESOURCE);
        OntClass profile = ontology.createClass(MindRaiderConstants.MR_OWL_CLASS_PROFILE);
        OntClass mindMap = ontology.createClass(MindRaiderConstants.MR_OWL_CLASS_MINDMAP);
        OntClass folder = ontology.createClass(MindRaiderConstants.MR_OWL_CLASS_FOLDER);
        OntClass notebook = ontology.createClass(MindRaiderConstants.MR_OWL_CLASS_NOTEBOOK);
        OntClass concept = ontology.createClass(MindRaiderConstants.MR_OWL_CLASS_CONCEPT);
        OntClass attachment = ontology.createClass(MindRaiderConstants.MR_OWL_CLASS_ATTACHMENT);
        OntClass localAttachment = ontology.createClass(MindRaiderConstants.MR_OWL_CLASS_LOCAL_ATTACHMENT);
        OntClass webAttachment = ontology.createClass(MindRaiderConstants.MR_OWL_CLASS_WEB_ATTACHMENT);
        OntClass mrAttachment = ontology.createClass(MindRaiderConstants.MR_OWL_CLASS_MR_ATTACHMENT);

        // taxonomy
        mrResource.addSubClass(mindMap);
        mrResource.addSubClass(profile);
        mindMap.addSubClass(folder);
        mindMap.addSubClass(notebook);
        mindMap.addSubClass(concept);
        mindMap.addSubClass(attachment);
        attachment.addSubClass(localAttachment);
        attachment.addSubClass(webAttachment);
        attachment.addSubClass(mrAttachment);

        // properties
        ObjectProperty hasAttachment = ontology.createObjectProperty(MindRaiderConstants.MR_OWL_PROPERTY_HAS_ATTACH);
        hasAttachment.addDomain(concept);
        hasAttachment.addRange(attachment);

        ontology.createObjectProperty(MindRaiderConstants.MR_OWL_PROPERTY_IS_DISCARDED);

        return mrOntology;
    }
}
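/*
 * Illustrative sketch, not part of MindRaider: the typical read path through the
 * custodian, using only methods defined above. It assumes MindRaider.profile has
 * been initialized before the custodian is constructed, since the constructor
 * derives its directories from the profile's home directory.
 */
class RdfCustodianUsageSketch {
    static OwlOntology loadTagOntology() {
        RdfCustodian custodian = new RdfCustodian();
        // The constructor creates the tag ontology on first run, so this check
        // normally succeeds; it is kept for robustness.
        if (custodian.existsOntology(RdfCustodian.FILENAME_TAGS_ONTOLOGY)) {
            return custodian.loadOntology(RdfCustodian.FILENAME_TAGS_ONTOLOGY);
        }
        return null;
    }
}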
/*
 * Jitsi, the OpenSource Java VoIP and Instant Messaging client.
 *
 * Copyright @ 2015 Atlassian Pty Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package net.java.sip.communicator.impl.protocol.jabber;

import java.util.*;
import java.util.Map.Entry;
import java.util.concurrent.*;

import net.java.sip.communicator.util.Logger;

import org.jitsi.util.*;
import org.jitsi.utils.*;
import org.jivesoftware.smack.*;
import org.jivesoftware.smack.SmackException.*;
import org.jivesoftware.smack.filter.*;
import org.jivesoftware.smack.packet.*;
import org.jivesoftware.smack.roster.*;
import org.jivesoftware.smackx.disco.packet.*;
import org.jxmpp.jid.*;
import org.xmpp.jnodes.smack.*;

/**
 * Search for jingle nodes.
 *
 * @author Damian Minkov
 */
public class JingleNodesServiceDiscovery
    implements Runnable
{
    /**
     * Logger of this class.
     */
    private static final Logger logger
        = Logger.getLogger(JingleNodesServiceDiscovery.class);

    /**
     * Property containing the jingle nodes prefixes to search for.
     */
    private static final String JINGLE_NODES_SEARCH_PREFIX_PROP
        = "net.java.sip.communicator.impl.protocol.jabber.JINGLE_NODES_SEARCH_PREFIXES";

    /**
     * Property indicating whether the search should stop after the first
     * node found via the prefix list.
     */
    private static final String JINGLE_NODES_SEARCH_PREFIXES_STOP_ON_FIRST_PROP
        = "net.java.sip.communicator.impl.protocol.jabber.JINGLE_NODES_SEARCH_PREFIXES_STOP_ON_FIRST";

    /**
     * Synchronization object to monitor auto discovery.
     */
    private final Object jingleNodesSyncRoot;

    /**
     * The service.
     */
    private final SmackServiceNode service;

    /**
     * The connection, must be connected.
     */
    private final XMPPConnection connection;

    /**
     * Our account.
     */
    private final JabberAccountIDImpl accountID;

    /**
     * Creates the discovery.
     *
     * @param service the service.
     * @param connection the connected connection.
     * @param accountID our account.
     * @param syncRoot the synchronization object used while discovering.
     */
    JingleNodesServiceDiscovery(SmackServiceNode service,
                                XMPPConnection connection,
                                JabberAccountIDImpl accountID,
                                Object syncRoot)
    {
        this.jingleNodesSyncRoot = syncRoot;
        this.service = service;
        this.connection = connection;
        this.accountID = accountID;
    }

    /**
     * The actual discovery.
     */
    public void run()
    {
        synchronized(jingleNodesSyncRoot)
        {
            long start = System.currentTimeMillis();
            if(logger.isInfoEnabled())
            {
                logger.info("Start Jingle Nodes discovery!");
            }

            SmackServiceNode.MappedNodes nodes = null;

            String searchNodesWithPrefix
                = JabberActivator.getResources()
                    .getSettingsString(JINGLE_NODES_SEARCH_PREFIX_PROP);
            if(searchNodesWithPrefix == null
                || searchNodesWithPrefix.length() == 0)
                searchNodesWithPrefix
                    = JabberActivator.getConfigurationService()
                        .getString(JINGLE_NODES_SEARCH_PREFIX_PROP);

            // If there are no default prefix settings, or this option is
            // turned off, just proceed with default service discovery,
            // leaving the prefix list empty.
            if(searchNodesWithPrefix == null
                || searchNodesWithPrefix.length() == 0
                || searchNodesWithPrefix.equalsIgnoreCase("off"))
            {
                searchNodesWithPrefix = "";
            }

            try
            {
                nodes = searchServicesWithPrefix(
                        service,
                        connection,
                        6, 3, 20,
                        JingleChannelIQ.UDP,
                        accountID.isJingleNodesSearchBuddiesEnabled(),
                        accountID.isJingleNodesAutoDiscoveryEnabled(),
                        searchNodesWithPrefix);
            }
            catch (NotConnectedException | InterruptedException e)
            {
                logger.error("Search failed", e);
            }

            if(logger.isInfoEnabled())
            {
                logger.info("Jingle Nodes discovery terminated!");
                logger.info("Found "
                    + (nodes != null ? nodes.getRelayEntries().size() : "0")
                    + " Jingle Nodes relays for account: "
                    + accountID.getAccountAddress()
                    + " in " + (System.currentTimeMillis() - start) + " ms.");
            }

            if(nodes != null)
                service.addEntries(nodes);
        }
    }

    /**
     * Searches for services, giving priority to the prefix list. If the
     * stop-on-first option is set, the search returns after the first
     * service has been found.
     *
     * @param service the service.
     * @param xmppConnection the connection.
     * @param maxEntries maximum entries to be searched.
     * @param maxDepth the depth while recursively searching.
     * @param maxSearchNodes number of nodes to query.
     * @param protocol the protocol.
     * @param searchBuddies whether to also search the buddies in our
     * contact list.
     * @param autoDiscover whether auto discovery is turned on.
     * @param prefix the comma-separated list of prefixes to be searched
     * first.
     * @return the nodes that were found, or {@code null} if the connection
     * is not usable.
     */
    private SmackServiceNode.MappedNodes searchServicesWithPrefix(
            SmackServiceNode service,
            XMPPConnection xmppConnection,
            int maxEntries,
            int maxDepth,
            int maxSearchNodes,
            String protocol,
            boolean searchBuddies,
            boolean autoDiscover,
            String prefix)
        throws NotConnectedException,
               InterruptedException
    {
        if (xmppConnection == null || !xmppConnection.isConnected())
        {
            return null;
        }

        SmackServiceNode.MappedNodes mappedNodes
            = new SmackServiceNode.MappedNodes();
        ConcurrentHashMap<Jid, Jid> visited = new ConcurrentHashMap<>();

        // Request to our pre-configured trackerEntries
        for(Entry<Jid, TrackerEntry> entry
                : service.getTrackerEntries().entrySet())
        {
            SmackServiceNode.deepSearch(
                    xmppConnection,
                    maxEntries,
                    entry.getValue().getJid(),
                    mappedNodes,
                    maxDepth - 1,
                    maxSearchNodes,
                    protocol,
                    visited);
        }

        if(autoDiscover)
        {
            boolean continueSearch
                = searchDiscoItems(
                        service,
                        xmppConnection,
                        maxEntries,
                        xmppConnection.getXMPPServiceDomain(),
                        mappedNodes,
                        maxDepth - 1,
                        maxSearchNodes,
                        protocol,
                        visited,
                        prefix);

            // The option to stop after the first found node is turned on,
            // let's exit.
            if(!continueSearch)
                return mappedNodes;

            // Request to Server
            SmackServiceNode.deepSearch(
                    xmppConnection,
                    maxEntries,
                    xmppConnection.getXMPPServiceDomain(),
                    mappedNodes,
                    maxDepth - 1,
                    maxSearchNodes,
                    protocol,
                    visited);

            // Request to Buddies
            Roster r = Roster.getInstanceFor(xmppConnection);
            if (r != null && searchBuddies)
            {
                for (final RosterEntry re : r.getEntries())
                {
                    for (final Presence presence : r.getPresences(re.getJid()))
                    {
                        if (presence.isAvailable())
                        {
                            SmackServiceNode.deepSearch(
                                    xmppConnection,
                                    maxEntries,
                                    presence.getFrom(),
                                    mappedNodes,
                                    maxDepth - 1,
                                    maxSearchNodes,
                                    protocol,
                                    visited);
                        }
                    }
                }
            }
        }

        // Return everything that was found so the caller can register the
        // entries (returning null here would discard the search results).
        return mappedNodes;
    }

    /**
     * Discover services and query them.
     * @param service the service.
     * @param xmppConnection the connection.
     * @param maxEntries maximum entries to be searched.
     * @param startPoint the start point to search recursively
     * @param mappedNodes nodes found
     * @param maxDepth the depth while recursively searching.
     * @param maxSearchNodes number of nodes to query
     * @param protocol the protocol
     * @param visited nodes already visited
     * @param prefix the comma-separated list of prefixes to be searched first.
     * @return {@code true} if the search should continue, {@code false} if it
     * should stop after the first found node.
     */
    private static boolean searchDiscoItems(
            SmackServiceNode service,
            XMPPConnection xmppConnection,
            int maxEntries,
            DomainBareJid startPoint,
            SmackServiceNode.MappedNodes mappedNodes,
            int maxDepth,
            int maxSearchNodes,
            String protocol,
            ConcurrentHashMap<Jid, Jid> visited,
            String prefix)
        throws InterruptedException,
               NotConnectedException
    {
        String[] prefixes = prefix.split(",");

        // The default is to stop when the first one is found.
        boolean stopOnFirst = true;

        String stopOnFirstDefaultValue
            = JabberActivator.getResources().getSettingsString(
                JINGLE_NODES_SEARCH_PREFIXES_STOP_ON_FIRST_PROP);
        if(stopOnFirstDefaultValue != null)
        {
            stopOnFirst = Boolean.parseBoolean(stopOnFirstDefaultValue);
        }

        stopOnFirst = JabberActivator.getConfigurationService().getBoolean(
            JINGLE_NODES_SEARCH_PREFIXES_STOP_ON_FIRST_PROP,
            stopOnFirst);

        final DiscoverItems items = new DiscoverItems();
        items.setTo(startPoint);
        StanzaCollector collector
            = xmppConnection.createStanzaCollectorAndSend(items);

        DiscoverItems result = null;
        try
        {
            result = (DiscoverItems) collector.nextResult(
                Math.round(SmackConfiguration.getDefaultReplyTimeout() * 1.5));
        }
        finally
        {
            collector.cancel();
        }

        if (result != null)
        {
            // First search the priority items.
            for (DiscoverItems.Item item : result.getItems())
            {
                for(String pref : prefixes)
                {
                    if( !StringUtils.isNullOrEmpty(pref)
                        && item.getEntityID().toString().startsWith(pref.trim()))
                    {
                        SmackServiceNode.deepSearch(
                                xmppConnection,
                                maxEntries,
                                item.getEntityID(),
                                mappedNodes,
                                maxDepth,
                                maxSearchNodes,
                                protocol,
                                visited);

                        if(stopOnFirst)
                            return false; // stop and don't continue
                    }
                }
            }

            // Now search the rest.
            for (DiscoverItems.Item item : result.getItems())
            {
                // We may have already searched this node if it starts with
                // one of the prefixes. The visited map is keyed by Jid, so
                // look the entity up by its Jid (a String key would never
                // match).
                if(!visited.containsKey(item.getEntityID()))
                    SmackServiceNode.deepSearch(
                            xmppConnection,
                            maxEntries,
                            item.getEntityID(),
                            mappedNodes,
                            maxDepth,
                            maxSearchNodes,
                            protocol,
                            visited);

                if(stopOnFirst)
                    return false; // stop and don't continue
            }
        }

        // true: we should continue searching
        return true;
    }
}
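/*
 * Illustrative sketch, not part of Jitsi: the prefix-priority matching rule that
 * searchDiscoItems() applies above, extracted into a standalone form. The property
 * value format is the comma-separated prefix list described in the javadoc; the
 * sample values below are hypothetical.
 */
class PrefixMatchSketch
{
    static boolean matchesAnyPrefix(String entityId, String commaSeparatedPrefixes)
    {
        for (String pref : commaSeparatedPrefixes.split(","))
        {
            // Blank entries are skipped; non-blank prefixes are trimmed before
            // matching, mirroring the checks in searchDiscoItems().
            if (!pref.trim().isEmpty() && entityId.startsWith(pref.trim()))
            {
                return true;
            }
        }
        return false;
    }

    public static void main(String[] args)
    {
        // A node whose JID starts with a configured prefix is searched first.
        System.out.println(
            matchesAnyPrefix("jingle.example.com", "jingle., relay."));
    }
}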
/* * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.redshiftdataapi.model; import java.io.Serializable; import javax.annotation.Generated; import com.amazonaws.AmazonWebServiceRequest; /** * * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/redshift-data-2019-12-20/DescribeTable" target="_top">AWS API * Documentation</a> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class DescribeTableRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable { /** * <p> * The cluster identifier. This parameter is required when connecting to a cluster and authenticating using either * Secrets Manager or temporary credentials. * </p> */ private String clusterIdentifier; /** * <p> * A database name. The connected database is specified when you connect with your authentication credentials. * </p> */ private String connectedDatabase; /** * <p> * The name of the database that contains the tables to be described. If <code>ConnectedDatabase</code> is not * specified, this is also the database to connect to with your authentication credentials. * </p> */ private String database; /** * <p> * The database user name. This parameter is required when connecting to a cluster and authenticating using * temporary credentials. * </p> */ private String dbUser; /** * <p> * The maximum number of tables to return in the response. If more tables exist than fit in one response, then * <code>NextToken</code> is returned to page through the results. * </p> */ private Integer maxResults; /** * <p> * A value that indicates the starting point for the next set of response records in a subsequent request. If a * value is returned in a response, you can retrieve the next set of records by providing this returned NextToken * value in the next NextToken parameter and retrying the command. If the NextToken field is empty, all response * records have been retrieved for the request. * </p> */ private String nextToken; /** * <p> * The schema that contains the table. If no schema is specified, then matching tables for all schemas are returned. * </p> */ private String schema; /** * <p> * The name or ARN of the secret that enables access to the database. This parameter is required when authenticating * using Secrets Manager. * </p> */ private String secretArn; /** * <p> * The table name. If no table is specified, then all tables for all matching schemas are returned. If no table and * no schema is specified, then all tables for all schemas in the database are returned * </p> */ private String table; /** * <p> * The cluster identifier. This parameter is required when connecting to a cluster and authenticating using either * Secrets Manager or temporary credentials. * </p> * * @param clusterIdentifier * The cluster identifier. This parameter is required when connecting to a cluster and authenticating using * either Secrets Manager or temporary credentials. 
*/ public void setClusterIdentifier(String clusterIdentifier) { this.clusterIdentifier = clusterIdentifier; } /** * <p> * The cluster identifier. This parameter is required when connecting to a cluster and authenticating using either * Secrets Manager or temporary credentials. * </p> * * @return The cluster identifier. This parameter is required when connecting to a cluster and authenticating using * either Secrets Manager or temporary credentials. */ public String getClusterIdentifier() { return this.clusterIdentifier; } /** * <p> * The cluster identifier. This parameter is required when connecting to a cluster and authenticating using either * Secrets Manager or temporary credentials. * </p> * * @param clusterIdentifier * The cluster identifier. This parameter is required when connecting to a cluster and authenticating using * either Secrets Manager or temporary credentials. * @return Returns a reference to this object so that method calls can be chained together. */ public DescribeTableRequest withClusterIdentifier(String clusterIdentifier) { setClusterIdentifier(clusterIdentifier); return this; } /** * <p> * A database name. The connected database is specified when you connect with your authentication credentials. * </p> * * @param connectedDatabase * A database name. The connected database is specified when you connect with your authentication * credentials. */ public void setConnectedDatabase(String connectedDatabase) { this.connectedDatabase = connectedDatabase; } /** * <p> * A database name. The connected database is specified when you connect with your authentication credentials. * </p> * * @return A database name. The connected database is specified when you connect with your authentication * credentials. */ public String getConnectedDatabase() { return this.connectedDatabase; } /** * <p> * A database name. The connected database is specified when you connect with your authentication credentials. * </p> * * @param connectedDatabase * A database name. The connected database is specified when you connect with your authentication * credentials. * @return Returns a reference to this object so that method calls can be chained together. */ public DescribeTableRequest withConnectedDatabase(String connectedDatabase) { setConnectedDatabase(connectedDatabase); return this; } /** * <p> * The name of the database that contains the tables to be described. If <code>ConnectedDatabase</code> is not * specified, this is also the database to connect to with your authentication credentials. * </p> * * @param database * The name of the database that contains the tables to be described. If <code>ConnectedDatabase</code> is * not specified, this is also the database to connect to with your authentication credentials. */ public void setDatabase(String database) { this.database = database; } /** * <p> * The name of the database that contains the tables to be described. If <code>ConnectedDatabase</code> is not * specified, this is also the database to connect to with your authentication credentials. * </p> * * @return The name of the database that contains the tables to be described. If <code>ConnectedDatabase</code> is * not specified, this is also the database to connect to with your authentication credentials. */ public String getDatabase() { return this.database; } /** * <p> * The name of the database that contains the tables to be described. If <code>ConnectedDatabase</code> is not * specified, this is also the database to connect to with your authentication credentials. 
* </p> * * @param database * The name of the database that contains the tables to be described. If <code>ConnectedDatabase</code> is * not specified, this is also the database to connect to with your authentication credentials. * @return Returns a reference to this object so that method calls can be chained together. */ public DescribeTableRequest withDatabase(String database) { setDatabase(database); return this; } /** * <p> * The database user name. This parameter is required when connecting to a cluster and authenticating using * temporary credentials. * </p> * * @param dbUser * The database user name. This parameter is required when connecting to a cluster and authenticating using * temporary credentials. */ public void setDbUser(String dbUser) { this.dbUser = dbUser; } /** * <p> * The database user name. This parameter is required when connecting to a cluster and authenticating using * temporary credentials. * </p> * * @return The database user name. This parameter is required when connecting to a cluster and authenticating using * temporary credentials. */ public String getDbUser() { return this.dbUser; } /** * <p> * The database user name. This parameter is required when connecting to a cluster and authenticating using * temporary credentials. * </p> * * @param dbUser * The database user name. This parameter is required when connecting to a cluster and authenticating using * temporary credentials. * @return Returns a reference to this object so that method calls can be chained together. */ public DescribeTableRequest withDbUser(String dbUser) { setDbUser(dbUser); return this; } /** * <p> * The maximum number of tables to return in the response. If more tables exist than fit in one response, then * <code>NextToken</code> is returned to page through the results. * </p> * * @param maxResults * The maximum number of tables to return in the response. If more tables exist than fit in one response, * then <code>NextToken</code> is returned to page through the results. */ public void setMaxResults(Integer maxResults) { this.maxResults = maxResults; } /** * <p> * The maximum number of tables to return in the response. If more tables exist than fit in one response, then * <code>NextToken</code> is returned to page through the results. * </p> * * @return The maximum number of tables to return in the response. If more tables exist than fit in one response, * then <code>NextToken</code> is returned to page through the results. */ public Integer getMaxResults() { return this.maxResults; } /** * <p> * The maximum number of tables to return in the response. If more tables exist than fit in one response, then * <code>NextToken</code> is returned to page through the results. * </p> * * @param maxResults * The maximum number of tables to return in the response. If more tables exist than fit in one response, * then <code>NextToken</code> is returned to page through the results. * @return Returns a reference to this object so that method calls can be chained together. */ public DescribeTableRequest withMaxResults(Integer maxResults) { setMaxResults(maxResults); return this; } /** * <p> * A value that indicates the starting point for the next set of response records in a subsequent request. If a * value is returned in a response, you can retrieve the next set of records by providing this returned NextToken * value in the next NextToken parameter and retrying the command. If the NextToken field is empty, all response * records have been retrieved for the request. 
* </p> * * @param nextToken * A value that indicates the starting point for the next set of response records in a subsequent request. If * a value is returned in a response, you can retrieve the next set of records by providing this returned * NextToken value in the next NextToken parameter and retrying the command. If the NextToken field is empty, * all response records have been retrieved for the request. */ public void setNextToken(String nextToken) { this.nextToken = nextToken; } /** * <p> * A value that indicates the starting point for the next set of response records in a subsequent request. If a * value is returned in a response, you can retrieve the next set of records by providing this returned NextToken * value in the next NextToken parameter and retrying the command. If the NextToken field is empty, all response * records have been retrieved for the request. * </p> * * @return A value that indicates the starting point for the next set of response records in a subsequent request. * If a value is returned in a response, you can retrieve the next set of records by providing this returned * NextToken value in the next NextToken parameter and retrying the command. If the NextToken field is * empty, all response records have been retrieved for the request. */ public String getNextToken() { return this.nextToken; } /** * <p> * A value that indicates the starting point for the next set of response records in a subsequent request. If a * value is returned in a response, you can retrieve the next set of records by providing this returned NextToken * value in the next NextToken parameter and retrying the command. If the NextToken field is empty, all response * records have been retrieved for the request. * </p> * * @param nextToken * A value that indicates the starting point for the next set of response records in a subsequent request. If * a value is returned in a response, you can retrieve the next set of records by providing this returned * NextToken value in the next NextToken parameter and retrying the command. If the NextToken field is empty, * all response records have been retrieved for the request. * @return Returns a reference to this object so that method calls can be chained together. */ public DescribeTableRequest withNextToken(String nextToken) { setNextToken(nextToken); return this; } /** * <p> * The schema that contains the table. If no schema is specified, then matching tables for all schemas are returned. * </p> * * @param schema * The schema that contains the table. If no schema is specified, then matching tables for all schemas are * returned. */ public void setSchema(String schema) { this.schema = schema; } /** * <p> * The schema that contains the table. If no schema is specified, then matching tables for all schemas are returned. * </p> * * @return The schema that contains the table. If no schema is specified, then matching tables for all schemas are * returned. */ public String getSchema() { return this.schema; } /** * <p> * The schema that contains the table. If no schema is specified, then matching tables for all schemas are returned. * </p> * * @param schema * The schema that contains the table. If no schema is specified, then matching tables for all schemas are * returned. * @return Returns a reference to this object so that method calls can be chained together. */ public DescribeTableRequest withSchema(String schema) { setSchema(schema); return this; } /** * <p> * The name or ARN of the secret that enables access to the database. 
This parameter is required when authenticating * using Secrets Manager. * </p> * * @param secretArn * The name or ARN of the secret that enables access to the database. This parameter is required when * authenticating using Secrets Manager. */ public void setSecretArn(String secretArn) { this.secretArn = secretArn; } /** * <p> * The name or ARN of the secret that enables access to the database. This parameter is required when authenticating * using Secrets Manager. * </p> * * @return The name or ARN of the secret that enables access to the database. This parameter is required when * authenticating using Secrets Manager. */ public String getSecretArn() { return this.secretArn; } /** * <p> * The name or ARN of the secret that enables access to the database. This parameter is required when authenticating * using Secrets Manager. * </p> * * @param secretArn * The name or ARN of the secret that enables access to the database. This parameter is required when * authenticating using Secrets Manager. * @return Returns a reference to this object so that method calls can be chained together. */ public DescribeTableRequest withSecretArn(String secretArn) { setSecretArn(secretArn); return this; } /** * <p> * The table name. If no table is specified, then all tables for all matching schemas are returned. If no table and * no schema is specified, then all tables for all schemas in the database are returned * </p> * * @param table * The table name. If no table is specified, then all tables for all matching schemas are returned. If no * table and no schema is specified, then all tables for all schemas in the database are returned */ public void setTable(String table) { this.table = table; } /** * <p> * The table name. If no table is specified, then all tables for all matching schemas are returned. If no table and * no schema is specified, then all tables for all schemas in the database are returned * </p> * * @return The table name. If no table is specified, then all tables for all matching schemas are returned. If no * table and no schema is specified, then all tables for all schemas in the database are returned */ public String getTable() { return this.table; } /** * <p> * The table name. If no table is specified, then all tables for all matching schemas are returned. If no table and * no schema is specified, then all tables for all schemas in the database are returned * </p> * * @param table * The table name. If no table is specified, then all tables for all matching schemas are returned. If no * table and no schema is specified, then all tables for all schemas in the database are returned * @return Returns a reference to this object so that method calls can be chained together. */ public DescribeTableRequest withTable(String table) { setTable(table); return this; } /** * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be * redacted from this string using a placeholder value. * * @return A string representation of this object. 
* * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getClusterIdentifier() != null) sb.append("ClusterIdentifier: ").append(getClusterIdentifier()).append(","); if (getConnectedDatabase() != null) sb.append("ConnectedDatabase: ").append(getConnectedDatabase()).append(","); if (getDatabase() != null) sb.append("Database: ").append(getDatabase()).append(","); if (getDbUser() != null) sb.append("DbUser: ").append(getDbUser()).append(","); if (getMaxResults() != null) sb.append("MaxResults: ").append(getMaxResults()).append(","); if (getNextToken() != null) sb.append("NextToken: ").append(getNextToken()).append(","); if (getSchema() != null) sb.append("Schema: ").append(getSchema()).append(","); if (getSecretArn() != null) sb.append("SecretArn: ").append(getSecretArn()).append(","); if (getTable() != null) sb.append("Table: ").append(getTable()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof DescribeTableRequest == false) return false; DescribeTableRequest other = (DescribeTableRequest) obj; if (other.getClusterIdentifier() == null ^ this.getClusterIdentifier() == null) return false; if (other.getClusterIdentifier() != null && other.getClusterIdentifier().equals(this.getClusterIdentifier()) == false) return false; if (other.getConnectedDatabase() == null ^ this.getConnectedDatabase() == null) return false; if (other.getConnectedDatabase() != null && other.getConnectedDatabase().equals(this.getConnectedDatabase()) == false) return false; if (other.getDatabase() == null ^ this.getDatabase() == null) return false; if (other.getDatabase() != null && other.getDatabase().equals(this.getDatabase()) == false) return false; if (other.getDbUser() == null ^ this.getDbUser() == null) return false; if (other.getDbUser() != null && other.getDbUser().equals(this.getDbUser()) == false) return false; if (other.getMaxResults() == null ^ this.getMaxResults() == null) return false; if (other.getMaxResults() != null && other.getMaxResults().equals(this.getMaxResults()) == false) return false; if (other.getNextToken() == null ^ this.getNextToken() == null) return false; if (other.getNextToken() != null && other.getNextToken().equals(this.getNextToken()) == false) return false; if (other.getSchema() == null ^ this.getSchema() == null) return false; if (other.getSchema() != null && other.getSchema().equals(this.getSchema()) == false) return false; if (other.getSecretArn() == null ^ this.getSecretArn() == null) return false; if (other.getSecretArn() != null && other.getSecretArn().equals(this.getSecretArn()) == false) return false; if (other.getTable() == null ^ this.getTable() == null) return false; if (other.getTable() != null && other.getTable().equals(this.getTable()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getClusterIdentifier() == null) ? 0 : getClusterIdentifier().hashCode()); hashCode = prime * hashCode + ((getConnectedDatabase() == null) ? 0 : getConnectedDatabase().hashCode()); hashCode = prime * hashCode + ((getDatabase() == null) ? 0 : getDatabase().hashCode()); hashCode = prime * hashCode + ((getDbUser() == null) ? 0 : getDbUser().hashCode()); hashCode = prime * hashCode + ((getMaxResults() == null) ? 0 : getMaxResults().hashCode()); hashCode = prime * hashCode + ((getNextToken() == null) ? 
0 : getNextToken().hashCode()); hashCode = prime * hashCode + ((getSchema() == null) ? 0 : getSchema().hashCode()); hashCode = prime * hashCode + ((getSecretArn() == null) ? 0 : getSecretArn().hashCode()); hashCode = prime * hashCode + ((getTable() == null) ? 0 : getTable().hashCode()); return hashCode; } @Override public DescribeTableRequest clone() { return (DescribeTableRequest) super.clone(); } }
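/*
 * Illustrative paging sketch, not part of the generated SDK file: it chains the
 * fluent withXxx setters defined above and feeds NextToken back into the request,
 * as the field documentation describes. AWSRedshiftDataAPI and DescribeTableResult
 * are the standard client and result types paired with this request; the concrete
 * identifiers ("my-cluster", the secret ARN, etc.) are hypothetical.
 */
class DescribeTablePagingSketch {
    static void describeAllPages(com.amazonaws.services.redshiftdataapi.AWSRedshiftDataAPI client) {
        DescribeTableRequest request = new DescribeTableRequest()
                .withClusterIdentifier("my-cluster")
                .withDatabase("dev")
                .withSecretArn("arn:aws:secretsmanager:us-east-1:123456789012:secret:example")
                .withSchema("public")
                .withTable("sales")
                .withMaxResults(100);

        String nextToken = null;
        do {
            // An empty NextToken requests the first page; subsequent calls resume
            // from the returned token until it is empty.
            com.amazonaws.services.redshiftdataapi.model.DescribeTableResult result =
                    client.describeTable(request.withNextToken(nextToken));
            // Process the returned column metadata here.
            nextToken = result.getNextToken();
        } while (nextToken != null && !nextToken.isEmpty());
    }
}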
/****************************************************************
 * Licensed to the Apache Software Foundation (ASF) under one   *
 * or more contributor license agreements.  See the NOTICE file *
 * distributed with this work for additional information        *
 * regarding copyright ownership.  The ASF licenses this file   *
 * to you under the Apache License, Version 2.0 (the            *
 * "License"); you may not use this file except in compliance   *
 * with the License.  You may obtain a copy of the License at   *
 *                                                              *
 *   http://www.apache.org/licenses/LICENSE-2.0                 *
 *                                                              *
 * Unless required by applicable law or agreed to in writing,   *
 * software distributed under the License is distributed on an  *
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY       *
 * KIND, either express or implied.  See the License for the    *
 * specific language governing permissions and limitations      *
 * under the License.                                           *
 ****************************************************************/

package org.apache.james.rrt.lib;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import java.util.Arrays;
import java.util.Collection;
import java.util.Iterator;
import java.util.Map;

import org.apache.james.lifecycle.api.LifecycleUtil;
import org.apache.james.rrt.api.RecipientRewriteTable;
import org.apache.james.rrt.api.RecipientRewriteTable.ErrorMappingException;
import org.apache.james.rrt.api.RecipientRewriteTableException;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

/**
 * The abstract test for the virtual user table. Contains tests related to
 * simple, regexp, wildcard, error,... Extend this and instantiate the needed
 * virtualUserTable implementation.
 */
public abstract class AbstractRecipientRewriteTableTest {

    protected AbstractRecipientRewriteTable virtualUserTable;

    protected final static int REGEX_TYPE = 0;
    protected final static int ERROR_TYPE = 1;
    protected final static int ADDRESS_TYPE = 2;
    protected final static int ALIASDOMAIN_TYPE = 3;

    @Before
    public void setUp() throws Exception {
        virtualUserTable = getRecipientRewriteTable();
    }

    @After
    public void tearDown() throws Exception {
        Map<String, Collection<String>> mappings = virtualUserTable.getAllMappings();

        if (mappings != null) {
            for (String key : virtualUserTable.getAllMappings().keySet()) {
                String[] args = key.split("@");
                Collection<String> map = mappings.get(key);
                for (String aMap : map) {
                    try {
                        removeMapping(args[0], args[1], aMap);
                    } catch (IllegalArgumentException e) {
                        e.printStackTrace();
                    }
                }
            }
        }

        LifecycleUtil.dispose(virtualUserTable);
    }

    @Test
    public void testStoreAndRetrieveRegexMapping() throws
            org.apache.james.rrt.api.RecipientRewriteTable.ErrorMappingException, RecipientRewriteTableException {
        String user = "test";
        String domain = "localhost";
        // String regex = "(.*):{$1}@localhost";
        // String regex2 = "(.+):{$1}@test";
        String regex = "(.*)@localhost";
        String regex2 = "(.+)@test";
        String invalidRegex = ".*):";
        boolean catched = false;

        try {
            assertNull("No mapping", virtualUserTable.getMappings(user, domain));

            assertTrue("Added virtual mapping", addMapping(user, domain, regex, REGEX_TYPE));
            assertTrue("Added virtual mapping", addMapping(user, domain, regex2, REGEX_TYPE));

            assertEquals("Two mappings", virtualUserTable.getMappings(user, domain).size(), 2);
            assertEquals("One mappingline", virtualUserTable.getAllMappings().size(), 1);

            assertTrue("remove virtual mapping", removeMapping(user, domain, regex, REGEX_TYPE));

            try {
virtualUserTable.addRegexMapping(user, domain, invalidRegex); } catch (RecipientRewriteTableException e) { catched = true; } assertTrue("Invalid Mapping throw exception", catched); assertTrue("remove virtual mapping", removeMapping(user, domain, regex2, REGEX_TYPE)); assertNull("No mapping", virtualUserTable.getMappings(user, domain)); assertNull("No mappings", virtualUserTable.getAllMappings()); } catch (IllegalArgumentException e) { e.printStackTrace(); fail("Storing failed"); } } @Test public void testStoreAndRetrieveAddressMapping() throws ErrorMappingException, RecipientRewriteTableException { String user = "test"; String domain = "localhost"; String address = "test@localhost2"; String address2 = "test@james"; try { assertNull("No mapping", virtualUserTable.getMappings(user, domain)); assertTrue("Added virtual mapping", addMapping(user, domain, address, ADDRESS_TYPE)); assertTrue("Added virtual mapping", addMapping(user, domain, address2, ADDRESS_TYPE)); assertEquals("Two mappings", virtualUserTable.getMappings(user, domain).size(), 2); assertEquals("One mappingline", virtualUserTable.getAllMappings().size(), 1); assertTrue("remove virtual mapping", removeMapping(user, domain, address, ADDRESS_TYPE)); /* * TEMPORARILY REMOVE JDBC specific test String invalidAddress= * ".*@localhost2:"; boolean catched = false; if (virtualUserTable * instanceof JDBCRecipientRewriteTable) { try { * assertTrue("Added virtual mapping", addMapping(user, domain, * invalidAddress, ADDRESS_TYPE)); } catch (InvalidMappingException * e) { catched = true; } * assertTrue("Invalid Mapping throw exception" , catched); } */ assertTrue("remove virtual mapping", removeMapping(user, domain, address2, ADDRESS_TYPE)); assertNull("No mapping", virtualUserTable.getMappings(user, domain)); assertNull("No mappings", virtualUserTable.getAllMappings()); } catch (IllegalArgumentException e) { fail("Storing failed"); } } @Test public void testStoreAndRetrieveErrorMapping() throws ErrorMappingException, RecipientRewriteTableException { String user = "test"; String domain = "localhost"; String error = "bounce!"; boolean catched = false; try { assertNull("No mapping", virtualUserTable.getMappings(user, domain)); assertTrue("Added virtual mapping", addMapping(user, domain, error, ERROR_TYPE)); assertEquals("One mappingline", virtualUserTable.getAllMappings().size(), 1); try { virtualUserTable.getMappings(user, domain); } catch (ErrorMappingException e) { catched = true; } assertTrue("Error Mapping throw exception", catched); assertTrue("remove virtual mapping", removeMapping(user, domain, error, ERROR_TYPE)); assertNull("No mapping", virtualUserTable.getMappings(user, domain)); assertNull("No mappings", virtualUserTable.getAllMappings()); } catch (IllegalArgumentException e) { fail("Storing failed"); } } @Test public void testStoreAndRetrieveWildCardAddressMapping() throws ErrorMappingException, RecipientRewriteTableException { String user = "test"; String user2 = "test2"; String domain = "localhost"; String address = "test@localhost2"; String address2 = "test@james"; try { assertNull("No mapping", virtualUserTable.getMappings(user, domain)); assertTrue("Added virtual mapping", addMapping(RecipientRewriteTable.WILDCARD, domain, address, ADDRESS_TYPE)); assertTrue("Added virtual mapping", addMapping(user, domain, address2, ADDRESS_TYPE)); assertEquals("One mappings", 1, virtualUserTable.getMappings(user, domain).size()); assertEquals("One mappings", 1, virtualUserTable.getMappings(user2, domain).size()); assertTrue("remove virtual 
mapping", removeMapping(user, domain, address2, ADDRESS_TYPE)); assertTrue("remove virtual mapping", removeMapping(RecipientRewriteTable.WILDCARD, domain, address, ADDRESS_TYPE)); assertNull("No mapping", virtualUserTable.getMappings(user, domain)); assertNull("No mapping", virtualUserTable.getMappings(user2, domain)); } catch (IllegalArgumentException e) { fail("Storing failed"); } } @Test public void testRecursiveMapping() throws ErrorMappingException, RecipientRewriteTableException { String user1 = "user1"; String user2 = "user2"; String user3 = "user3"; String domain1 = "domain1"; String domain2 = "domain2"; String domain3 = "domain3"; boolean exception1 = false; virtualUserTable.setRecursiveMapping(true); try { assertNull("No mappings", virtualUserTable.getAllMappings()); assertTrue("Add mapping", addMapping(user1, domain1, user2 + "@" + domain2, ADDRESS_TYPE)); assertTrue("Add mapping", addMapping(user2, domain2, user3 + "@" + domain3, ADDRESS_TYPE)); assertEquals("Recursive mapped", virtualUserTable.getMappings(user1, domain1).iterator().next(), user3 + "@" + domain3); assertTrue("Add mapping", addMapping(user3, domain3, user1 + "@" + domain1, ADDRESS_TYPE)); try { virtualUserTable.getMappings(user1, domain1); } catch (ErrorMappingException e) { exception1 = true; } assertTrue("Exception thrown on to many mappings", exception1); // disable recursive mapping virtualUserTable.setRecursiveMapping(false); assertEquals("Not recursive mapped", virtualUserTable.getMappings(user1, domain1).iterator().next(), user2 + "@" + domain2); } catch (IllegalArgumentException e) { fail("Storing failed"); } } @Test public void testAliasDomainMapping() throws ErrorMappingException, RecipientRewriteTableException { String domain = "realdomain"; String aliasDomain = "aliasdomain"; String user = "user"; String user2 = "user2"; assertNull("No mappings", virtualUserTable.getAllMappings()); try { assertTrue("Add mapping", addMapping(RecipientRewriteTable.WILDCARD, aliasDomain, user2 + "@" + domain, ADDRESS_TYPE)); assertTrue("Add aliasDomain mapping", addMapping(RecipientRewriteTable.WILDCARD, aliasDomain, domain, ALIASDOMAIN_TYPE)); Iterator<String> mappings = virtualUserTable.getMappings(user, aliasDomain).iterator(); assertEquals("Domain mapped as first ", mappings.next(), user + "@" + domain); assertEquals("Address mapped as second ", mappings.next(), user2 + "@" + domain); assertTrue("Remove mapping", removeMapping(RecipientRewriteTable.WILDCARD, aliasDomain, user2 + "@" + domain, ADDRESS_TYPE)); assertTrue("Remove aliasDomain mapping", removeMapping(RecipientRewriteTable.WILDCARD, aliasDomain, domain, ALIASDOMAIN_TYPE)); } catch (IllegalArgumentException e) { fail("Storing failed"); } } @Test public void sortMappingsShouldReturnNullWhenNull() { assertNull(AbstractRecipientRewriteTable.sortMappings(null)); } @Test public void sortMappingsShouldReturnEmptyWhenEmpty() { assertEquals("", AbstractRecipientRewriteTable.sortMappings("")); } @Test public void sortMappingsShouldReturnSameStringWhenSingleDomainAlias() { String singleDomainAlias = RecipientRewriteTable.ALIASDOMAIN_PREFIX + "first"; assertEquals(singleDomainAlias, AbstractRecipientRewriteTable.sortMappings(singleDomainAlias)); } @Test public void sortMappingsShouldReturnSameStringWhenTwoDomainAliases() { String firstAliasMapping = RecipientRewriteTable.ALIASDOMAIN_PREFIX + "first"; String secondAliasMapping = RecipientRewriteTable.ALIASDOMAIN_PREFIX + "second"; String mappings = 
RecipientRewriteTableUtil.CollectionToMapping(Arrays.asList(firstAliasMapping, secondAliasMapping)); assertEquals(mappings, AbstractRecipientRewriteTable.sortMappings(mappings)); } @Test public void sortMappingsShouldPutDomainAliasFirstWhenVariousMappings() { String regexMapping = RecipientRewriteTable.REGEX_PREFIX + "first"; String domainMapping = RecipientRewriteTable.ALIASDOMAIN_PREFIX + "second"; String inputMappings = RecipientRewriteTableUtil.CollectionToMapping(Arrays.asList(regexMapping, domainMapping)); String expectedMappings = RecipientRewriteTableUtil.CollectionToMapping(Arrays.asList(domainMapping, regexMapping)); assertEquals(expectedMappings, AbstractRecipientRewriteTable.sortMappings(inputMappings)); } protected abstract AbstractRecipientRewriteTable getRecipientRewriteTable() throws Exception; protected abstract boolean addMapping(String user, String domain, String mapping, int type) throws RecipientRewriteTableException; protected abstract boolean removeMapping(String user, String domain, String mapping, int type) throws RecipientRewriteTableException; private void removeMapping(String user, String domain, String rawMapping) throws RecipientRewriteTableException { if (rawMapping.startsWith(RecipientRewriteTable.ERROR_PREFIX)) { removeMapping(user, domain, rawMapping.substring(RecipientRewriteTable.ERROR_PREFIX.length()), ERROR_TYPE); } else if (rawMapping.startsWith(RecipientRewriteTable.REGEX_PREFIX)) { removeMapping(user, domain, rawMapping.substring(RecipientRewriteTable.REGEX_PREFIX.length()), REGEX_TYPE); } else if (rawMapping.startsWith(RecipientRewriteTable.ALIASDOMAIN_PREFIX)) { removeMapping(user, domain, rawMapping.substring(RecipientRewriteTable.ALIASDOMAIN_PREFIX.length()), ALIASDOMAIN_TYPE); } else { removeMapping(user, domain, rawMapping, ADDRESS_TYPE); } } }
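/*
 * Illustrative sketch, not part of James: the minimal shape of a concrete subclass
 * of AbstractRecipientRewriteTableTest. "MemoryRecipientRewriteTable" is a
 * hypothetical in-memory implementation; only addRegexMapping is shown wired up,
 * since it is the one setter the abstract test calls directly above. The remaining
 * types would be dispatched to the matching methods of the implementation under test.
 */
public class MemoryRecipientRewriteTableTest extends AbstractRecipientRewriteTableTest {

    @Override
    protected AbstractRecipientRewriteTable getRecipientRewriteTable() throws Exception {
        return new MemoryRecipientRewriteTable(); // hypothetical implementation
    }

    @Override
    protected boolean addMapping(String user, String domain, String mapping, int type)
            throws RecipientRewriteTableException {
        if (type == REGEX_TYPE) {
            virtualUserTable.addRegexMapping(user, domain, mapping);
            return true;
        }
        // Dispatch the remaining types (ERROR_TYPE, ADDRESS_TYPE, ALIASDOMAIN_TYPE)
        // to the matching addXxxMapping methods of the implementation under test.
        return false;
    }

    @Override
    protected boolean removeMapping(String user, String domain, String mapping, int type)
            throws RecipientRewriteTableException {
        // Mirror addMapping: dispatch on type to the matching removeXxxMapping method.
        return false;
    }
}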