gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/*******************************************************************************
 * Copyright 2006 - 2012 Vienna University of Technology,
 * Department of Software Technology and Interactive Systems, IFS
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 ******************************************************************************/
package eu.scape_project.planning.xml.plan;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.Serializable;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.nio.charset.StandardCharsets;
import java.text.ParseException;

import eu.scape_project.planning.model.ByteStream;
import eu.scape_project.planning.model.ChangeLog;

import org.dom4j.Document;
import org.dom4j.io.DOMReader;
import org.dom4j.io.OutputFormat;
import org.dom4j.io.XMLWriter;
import org.w3c.dom.Element;

/**
 * Helper class for {@link eu.scape_project.planning.xml.PlanParser} to read an
 * XML element. Can set the decoded data to other objects which have a function
 * setData(byte[] data).
 *
 * Typical use: {@link #setEncoded(Element)} captures the serialized element
 * (and any embedded changelog), then {@link #setData(Object)},
 * {@link #setString(Object)} or {@link #setChangeLog(Object)} pushes the
 * captured value onto a target object via reflection.
 */
public class XMLDataWrapper implements Serializable {

    private static final long serialVersionUID = 2080538998419720006L;

    /**
     * Encoding used for writing data.
     */
    private static final String ENCODING = "UTF-8";

    /**
     * The default output format used for this class.
     */
    public static final OutputFormat DEFAULT_OUTPUT_FORMAT;

    private OutputFormat outputFormat = DEFAULT_OUTPUT_FORMAT;

    // Name of the single-argument method invoked on the target object by
    // setData(Object)/setString(Object).
    private String methodName = "setData";

    // Name of the method invoked by setChangeLog(Object); must be set via
    // setChangeLogMethodName(String) before setChangeLog is called.
    private String changeLogMethodName = null;

    // Serialized XML bytes captured by the last call to setEncoded.
    private byte[] data = null;

    // Change log extracted by the last call to setEncoded (never null after
    // setEncoded, but may be empty if no changelog element was present).
    private ChangeLog changeLog = null;

    static {
        DEFAULT_OUTPUT_FORMAT = OutputFormat.createPrettyPrint();
        DEFAULT_OUTPUT_FORMAT.setEncoding(ENCODING);
    }

    /**
     * Reads an XML element and <code>value</code> and keeps this data for the
     * next call of {@link #setData(Object)}.
     *
     * Additionally checks the <code>value</code> if a <code>changelog</code>
     * element is present and stores it for the next call of
     * {@link #setChangeLog(Object)}.
     *
     * @param value
     *            the value to create
     * @throws IOException
     *             if the data could not be written
     * @throws ParseException
     *             if the data could not be parsed
     */
    public void setEncoded(Element value) throws IOException, ParseException {
        DOMReader reader = new DOMReader();
        org.w3c.dom.Document w3cDocument = value.getOwnerDocument();
        // NOTE(review): appending the element to its owner document assumes the
        // document has no other root yet — confirm against PlanParser usage.
        w3cDocument.appendChild(value);
        Document doc = reader.read(w3cDocument);

        // Extract an embedded changelog element (any namespace) if present.
        org.dom4j.Element changeLogElement =
            (org.dom4j.Element) doc.selectSingleNode("//*[local-name()='changelog']");
        createChangeLog(changeLogElement);

        // Serialize the whole document with the configured output format
        // (UTF-8 by default) and keep the bytes for later delivery.
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        XMLWriter writer = new XMLWriter(out, outputFormat);
        writer.write(doc);
        this.data = out.toByteArray();
    }

    /**
     * Invokes the previously set method <methodName> on <code>object</code> via
     * reflection - with previously decoded data as parameter.
     *
     * @param object
     *            the object where the data should be set
     * @throws NoSuchMethodException
     *             if the method could not be invoked
     * @throws IllegalAccessException
     *             if the method could not be invoked
     * @throws InvocationTargetException
     *             if the method could not be invoked
     */
    public void setData(Object object) throws IllegalAccessException, InvocationTargetException,
        NoSuchMethodException {
        ByteStream bs = new ByteStream();
        bs.setData(data);

        Method setDataMethod = object.getClass().getMethod(methodName, ByteStream.class);
        setDataMethod.invoke(object, new Object[] {bs});
    }

    /**
     * Invokes the previously set method <methodName> on <code>object</code> via
     * reflection - with the previously captured data decoded as a UTF-8 string.
     *
     * @param object
     *            the object where the data should be set
     * @throws NoSuchMethodException
     *             if the method could not be invoked
     * @throws IllegalAccessException
     *             if the method could not be invoked
     * @throws InvocationTargetException
     *             if the method could not be invoked
     */
    public void setString(Object object) throws NoSuchMethodException, IllegalAccessException,
        InvocationTargetException {
        ByteStream bs = new ByteStream();
        bs.setData(data);

        Method setDataMethod = object.getClass().getMethod(methodName, String.class);
        // FIX: the bytes were written by XMLWriter with ENCODING ("UTF-8"), so
        // decode them as UTF-8 explicitly. The previous new String(data) used
        // the platform default charset and corrupted non-ASCII content on
        // platforms where that default is not UTF-8.
        String dataString = new String(data, StandardCharsets.UTF_8);
        setDataMethod.invoke(object, new Object[] {dataString});
    }

    /**
     * Invokes the previously set method <changeLogMethodName> on
     * <code>object</code> via reflection - with previously decoded change log
     * data as parameter.
     *
     * @param object
     *            the object where the change log should be set
     * @throws NoSuchMethodException
     *             if the method could not be invoked
     * @throws IllegalAccessException
     *             if the method could not be invoked
     * @throws InvocationTargetException
     *             if the method could not be invoked
     */
    public void setChangeLog(Object object) throws NoSuchMethodException, IllegalAccessException,
        InvocationTargetException {
        Method setData = object.getClass().getMethod(changeLogMethodName, ChangeLog.class);
        setData.invoke(object, new Object[] {changeLog});
    }

    /**
     * Creates a change log from the provided changeLogElement. If the element
     * is null an empty {@link ChangeLog} is kept instead.
     *
     * @param changeLogElement
     *            the element containing the change log
     * @throws ParseException
     *             if the date could not be parsed
     */
    private void createChangeLog(org.dom4j.Element changeLogElement) throws ParseException {
        changeLog = new ChangeLog();
        if (changeLogElement == null) {
            return;
        }
        TimestampFormatter formatter = new TimestampFormatter();
        changeLog.setChangedBy(changeLogElement.attributeValue("changedBy"));
        changeLog.setCreatedBy(changeLogElement.attributeValue("createdBy"));
        String changed = changeLogElement.attributeValue("changed");
        String created = changeLogElement.attributeValue("created");
        changeLog.setChanged(formatter.parseTimestamp(changed));
        changeLog.setCreated(formatter.parseTimestamp(created));
    }

    // ---------- getter/setter ----------

    public String getMethodName() {
        return methodName;
    }

    public void setMethodName(String methodName) {
        this.methodName = methodName;
    }

    public String getChangeLogMethodName() {
        return changeLogMethodName;
    }

    public void setChangeLogMethodName(String changeLogMethodName) {
        this.changeLogMethodName = changeLogMethodName;
    }

    public OutputFormat getOutputFormat() {
        return outputFormat;
    }

    public void setOutputFormat(OutputFormat outputFormat) {
        this.outputFormat = outputFormat;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.uima.taeconfigurator.editors.ui;

import java.util.Arrays;

import org.apache.uima.resource.metadata.ConfigurationGroup;
import org.apache.uima.resource.metadata.ConfigurationParameter;
import org.apache.uima.resource.metadata.ConfigurationParameterSettings;
import org.apache.uima.taeconfigurator.InternalErrorCDE;
import org.apache.uima.taeconfigurator.editors.MultiPageEditor;
import org.apache.uima.taeconfigurator.editors.ui.dialogs.CommonInputDialog;
import org.eclipse.jface.dialogs.MessageDialog;
import org.eclipse.jface.window.Window;
import org.eclipse.swt.SWT;
import org.eclipse.swt.custom.CCombo;
import org.eclipse.swt.custom.StackLayout;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Event;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Table;
import org.eclipse.swt.widgets.TableItem;
import org.eclipse.swt.widgets.Text;
import org.eclipse.ui.forms.IManagedForm;

/**
 * The Class ValueSection. Detail section of the parameter-settings page: shows
 * and edits the value(s) of the configuration parameter currently selected in
 * the master {@link ParameterSettingsSection}. Single-valued parameters use a
 * text field (or a true/false combo for Booleans, switched via a StackLayout);
 * multi-valued parameters use a list with add/edit/remove/up/down buttons.
 */
public class ValueSection extends AbstractSectionParm {

  /** The value text stack (StackLayout host: shows either vtc1 or vtc2). */
  private Composite valueTextStack;

  /** The value text stack layout. */
  private StackLayout valueTextStackLayout;

  /** The value text (single-valued, non-Boolean parameters). */
  private Text valueText;

  /** The value text combo (single-valued Boolean parameters: true/false). */
  private CCombo valueTextCombo;

  /** The vtc 1 (page hosting valueText). */
  private Composite vtc1;

  /** The vtc 2 (page hosting valueTextCombo). */
  private Composite vtc2;

  /** The value table (multi-valued parameters). */
  private Table valueTable;

  /** The master section providing the selected parameter. */
  private ParameterSettingsSection master;

  /** The add button. */
  private Button addButton;

  /** The edit button. */
  private Button editButton;

  /** The remove button. */
  private Button removeButton;

  /** The up button. */
  private Button upButton;

  /** The down button. */
  private Button downButton;

  /** The button container. */
  private Composite buttonContainer;

  /** The selected CP (null when no parameter is selected in the master). */
  private ConfigurationParameter selectedCP;

  /** The model settings the selected parameter's value lives in. */
  private ConfigurationParameterSettings modelSettings;

  /**
   * Instantiates a new value section.
   *
   * @param editor
   *          the editor
   * @param parent
   *          the parent
   */
  public ValueSection(MultiPageEditor editor, Composite parent) {
    super(editor, parent, "Values",
            "Specify the value of the selected configuration parameter.");
  }

  /*
   * Called by the page constructor after all sections are created, to initialize them.
   * (non-Javadoc)
   *
   * @see org.eclipse.ui.forms.IFormPart#initialize(org.eclipse.ui.forms.IManagedForm)
   */
  @Override
  public void initialize(IManagedForm form) {
    super.initialize(form);
    master = editor.getSettingsPage().getParameterSettingsSection();

    Composite sectionClient = new3ColumnComposite(this.getSection());
    enableBorders(sectionClient);
    toolkit.paintBordersFor(sectionClient);

    // Stack of two "pages": vtc1 (plain text) and vtc2 (true/false combo);
    // refresh() picks which one is on top.
    valueTextStack = newComposite(sectionClient);
    valueTextStack
            .setLayoutData(new GridData(GridData.FILL_HORIZONTAL + GridData.VERTICAL_ALIGN_FILL));
    ((GridData) valueTextStack.getLayoutData()).horizontalSpan = 2;
    valueTextStack.setLayout(valueTextStackLayout = new StackLayout());
    valueTextStackLayout.marginHeight = 5;
    valueTextStackLayout.marginWidth = 5;
    vtc1 = new2ColumnComposite(valueTextStack);
    vtc2 = new2ColumnComposite(valueTextStack);
    enableBorders(vtc1);
    enableBorders(vtc2);
    toolkit.paintBordersFor(vtc1);
    toolkit.paintBordersFor(vtc2);

    valueText = newLabeledTextField(vtc1, "Value", "Specify the value", SWT.NONE);
    valueTextCombo = newLabeledCComboWithTip(vtc2, "Value",
            "Use the combo pulldown to pick True or False");
    valueTextCombo.add("true");
    valueTextCombo.add("false");
    spacer(sectionClient);

    Label valueListLabel = toolkit.createLabel(sectionClient, "Value list:");
    valueListLabel.setLayoutData(new GridData(SWT.TOP));
    valueTable = newTable(sectionClient, SWT.MULTI, 0);
    // no column spec in table is an idiom that makes it a fancy list

    // Buttons
    buttonContainer = newButtonContainer(sectionClient);
    addButton = newPushButton(buttonContainer, S_ADD, "Click here to add a value to the list.");
    editButton = newPushButton(buttonContainer, S_EDIT, S_EDIT_TIP);
    removeButton = newPushButton(buttonContainer, S_REMOVE, S_REMOVE_TIP);
    upButton = newPushButton(buttonContainer, S_UP, S_UP_TIP);
    downButton = newPushButton(buttonContainer, S_DOWN, S_DOWN_TIP);
  }

  /*
   * Re-reads the selected parameter from the master section and repopulates
   * either the text field / combo (single-valued) or the table (multi-valued).
   * (non-Javadoc)
   *
   * @see org.eclipse.ui.forms.IFormPart#refresh()
   */
  @Override
  public void refresh() {
    super.refresh();
    master = editor.getSettingsPage().getParameterSettingsSection();
    valueTextStackLayout.topControl = vtc1;
    selectedCP = master.getSelectedModelParameter();
    if (selectedCP == null) { // no param selected
      valueText.setText("");
      valueTable.removeAll();
    } else { // parm selected in master view
      Object modelValue;
      modelSettings = getModelSettings();
      String groupName = master.getSelectedParamGroupName();
      String parmName = selectedCP.getName();
      modelValue = (NOT_IN_ANY_GROUP.equals(groupName)) ? modelSettings.getParameterValue(parmName)
              : modelSettings.getParameterValue(groupName, parmName);
      if (selectedCP.isMultiValued()) { // use list, not text field
        valueText.setText("");
        valueTable.removeAll();
        if (modelValue != null && modelValue instanceof Object[]) {
          Object[] valArr = (Object[]) modelValue;
          for (int i = 0; i < valArr.length; i++) {
            TableItem item = new TableItem(valueTable, SWT.NONE);
            item.setText(valArr[i].toString());
          }
        }
      } else { // single-valued parameter - use Text field
        valueTable.removeAll();
        valueText.setText((modelValue == null) ? "" : modelValue.toString());
        if ("Boolean".equals(selectedCP.getType())) {
          // Booleans get the true/false combo page on top instead.
          valueTextCombo.setText((modelValue == null) ? "" : modelValue.toString());
          valueTextStackLayout.topControl = vtc2;
        } else {
          valueText.setText((modelValue == null) ? "" : modelValue.toString());
          valueTextStackLayout.topControl = vtc1;
        }
      }
    }
    valueTextStack.layout();
    enable();
  }

  /*
   * Enables/disables controls based on whether a parameter is selected,
   * whether it is multi-valued, and the current table selection.
   * (non-Javadoc)
   *
   * @see org.apache.uima.taeconfigurator.editors.ui.AbstractSectionParm#enable()
   */
  @Override
  public void enable() {
    boolean mvValue = (null != selectedCP) && (selectedCP.isMultiValued());
    valueText.setVisible((null != selectedCP) && (!selectedCP.isMultiValued()));
    valueTextCombo.setVisible((null != selectedCP) && (!selectedCP.isMultiValued()));
    addButton.setEnabled(mvValue);
    int selected = valueTable.getSelectionIndex();
    editButton.setEnabled(mvValue && selected > -1);
    removeButton.setEnabled(mvValue && selected > -1);
    upButton.setEnabled(mvValue && selected > 0);
    downButton
            .setEnabled(mvValue && (selected > -1) && (selected < (valueTable.getItemCount() - 1)));
    valueText.getParent().redraw();
  }

  /*
   * Dispatches all widget events for this section: text/combo edits update the
   * single value; the buttons edit the multi-value list and the model together.
   * (non-Javadoc)
   *
   * @see org.eclipse.swt.widgets.Listener#handleEvent(org.eclipse.swt.widgets.Event)
   */
  @Override
  public void handleEvent(Event event) {
    if (event.widget == valueText) {
      setParmValue(valueText.getText());
    } else if (event.widget == valueTextCombo) {
      setParmValue(valueTextCombo.getText());
    } else if (event.widget == addButton) {
      // open dialog to enter value; validation filter matches the param type
      String dataType = selectedCP.getType();
      int validationFilter = "Boolean".equals(dataType) ? CommonInputDialog.TRUE_FALSE
              : "Integer".equals(dataType) ? CommonInputDialog.INTEGER
                      : "Float".equals(dataType) ? CommonInputDialog.FLOAT : CommonInputDialog.ALLOK;
      CommonInputDialog dialog = new CommonInputDialog(this, "Add value", "Enter a value",
              validationFilter);
      if (dialog.open() == Window.CANCEL) {
        return;
      }
      TableItem item = new TableItem(valueTable, SWT.NONE);
      item.setText(dialog.getValue());
      // update model
      setCurrentParameterValue(valueTable.getItems());
    } else if (event.widget == editButton) {
      // open dialog to enter value
      // NOTE(review): dialog title says "Add value" and filter is ALLOK even
      // for typed params here — looks like copy/paste from the add branch;
      // confirm intended before changing.
      TableItem item = valueTable.getItems()[valueTable.getSelectionIndex()];
      CommonInputDialog dialog = new CommonInputDialog(this, "Add value", "Enter a value",
              CommonInputDialog.ALLOK, item.getText());
      if (dialog.open() == Window.CANCEL) {
        return;
      }
      item.setText(dialog.getValue());
      // update model
      setCurrentParameterValue(valueTable.getItems());
    } else if (event.widget == upButton) {
      // update both model and gui: swap nodes
      int selection = valueTable.getSelectionIndex();
      TableItem[] items = valueTable.getItems();
      String temp = items[selection - 1].getText();
      items[selection - 1].setText(items[selection].getText());
      items[selection].setText(temp);
      valueTable.setSelection(selection - 1);
      setCurrentParameterValue(valueTable.getItems());
    } else if (event.widget == downButton) {
      // update both model and gui: swap nodes
      int selection = valueTable.getSelectionIndex();
      TableItem[] items = valueTable.getItems();
      String temp = items[selection + 1].getText();
      items[selection + 1].setText(items[selection].getText());
      items[selection].setText(temp);
      valueTable.setSelection(selection + 1);
      setCurrentParameterValue(valueTable.getItems());
    } else if (event.widget == removeButton
            || (event.widget == valueTable && event.character == SWT.DEL)) {
      handleRemove(event);
    }
    enable();
  }

  /**
   * Sets the parm value. An empty string clears the value (stores null).
   *
   * @param value
   *          the new parm value
   */
  private void setParmValue(String value) {
    if (null != value) {
      if ("".equals(value)) {
        value = null; // means clear the value
      }
      setCurrentParameterValue(value);
    }
  }

  /**
   * Handle remove: deletes the selected table rows and syncs the model.
   *
   * @param event
   *          the event
   */
  public void handleRemove(Event event) {
    valueTable.remove(valueTable.getSelectionIndices());
    // update model
    setCurrentParameterValue(valueTable.getItems());
  }

  /**
   * Gets the adds the button.
   *
   * @return the adds the button
   */
  public Button getAddButton() {
    return addButton;
  }

  /**
   * Gets the removes the button.
   *
   * @return the removes the button
   */
  public Button getRemoveButton() {
    return removeButton;
  }

  /**
   * Gets the value table.
   *
   * @return the value table
   */
  public Table getValueTable() {
    return valueTable;
  }

  /**
   * Gets the value text.
   *
   * @return the value text
   */
  public Text getValueText() {
    return valueText;
  }

  /**
   * Sets the currently selected parameter to the specified value. The string value will be
   * converted to the appropriate data type. This method works only for single-valued parameters.
   *
   * @param aValueString
   *          the new current parameter value (null clears the value)
   */
  private void setCurrentParameterValue(String aValueString) {
    Object value = null;
    if (null != aValueString) {
      String paramType = selectedCP.getType();
      try {
        if (ConfigurationParameter.TYPE_STRING.equals(paramType)) {
          value = aValueString;
        } else if (ConfigurationParameter.TYPE_INTEGER.equals(paramType)) {
          value = Integer.valueOf(aValueString);
        } else if (ConfigurationParameter.TYPE_LONG.equals(paramType)) {
          value = Long.valueOf(aValueString);
        } else if (ConfigurationParameter.TYPE_FLOAT.equals(paramType)) {
          value = Float.valueOf(aValueString);
        } else if (ConfigurationParameter.TYPE_DOUBLE.equals(paramType)) {
          value = Double.valueOf(aValueString);
        } else if (ConfigurationParameter.TYPE_BOOLEAN.equals(paramType)) {
          value = Boolean.valueOf(aValueString);
        }
      } catch (NumberFormatException e) {
        // Partially-typed numbers (e.g. a bare exponent) are reported, not stored.
        Utility.popMessage("Invalid Number",
                "If typing a floating point exponent, please complete the exponent.\nOtherwise, please retype the proper kind of number",
                MessageDialog.ERROR);
        return;
      }
    }
    setModelValue(value);
  }

  /**
   * Sets the currently selected parameter to the specified value. This method works only for
   * multi-valued parameters. The Table Items will be converted to the appropriate data type.
   *
   * @param aValues
   *          Table Items, one for each value of the multi-valued param
   */
  private void setCurrentParameterValue(TableItem[] aValues) {
    Object[] valueArr = null;
    String paramType = selectedCP.getType();
    try {
      if (ConfigurationParameter.TYPE_STRING.equals(paramType)) {
        valueArr = new String[aValues.length];
        for (int i = 0; i < valueArr.length; i++) {
          valueArr[i] = aValues[i].getText();
        }
      } else if (ConfigurationParameter.TYPE_INTEGER.equals(paramType)) {
        valueArr = new Integer[aValues.length];
        for (int i = 0; i < valueArr.length; i++) {
          valueArr[i] = Integer.valueOf(aValues[i].getText());
        }
      } else if (ConfigurationParameter.TYPE_LONG.equals(paramType)) {
        valueArr = new Long[aValues.length];
        for (int i = 0; i < valueArr.length; i++) {
          valueArr[i] = Long.valueOf(aValues[i].getText());
        }
      } else if (ConfigurationParameter.TYPE_FLOAT.equals(paramType)) {
        valueArr = new Float[aValues.length];
        for (int i = 0; i < valueArr.length; i++) {
          valueArr[i] = Float.valueOf(aValues[i].getText());
        }
      } else if (ConfigurationParameter.TYPE_DOUBLE.equals(paramType)) {
        valueArr = new Double[aValues.length];
        for (int i = 0; i < valueArr.length; i++) {
          valueArr[i] = Double.valueOf(aValues[i].getText());
        }
      } else if (ConfigurationParameter.TYPE_BOOLEAN.equals(paramType)) {
        valueArr = new Boolean[aValues.length];
        for (int i = 0; i < valueArr.length; i++) {
          valueArr[i] = Boolean.valueOf(aValues[i].getText());
        }
      } else {
        throw new InternalErrorCDE("invalid state");
      }
    } catch (NumberFormatException e) {
      Utility.popMessage("Invalid Number",
              "One or more values is not of the proper kind of number."
                      + " If this entry is the only one with the wrong numeric type,"
                      + " Please retype the proper kind of number. Otherwise,"
                      + " use the source page to change all the values to the proper type.",
              MessageDialog.ERROR);
      return;
    }
    setModelValue(valueArr);
  }

  /**
   * Sets the model value, honoring the group the parameter belongs to: the
   * COMMON group fans the value out to every declared group; otherwise only
   * the selected group (or the no-group settings) is touched. Marks the editor
   * dirty only when some stored value actually changed.
   *
   * @param value
   *          the new model value
   */
  private void setModelValue(Object value) {
    String groupName = master.getSelectedParamGroupName();
    boolean changed = false;
    if (COMMON_GROUP.equals(groupName)) {
      ConfigurationGroup[] groups = getConfigurationParameterDeclarations()
              .getConfigurationGroups();
      for (int i = 0; i < groups.length; i++) {
        String[] groupNames = groups[i].getNames();
        for (int j = 0; j < groupNames.length; j++) {
          if (isSameValue(value,
                  modelSettings.getParameterValue(groupNames[j], selectedCP.getName()))) {
            continue;
          }
          modelSettings.setParameterValue(groupNames[j], selectedCP.getName(), value);
          changed = true;
        }
      }
    } else if (NOT_IN_ANY_GROUP.equals(groupName)) {
      if (!isSameValue(value, modelSettings.getParameterValue(selectedCP.getName()))) {
        modelSettings.setParameterValue(selectedCP.getName(), value);
        changed = true;
      }
    } else {
      if (!isSameValue(value, modelSettings.getParameterValue(groupName, selectedCP.getName()))) {
        modelSettings.setParameterValue(groupName, selectedCP.getName(), value);
        changed = true;
      }
    }
    if (changed) {
      editor.setFileDirty();
    }
  }

  /**
   * Checks if is same value; arrays are compared element-wise, and null is
   * only equal to null.
   *
   * @param v1
   *          the v 1
   * @param v2
   *          the v 2
   * @return true, if is same value
   */
  private boolean isSameValue(Object v1, Object v2) {
    if (v1 instanceof Object[]) {
      return (Arrays.equals((Object[]) v1, (Object[]) v2));
    } else {
      if (null == v1) {
        return null == v2;
      }
      return v1.equals(v2);
    }
  }
}
package com.readytalk.makrut.util;

import static com.codahale.metrics.MetricRegistry.name;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyInt;
import static org.mockito.Matchers.anyLong;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ScheduledThreadPoolExecutor;

import com.codahale.metrics.MetricRegistry;
import com.google.common.base.Optional;
import com.google.common.base.Throwables;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.ListeningScheduledExecutorService;
import com.google.common.util.concurrent.MoreExecutors;
import com.google.common.util.concurrent.SettableFuture;
import com.readytalk.makrut.strategy.BackoffStrategy;
import com.readytalk.makrut.strategy.RetryStrategy;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;

/**
 * Unit tests for FutureUtils: retry decoration of futures (addRetry) and
 * cache-backed fallback (withFallbackCache), driven through SettableFuture
 * and Mockito mocks.
 */
public class FutureUtilsTest {

	// Sentinel result object — identity-compared in the assertions below.
	private final Object obj = new Object();

	// Real single-thread scheduled executor; shut down in tearDown.
	private ListeningScheduledExecutorService executorService;

	@Mock
	private MakrutCommandWrapper<Object> command;

	@Mock
	private BackoffStrategy backoffStrategy;

	@Mock
	private RetryStrategy retryStrategy;

	@Mock
	private CacheWrapper cache;

	@Mock
	private Callable<Object> callable;

	private FutureUtils utils;

	@Before
	public void setUp() throws Exception {
		MockitoAnnotations.initMocks(this);

		executorService = MoreExecutors.listeningDecorator(new ScheduledThreadPoolExecutor(1));

		utils = new FutureUtils(new MetricRegistry(), name(callable.getClass()));
	}

	@After
	public void tearDown() throws Exception {
		executorService.shutdownNow();
	}

	// Errors (as opposed to Exceptions) must propagate without consulting the
	// retry strategy at all.
	@Test
	public void addRetry_OnFailureWithError_SkipsRetry() throws Exception {
		SettableFuture<Object> value = SettableFuture.create();

		when(retryStrategy.shouldRetry(anyInt(), anyLong(), any(Exception.class))).thenReturn(true, false);
		when(command.call()).thenReturn(obj);

		ListenableFuture<Object> withRetry = utils.addRetry(executorService, retryStrategy,
				Optional.of(backoffStrategy), value, command);

		Error th = new AssertionError();

		try {
			value.setException(th);
		} catch (Error er) {
			// Working around that settable futures immediately throw errors.
		}

		try {
			withRetry.get();
			fail("Expected exception.");
		} catch (ExecutionException ex) {
			assertEquals(th, ex.getCause());
		}

		verify(retryStrategy, never()).shouldRetry(anyInt(), anyLong(), any(Exception.class));
	}

	// A plain Exception triggers exactly one retry, which then succeeds.
	@Test
	public void addRetry_OnFailure_TriggersRetry() throws Exception {
		SettableFuture<Object> value = SettableFuture.create();

		when(retryStrategy.shouldRetry(anyInt(), anyLong(), any(Exception.class))).thenReturn(true, false);
		when(command.call()).thenReturn(obj);

		ListenableFuture<Object> withRetry = utils.addRetry(executorService, retryStrategy,
				Optional.of(backoffStrategy), value, command);

		value.setException(new Exception());

		assertEquals(obj, withRetry.get());

		verify(retryStrategy).shouldRetry(anyInt(), anyLong(), any(Exception.class));
	}

	// A retry must also advance the backoff on the command wrapper.
	@Test
	public void addRetry_OnFailure_PushesBack() throws Exception {
		SettableFuture<Object> value = SettableFuture.create();

		when(retryStrategy.shouldRetry(anyInt(), anyLong(), any(Exception.class))).thenReturn(true, false);
		when(command.call()).thenReturn(obj);

		ListenableFuture<Object> withRetry = utils.addRetry(executorService, retryStrategy,
				Optional.of(backoffStrategy), value, command);

		value.setException(new Exception());

		assertEquals(obj, withRetry.get());

		verify(command).getAndSetNextBackoff(eq(backoffStrategy));
	}

	// Two consecutive failures (future + first call) consume two retry checks.
	@Test
	public void addRetry_OnMultipleFailure_CanTriggerMultipleRetries() throws Exception {
		SettableFuture<Object> value = SettableFuture.create();

		when(retryStrategy.shouldRetry(anyInt(), anyLong(), any(Exception.class))).thenReturn(true, true, false);
		when(command.call()).thenThrow(new RuntimeException()).thenReturn(obj);

		ListenableFuture<Object> withRetry = utils.addRetry(executorService, retryStrategy,
				Optional.of(backoffStrategy), value, command);

		value.setException(new Exception());

		assertEquals(obj, withRetry.get());

		verify(retryStrategy, times(2)).shouldRetry(anyInt(), anyLong(), any(Exception.class));
	}

	// When the strategy declines to retry, the original exception surfaces.
	@Test
	public void addRetry_OnFailureWithNegativeRetry_ReturnsException() throws Exception {
		SettableFuture<Object> value = SettableFuture.create();

		when(retryStrategy.shouldRetry(anyInt(), anyLong(), any(Exception.class))).thenReturn(false);

		ListenableFuture<Object> withRetry = utils.addRetry(executorService, retryStrategy,
				Optional.of(backoffStrategy), value, command);

		Exception testEx = new Exception();

		value.setException(testEx);

		try {
			withRetry.get();
			fail("Expected failure.");
		} catch (Exception ex) {
			assertEquals(testEx, Throwables.getRootCause(ex));

			verify(retryStrategy).shouldRetry(anyInt(), anyLong(), any(Exception.class));
		}
	}

	// On failure, a cached value (if present) replaces the failed result.
	@Test
	public void withFallbackCache_OnFailureWhenPresent_ReplacesValue() throws Exception {
		when(cache.getOptional(callable)).thenReturn(Optional.of(obj));

		SettableFuture<Object> settable = SettableFuture.create();

		ListenableFuture<Object> future = utils.withFallbackCache(callable, settable, cache);

		settable.setException(new Exception());

		assertEquals(obj, future.get());
	}

	// On failure with an empty cache, the original exception is preserved.
	@Test
	public void withFallbackCache_OnFailureWhenAbsent_ReturnsFailedFuture() throws Exception {
		when(cache.getOptional(callable)).thenReturn(Optional.absent());

		final Exception ex = new Exception();

		SettableFuture<Object> settable = SettableFuture.create();

		ListenableFuture<Object> future = utils.withFallbackCache(callable, settable, cache);

		settable.setException(ex);

		try {
			future.get();
			fail("Expected exception.");
		} catch (ExecutionException ee) {
			assertEquals(ex, ee.getCause());
		}
	}

	// On success the cache is never consulted.
	@Test
	public void withFallbackCache_OnSuccess_DoesNotRun() throws Exception {
		when(cache.getOptional(callable)).thenReturn(Optional.of(obj));

		SettableFuture<Object> settable = SettableFuture.create();

		ListenableFuture<Object> future = utils.withFallbackCache(callable, settable, cache);

		settable.set(obj);

		assertEquals(obj, future.get());

		verify(cache, never()).getOptional(eq(callable));
	}

	// Cancellation of the source propagates to the decorated future.
	@Test
	public void withFallbackCache_IfCancelled_RemainsCancelled() throws Exception {
		when(cache.getOptional(callable)).thenReturn(Optional.of(obj));

		SettableFuture<Object> settable = SettableFuture.create();

		ListenableFuture<Object> future = utils.withFallbackCache(callable, settable, cache);

		settable.cancel(true);

		assertTrue(future.isCancelled());
	}
}
package org.yetiz.utils.hbase;

import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;
import org.yetiz.utils.hbase.exception.CatcherRaiseException;
import org.yetiz.utils.hbase.exception.DuplicateException;
import org.yetiz.utils.hbase.exception.UnHandledException;
import org.yetiz.utils.hbase.exception.YHBaseException;

import java.util.List;

/**
 * Thin convenience wrapper around the HBase {@link Admin} client.
 * <p>
 * Every checked exception thrown by the underlying {@link Admin} is wrapped in an
 * unchecked {@link UnHandledException} so callers do not have to handle
 * {@code IOException} at each call site. Table-lifecycle helpers
 * ({@link #truncateTable}, {@link #deleteTable}, {@link #enableTable}) take care of
 * the disable/enable dance required by HBase before destructive operations.
 * <p>
 * Created by yeti on 16/4/1.
 */
public class HBaseAdmin {
    private Admin admin;

    protected HBaseAdmin(Admin admin) {
        this.admin = admin;
    }

    /**
     * Returns {@code true} if the table exists.
     * <p>
     * Fix: previously this caught {@link Throwable}, so any infrastructure failure
     * (e.g. connection loss) was silently reported as "table exists". Now only the
     * deliberate {@link DuplicateException} signal maps to {@code true}; real errors
     * propagate as {@link UnHandledException}.
     *
     * @param tableName table to probe
     * @return whether the table already exists
     */
    public boolean tableExists(TableName tableName) {
        try {
            checkTableNotExist(tableName);
            return false;
        } catch (DuplicateException duplicate) {
            return true;
        }
    }

    /**
     * Throws {@link DuplicateException} when the table already exists.
     * <p>
     * Fix: the original threw {@code DuplicateException} inside a
     * {@code catch (Throwable)} block, so the duplicate signal was immediately
     * re-wrapped in {@link UnHandledException} and the advertised exception type
     * never escaped. The existence check and the duplicate signal are now separated.
     */
    private void checkTableNotExist(TableName tableName) {
        boolean exists;
        try {
            exists = admin().tableExists(tableName.get());
        } catch (Throwable throwable) {
            throw new UnHandledException(throwable);
        }
        if (exists) {
            throw new DuplicateException("table name existed");
        }
    }

    private Admin admin() {
        return admin;
    }

    /**
     * Runs {@code task}, rethrowing domain exceptions untouched and wrapping
     * anything else in {@link CatcherRaiseException}.
     * Retained for compatibility although no longer used internally.
     */
    private final void CATCHER(Runnable task) {
        try {
            task.run();
        } catch (YHBaseException d) {
            throw d;
        } catch (Throwable t) {
            throw new CatcherRaiseException(t);
        }
    }

    /** Takes a snapshot of {@code tableName} under the given snapshot name. */
    public void snapshot(String snapshotName, TableName tableName) {
        try {
            admin().snapshot(snapshotName, tableName.get());
        } catch (Throwable throwable) {
            throw new UnHandledException(throwable);
        }
    }

    /** Restores a previously taken snapshot over its original table. */
    public void restoreSnapshot(String snapshotName) {
        try {
            admin().restoreSnapshot(snapshotName);
        } catch (Throwable throwable) {
            throw new UnHandledException(throwable);
        }
    }

    /** Closes the underlying {@link Admin} connection. */
    public void close() {
        try {
            admin().close();
        } catch (Throwable throwable) {
            throw new UnHandledException(throwable);
        }
    }

    /** Creates a new table {@code tableName} from the named snapshot. */
    public void cloneSnapshot(String snapshotName, TableName tableName) {
        try {
            admin().cloneSnapshot(snapshotName, tableName.get());
        } catch (Throwable throwable) {
            throw new UnHandledException(throwable);
        }
    }

    /** Lists all snapshots known to the cluster. */
    public List<HBaseProtos.SnapshotDescription> listSnapshots() {
        try {
            return admin().listSnapshots();
        } catch (Throwable throwable) {
            throw new UnHandledException(throwable);
        }
    }

    /** Deletes the named snapshot. */
    public void deleteSnapshot(String snapshotName) {
        try {
            admin().deleteSnapshot(snapshotName);
        } catch (Throwable throwable) {
            throw new UnHandledException(throwable);
        }
    }

    /** Fetches the descriptor (schema) of the given table. */
    public HTableDescriptor tableDescriptor(TableName tableName) {
        try {
            return admin().getTableDescriptor(tableName.get());
        } catch (Throwable throwable) {
            throw new UnHandledException(throwable);
        }
    }

    /** Applies a modified descriptor to an existing table. */
    public void updateTable(TableName tableName, HTableDescriptor tableDescriptor) {
        try {
            admin().modifyTable(tableName.get(), tableDescriptor);
        } catch (Throwable throwable) {
            throw new UnHandledException(throwable);
        }
    }

    /** Lists the descriptors of all user tables. */
    public HTableDescriptor[] listTables() {
        try {
            return admin().listTables();
        } catch (Throwable throwable) {
            throw new UnHandledException(throwable);
        }
    }

    /** Enables the table if it is currently disabled; no-op otherwise. */
    public void enableTable(TableName tableName) {
        if (!isTableDisabled(tableName)) {
            return;
        }
        try {
            admin().enableTable(tableName.get());
        } catch (Throwable throwable) {
            throw new UnHandledException(throwable);
        }
    }

    /** Returns whether the table is currently disabled. */
    public boolean isTableDisabled(TableName tableName) {
        try {
            return admin().isTableDisabled(tableName.get());
        } catch (Throwable throwable) {
            throw new UnHandledException(throwable);
        }
    }

    /** Truncates the table, discarding region split points. */
    public void truncateTable(TableName tableName) {
        truncateTable(tableName, false);
    }

    /**
     * Truncates the table (disabling it first, as HBase requires).
     *
     * @param preserveSplit whether existing region boundaries are kept
     */
    public void truncateTable(TableName tableName, boolean preserveSplit) {
        disableTable(tableName);
        try {
            admin().truncateTable(tableName.get(), preserveSplit);
        } catch (Throwable throwable) {
            throw new UnHandledException(throwable);
        }
    }

    /** Disables the table if it is currently enabled; no-op otherwise. */
    public void disableTable(TableName tableName) {
        if (!isTableDisabled(tableName)) {
            try {
                admin().disableTable(tableName.get());
            } catch (Throwable throwable) {
                throw new UnHandledException(throwable);
            }
        }
    }

    /** Deletes the table (disabling it first, as HBase requires). */
    public void deleteTable(TableName tableName) {
        disableTable(tableName);
        try {
            admin().deleteTable(tableName.get());
        } catch (Throwable throwable) {
            throw new UnHandledException(throwable);
        }
    }

    /** Changes the compression algorithm of an existing column family. */
    public void updateCompression(TableName tableName, String family, Algorithm compression) {
        try {
            admin().modifyColumn(tableName.get(),
                new HColumnDescriptor(family).setCompressionType(compression));
        } catch (Throwable throwable) {
            throw new UnHandledException(throwable);
        }
    }

    /** Adds a new column family with the given compression algorithm. */
    public void addColumnFamily(TableName tableName, String family, Algorithm compression) {
        try {
            admin().addColumn(tableName.get(),
                new HColumnDescriptor(family).setCompressionType(compression));
        } catch (Throwable throwable) {
            throw new UnHandledException(throwable);
        }
    }

    /** Deletes the named column family from the table. */
    public void deleteColumnFamily(TableName tableName, String family) {
        try {
            admin().deleteColumn(tableName.get(), HBaseClient.bytes(family));
        } catch (Throwable throwable) {
            throw new UnHandledException(throwable);
        }
    }

    /** Triggers a major compaction of the whole table. */
    public void majorCompact(TableName tableName) {
        majorCompact(tableName, null);
    }

    /**
     * Triggers a major compaction of a single column family, or of the whole table
     * when {@code family} is {@code null}.
     */
    public void majorCompact(TableName tableName, String family) {
        try {
            admin().majorCompact(tableName.get(),
                family == null ? null : HBaseClient.bytes(family));
        } catch (Throwable throwable) {
            throw new UnHandledException(throwable);
        }
    }

    /** Splits a region of the table at the given split point. */
    public void split(TableName tableName, byte[] splitPoint) {
        try {
            admin().split(tableName.get(), splitPoint);
        } catch (Throwable throwable) {
            throw new UnHandledException(throwable);
        }
    }

    /** Invokes the cluster balancer; returns whether it ran. */
    public boolean balancer() {
        try {
            return admin().balancer();
        } catch (Throwable throwable) {
            throw new UnHandledException(throwable);
        }
    }

    /**
     * Creates a table with default options.
     *
     * @throws DuplicateException if the table already exists
     */
    public void createTable(TableName tableName) {
        checkTableNotExist(tableName);
        try {
            admin().createTable(newTableDescriptor(tableName));
        } catch (Throwable throwable) {
            throw new UnHandledException(throwable);
        }
    }

    private HTableDescriptor newTableDescriptor(TableName tableName) {
        try {
            return new HTableDescriptor(tableName.get());
        } catch (Throwable throwable) {
            throw new UnHandledException(throwable);
        }
    }

    /**
     * Creates a pre-split table with {@code numberOfRegions} regions spanning
     * {@code [startKey, endKey]}.
     *
     * @throws DuplicateException if the table already exists
     */
    public void createTable(TableName tableName, byte[] startKey, byte[] endKey, int numberOfRegions) {
        checkTableNotExist(tableName);
        try {
            admin().createTable(newTableDescriptor(tableName), startKey, endKey, numberOfRegions);
        } catch (Throwable throwable) {
            throw new UnHandledException(throwable);
        }
    }
}
package com.iwillow.app.android.ui.view; import android.animation.Animator; import android.animation.AnimatorListenerAdapter; import android.animation.TypeEvaluator; import android.animation.ValueAnimator; import android.content.Context; import android.content.res.TypedArray; import android.graphics.Bitmap; import android.graphics.BitmapFactory; import android.graphics.Canvas; import android.graphics.Color; import android.graphics.Matrix; import android.graphics.Paint; import android.graphics.Rect; import android.os.Build; import android.support.annotation.ColorInt; import android.support.annotation.RequiresApi; import android.util.AttributeSet; import android.view.MotionEvent; import android.view.View; import android.view.ViewConfiguration; import android.view.animation.AccelerateDecelerateInterpolator; import com.iwillow.app.android.R; import static com.iwillow.app.android.util.DimenUtil.dp2px; /** * Created by https://github.com/iwillow/ on 2017/1/9. */ public class GravityInstrumentView extends View { private float mContainerPadding; private float mRadius; //the outer circle radius private float mTheta; private float mProgressX; private float mProgressY; private float mDotHorizontalGap = 10f; private float mDotWidth = 20f; private float mDotThickness = 5f; private float mControlCircleX; private float mControlCircleY; private int mSlop; private float mCircleX; private float mCircleY; private float mMotionDownX; private float mMotionDownY; private boolean mMoving; private boolean mOnInnerCircleTouched; private float mMaxDistance; private float mRadiusControl; private float mRadiusControlInner; private OnInnerCircleMoveListener mOnInnerCircleMoveListener; public boolean mEnableControl = true; private Paint mPaintInnerCircle; private Paint mPaintInnerOuterCircle; private Paint mPaintDot; private Paint mPaintControlCircle; private Paint mPaintControlCircleInner; private Paint mBitmapPaint; private Matrix mMatrix = new Matrix(); private Bitmap mBackgroundScaleBmp; 
private Bitmap mInnerBmp; private Bitmap mOuterBmp; private boolean mGestureMode = true; private float mInnerOuterCircleStokeWidth; private float mControlCircleStokeWidth; private int mInnerCircleColor; private int mInnerOuterCircleColor; private int mControlCircleColor; private int mControlCircleInnerColor; private int mDotColor; private float mRadiusFraction = 4.00f; public GravityInstrumentView(Context context) { this(context, null); } public GravityInstrumentView(Context context, AttributeSet attrs) { this(context, attrs, 0); } public GravityInstrumentView(Context context, AttributeSet attrs, int defStyleAttr) { super(context, attrs, defStyleAttr); init(attrs, defStyleAttr); } @RequiresApi(api = Build.VERSION_CODES.LOLLIPOP) public GravityInstrumentView(Context context, AttributeSet attrs, int defStyleAttr, int defStyleRes) { super(context, attrs, defStyleAttr, defStyleRes); init(attrs, defStyleAttr); } private void init(AttributeSet attrs, int defStyleAttr) { final TypedArray a = getContext().obtainStyledAttributes( attrs, R.styleable.GravityInstrumentView, defStyleAttr, 0); mSlop = ViewConfiguration.get(getContext()).getScaledTouchSlop(); mContainerPadding = a.getDimension(R.styleable.GravityInstrumentView_gravityInstrumentView_containerPadding, dp2px(getResources(), 5)); mDotHorizontalGap = a.getDimension(R.styleable.GravityInstrumentView_gravityInstrumentView_dotHorizontalGap, dp2px(getResources(), 4f)); mDotWidth = a.getDimension(R.styleable.GravityInstrumentView_gravityInstrumentView_dotWidth, dp2px(getResources(), 0.5f)); mDotThickness = a.getDimension(R.styleable.GravityInstrumentView_gravityInstrumentView_dotThickness, dp2px(getResources(), 0.5f)); mDotColor = a.getColor(R.styleable.GravityInstrumentView_gravityInstrumentView_dotColor, getResources().getColor(R.color.dotColor)); mInnerCircleColor = a.getColor(R.styleable.GravityInstrumentView_gravityInstrumentView_innerCircleColor, getResources().getColor(R.color.default_innerCircleColor)); 
mInnerOuterCircleColor = a.getColor(R.styleable.GravityInstrumentView_gravityInstrumentView_innerOuterCircleColor, getResources().getColor(R.color.default_innerOuterCircleColor)); mInnerOuterCircleStokeWidth = a.getDimension(R.styleable.GravityInstrumentView_gravityInstrumentView_innerOuterCircleStokeWidth, dp2px(getResources(), 3f)); mControlCircleColor = a.getColor(R.styleable.GravityInstrumentView_gravityInstrumentView_controlCircleColor, Color.GRAY); mControlCircleStokeWidth = a.getDimension(R.styleable.GravityInstrumentView_gravityInstrumentView_controlCircleStokeWidth, dp2px(getResources(), 3f)); mControlCircleInnerColor = a.getColor(R.styleable.GravityInstrumentView_gravityInstrumentView_controlCircleInnerColor, Color.WHITE); a.recycle(); mPaintInnerCircle = new Paint(); mPaintInnerCircle.setAntiAlias(true); mPaintInnerCircle.setDither(true); mPaintInnerCircle.setColor(mInnerCircleColor); mPaintInnerCircle.setStyle(Paint.Style.FILL); mPaintInnerOuterCircle = new Paint(); mPaintInnerOuterCircle.setAntiAlias(true); mPaintInnerOuterCircle.setDither(true); mPaintInnerOuterCircle.setColor(mInnerOuterCircleColor); mPaintInnerOuterCircle.setStyle(Paint.Style.STROKE); mPaintInnerOuterCircle.setStrokeWidth(mInnerOuterCircleStokeWidth); mPaintDot = new Paint(); mPaintDot.setAntiAlias(true); mPaintDot.setDither(true); mPaintDot.setColor(mDotColor); mPaintDot.setStyle(Paint.Style.FILL); mPaintDot.setStrokeWidth(mDotThickness); mPaintControlCircle = new Paint(); mPaintControlCircle.setAntiAlias(true); mPaintControlCircle.setDither(true); mPaintControlCircle.setColor(mControlCircleColor); mPaintControlCircle.setStyle(Paint.Style.STROKE); mPaintControlCircle.setStrokeWidth(mControlCircleStokeWidth); mPaintControlCircleInner = new Paint(); mPaintControlCircleInner.setAntiAlias(true); mPaintControlCircleInner.setDither(true); mPaintControlCircleInner.setColor(mControlCircleInnerColor); mPaintControlCircleInner.setStyle(Paint.Style.FILL); mBitmapPaint = new Paint(); 
mBitmapPaint.setAntiAlias(true); mBitmapPaint.setDither(true); mBitmapPaint.setStyle(Paint.Style.STROKE); Bitmap backgroundBitmap = BitmapFactory.decodeResource(getResources(), R.drawable.gravity_mode_bitmap_background); mMatrix.reset(); mMatrix.setScale(0.48f, 0.48f); mBackgroundScaleBmp = Bitmap.createBitmap(backgroundBitmap, 0, 0, backgroundBitmap.getWidth(), backgroundBitmap.getHeight(), mMatrix, true); Bitmap innerBitmap = BitmapFactory.decodeResource(getResources(), R.drawable.gravity_mode_bitmap_inner); mMatrix.reset(); mMatrix.setScale(0.52f, 0.52f); mInnerBmp = Bitmap.createBitmap(innerBitmap, 0, 0, innerBitmap.getWidth(), innerBitmap.getHeight(), mMatrix, true); Bitmap outer = BitmapFactory.decodeResource(getResources(), R.drawable.gravity_mode_bitmap_outer); mMatrix.reset(); mMatrix.setScale(0.38f, 0.38f); mOuterBmp = Bitmap.createBitmap(outer, 0, 0, outer.getWidth(), outer.getHeight(), mMatrix, true); } public void setDotColor(@ColorInt int dotColor) { if (dotColor != mDotColor) { mDotColor = dotColor; invalidate(); } } public void setControlCircleColor(@ColorInt int controlCircleColor) { if (controlCircleColor != mControlCircleColor) { mControlCircleColor = controlCircleColor; invalidate(); } } public void setControlCircleInnerColor(@ColorInt int controlCircleInnerColor) { if (controlCircleInnerColor != mControlCircleInnerColor) { mControlCircleInnerColor = controlCircleInnerColor; invalidate(); } } public int getControlCircleInnerColor() { return mControlCircleInnerColor; } public int getControlCircleColor() { return mControlCircleColor; } public int getDotColor() { return mDotColor; } public int getInnerCircleColor() { return mInnerCircleColor; } public void setInnerCircleColor(@ColorInt int innerCircleColor) { if (innerCircleColor != mInnerCircleColor) { mInnerCircleColor = innerCircleColor; invalidate(); } } public void setInnerOuterCircleStokeWidth(int innerOuterCircleStokeWidth) { if (mInnerOuterCircleStokeWidth != innerOuterCircleStokeWidth) { 
mInnerOuterCircleStokeWidth = innerOuterCircleStokeWidth; invalidate(); } } public void setControlCircleStokeWidth(int controlCircleStokeWidth) { if (mControlCircleStokeWidth != controlCircleStokeWidth && controlCircleStokeWidth >= 0) { mControlCircleStokeWidth = controlCircleStokeWidth; invalidate(); } } public void setConainerPadding(float containerPadding) { if (mContainerPadding != containerPadding && containerPadding >= 0) { this.mContainerPadding = containerPadding; invalidate(); } } public float getContainerPadding() { return mContainerPadding; } public void setDotHorizontalGap(float dotHorizontalGap) { if (mDotHorizontalGap != dotHorizontalGap && dotHorizontalGap > 0) { this.mDotHorizontalGap = dotHorizontalGap; invalidate(); } } public float getDotHorizontalGap() { return mDotHorizontalGap; } public void setDotWidth(float dotWidth) { if (mDotWidth != dotWidth && dotWidth > 0) { this.mDotWidth = dotWidth; invalidate(); } } public float getDotWidth() { return mDotWidth; } public void setDotThickness(float dotThickness) { if (mDotThickness != dotThickness && dotThickness > 0) { this.mDotThickness = dotThickness; invalidate(); } } public float getDotThickness() { return mDotThickness; } public void setInnerOuterCircleStokeWidth(float innerOuterCircleStokeWidth) { if (mInnerOuterCircleStokeWidth != innerOuterCircleStokeWidth) { mInnerOuterCircleStokeWidth = innerOuterCircleStokeWidth; invalidate(); } } public float getInnerOuterCircleStokeWidth() { return mInnerOuterCircleStokeWidth; } public void setRadiusFraction(float radiusFraction) { if (radiusFraction != mRadiusFraction && radiusFraction > 0) { mRadiusFraction = radiusFraction; invalidate(); } } @Override protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { int width = (int) dp2px(getResources(), 10000); //min int widthSize = MeasureSpec.getSize(widthMeasureSpec); int widthMode = MeasureSpec.getMode(widthMeasureSpec); int heightSize = MeasureSpec.getSize(heightMeasureSpec); int 
heightMode = MeasureSpec.getMode(heightMeasureSpec); if (widthMode == MeasureSpec.AT_MOST || widthMode == MeasureSpec.UNSPECIFIED || heightMode == MeasureSpec.AT_MOST || heightMode == MeasureSpec.UNSPECIFIED) { width = (int) dp2px(getResources(), 150); } else { if (widthMode == MeasureSpec.EXACTLY) { width = Math.min(widthSize, width); } if (heightMode == MeasureSpec.EXACTLY) { width = Math.min(heightSize, width); } } setMeasuredDimension(width, width); } @Override protected void onSizeChanged(int w, int h, int oldw, int oldh) { super.onSizeChanged(w, h, oldw, oldh); mRadius = 0.5f * (Math.min(w, h) - getPaddingLeft() - getPaddingRight()); mRadiusControl = mRadius * 0.15f; mRadiusControlInner = mRadius * 0.10f; mCircleX = getMeasuredWidth() / 2; mCircleY = getMeasuredHeight() / 2; mMaxDistance = mRadius - mContainerPadding - mRadiusFraction * mRadiusControl; } @Override public boolean onTouchEvent(MotionEvent event) { if (!isEnabled()) { return false; } if (!isGestureMode()) { return super.onTouchEvent(event); } switch (event.getAction()) { case MotionEvent.ACTION_DOWN: mMotionDownX = event.getX(); mMotionDownY = event.getY(); if (!mMoving) { mOnInnerCircleTouched = onInnerCircleTouched(event); } break; case MotionEvent.ACTION_MOVE: if (mMoving) { calibrateMotion(event); } else if (mOnInnerCircleTouched && canMove(event)) { mMoving = true; calibrateMotion(event); } else { } break; case MotionEvent.ACTION_UP: case MotionEvent.ACTION_CANCEL: mMotionDownX = 0; mMotionDownY = 0; mOnInnerCircleTouched = false; if (mMoving) { reset(); } break; } return true; } @Override protected void onDraw(Canvas canvas) { super.onDraw(canvas); drawBackgroundBitmap(canvas); drawHorizontal(canvas, (int) mProgressX); drawVertical(canvas, (int) mProgressX); drawCircle(canvas); drawInnerBitmap(canvas, mTheta); drawOuterBitmap(canvas, -mTheta); if (mMoving) { drawControlCircle(canvas); } } public boolean isGestureMode() { return mGestureMode; } public void enableGestureMode(boolean enable) 
{ this.mGestureMode = enable; } private void drawInnerBitmap(Canvas canvas, float delta) { mMatrix.reset(); mMatrix.postTranslate(getWidth() / 2 - mInnerBmp.getWidth() / 2, getHeight() / 2 - mInnerBmp.getHeight() / 2); mMatrix.postRotate(delta, getWidth() / 2, getHeight() / 2); canvas.drawBitmap(mInnerBmp, mMatrix, mBitmapPaint); } private void drawOuterBitmap(Canvas canvas, float delta) { mMatrix.reset(); mMatrix.postTranslate(getWidth() / 2 - mOuterBmp.getWidth() / 2, getHeight() / 2 - mOuterBmp.getHeight() / 2); mMatrix.postRotate(delta, getWidth() / 2, getHeight() / 2); canvas.drawBitmap(mOuterBmp, mMatrix, mBitmapPaint); } private void drawBackgroundBitmap(Canvas canvas) { int width = this.getWidth(); int height = this.getHeight(); int left = width / 2 - mBackgroundScaleBmp.getWidth() / 2; int top = height / 2 - mBackgroundScaleBmp.getHeight() / 2; int right = width / 2 + mBackgroundScaleBmp.getWidth() / 2; int bottom = height / 2 + mBackgroundScaleBmp.getHeight() / 2; Rect dst = new Rect(left, top, right, bottom); canvas.drawBitmap(mBackgroundScaleBmp, null, dst, mBitmapPaint); } private void drawCircle(Canvas canvas) { canvas.save(); canvas.translate(getWidth() / 2, getHeight() / 2); canvas.drawCircle(0, 0, mRadius * 0.08f, mPaintInnerCircle); canvas.drawCircle(0, 0, mRadius * 0.12f, mPaintInnerOuterCircle); canvas.restore(); } private void drawHorizontal(Canvas canvas, int progress) { canvas.save(); canvas.translate(getWidth() / 2, getHeight() / 2); canvas.rotate(90); float r = 0.48f * mRadius; for (int i = -100; i <= 100; i = i + 2) { float r1 = i * r / 100f; canvas.drawLine(mDotHorizontalGap, r1, mDotHorizontalGap + mDotWidth, r1, mPaintDot);//the dot in the right if ((i + progress) % 5 == 0) { canvas.drawLine(-mDotHorizontalGap - mDotWidth, r1, mDotHorizontalGap + mDotWidth, r1, mPaintDot);//the dot in the right } canvas.drawLine(-mDotHorizontalGap, r1, -mDotHorizontalGap - mDotWidth, r1, mPaintDot);//the dot in the left } canvas.restore(); } private 
void drawVertical(Canvas canvas, int progress) { canvas.save(); canvas.translate(getWidth() / 2, getHeight() / 2); float r = 0.48f * mRadius; for (int i = -100; i <= 100; i = i + 2) { float r1 = i * r / 100f; canvas.drawLine(mDotHorizontalGap, r1, mDotHorizontalGap + mDotWidth, r1, mPaintDot);//the dot in the right if ((progress + i) % 5 == 0) { canvas.drawLine(-mDotHorizontalGap - mDotWidth, r1, mDotHorizontalGap + mDotWidth, r1, mPaintDot);//the dot in the right } canvas.drawLine(-mDotHorizontalGap, r1, -mDotHorizontalGap - mDotWidth, r1, mPaintDot);//the dot in the left } canvas.restore(); } private void drawControlCircle(Canvas canvas) { canvas.drawCircle(mControlCircleX, mControlCircleY, mRadiusControl, mPaintControlCircle); canvas.drawCircle(mControlCircleX, mControlCircleY, mRadiusControlInner, mPaintControlCircleInner); } public boolean isEnableControl() { return mEnableControl; } public void enableControl(boolean enable) { mEnableControl = enable; } private boolean onInnerCircleTouched(MotionEvent event) { float radius = mRadiusControlInner; float x = event.getX(); float y = event.getY(); return (x - mCircleX) * (x - mCircleX) + (y - mCircleY) * (y - mCircleY) <= radius * radius; } private boolean canMove(MotionEvent event) { return ((event.getX() - mMotionDownX) * (event.getX() - mMotionDownX) + (event.getY() - mMotionDownY) * (event.getY() - mMotionDownY) > mSlop * mSlop); } private void calibrateMotion(MotionEvent event) { calibrateXY(event.getX(), event.getY()); } public void move(float fractionX, float fractionY) { if (!mEnableControl) { mMoving = false; return; } if (fractionX == 0f && fractionY == 0f) { mMoving = false; } else { mMoving = true; } if (fractionX > 1.0f) { fractionX = 1.0f; } if (fractionY > 1.0f) { fractionY = 1.0f; } if (fractionX < -1.0f) { fractionX = -1.0f; } if (fractionY < -1.0f) { fractionY = -1.0f; } float x = mCircleX + mMaxDistance * fractionX; float y = mCircleY + mMaxDistance * fractionY; calibrateXY(x, y); } private void 
calibrateXY(float x, float y) { float x1, y1; float theta; if (x > mCircleX && y < mCircleY) {//the first quadrant theta = (float) Math.atan((mCircleY - y) / (x - mCircleX)); x1 = (float) (mCircleX + mMaxDistance * Math.cos(theta)); y1 = (float) (mCircleY - mMaxDistance * Math.sin(theta)); if (x > x1) { x = x1; } if (y < y1) { y = y1; } mTheta = (float) (90 - 180 * theta / Math.PI); } else if (x > mCircleX && y > mCircleY) { //the second quadrant theta = (float) Math.atan((y - mCircleY) / (x - mCircleX)); x1 = (float) (mCircleX + mMaxDistance * Math.cos(theta)); y1 = (float) (mCircleY + mMaxDistance * Math.sin(theta)); if (x > x1) { x = x1; } if (y > y1) { y = y1; } mTheta = (float) (90 + 180 * theta / Math.PI); } else if (x < mCircleX && y > mCircleY) { //the third quadrant theta = (float) (Math.atan((y - mCircleY) / (mCircleX - x))); x1 = (float) (mCircleX - mMaxDistance * Math.cos(theta)); y1 = (float) (mCircleY + mMaxDistance * Math.sin(theta)); if (x < x1) { x = x1; } if (y > y1) { y = y1; } mTheta = (float) (270 - 180 * theta / Math.PI); } else if (x < mCircleX && y < mCircleY) { //the fourth quadrant theta = (float) (Math.atan((mCircleY - y) / (mCircleX - x))); x1 = (float) (mCircleX - mMaxDistance * Math.cos(theta)); y1 = (float) (mCircleY - mMaxDistance * Math.sin(theta)); if (x < x1) { x = x1; } if (y < y1) { y = y1; } mTheta = (float) (270 + 180 * theta / Math.PI); } else if (x == mCircleX && y != mCircleY) {//Y axis if (y > mCircleY + mMaxDistance) { y = mCircleY + mMaxDistance; mTheta = 0; } else if (y < (mCircleY - mMaxDistance)) { y = mCircleY - mMaxDistance; mTheta = 180f; } } else if (y == mCircleY && x != mCircleX) {//X axis if (x > mCircleX + mMaxDistance) { x = mCircleX + mMaxDistance; mTheta = 270f; } else if (x < (mCircleX - mMaxDistance)) { x = mCircleX - mMaxDistance; mTheta = 90f; } } else { mTheta = 0; } mControlCircleX = x; mControlCircleY = y; float fractionX = (mControlCircleX - mCircleX) / mMaxDistance; float fractionY = 
(mControlCircleY - mCircleY) / mMaxDistance; mProgressX = 100f * fractionX; mProgressY = 100f * fractionY; if (mOnInnerCircleMoveListener != null) { mOnInnerCircleMoveListener.onInnerCircleMove(mControlCircleX, mControlCircleY, fractionX, fractionY); } invalidate(); } public void reset() { float theta = mTheta; if (mTheta > 180) { theta = -(360 - mTheta); } CirclePosition startValue = new CirclePosition(mControlCircleX, mControlCircleY, mProgressX, mProgressY, theta); CirclePosition endValue = new CirclePosition(mCircleX, mCircleY, 0, 0, 0); ValueAnimator valueAnimator = ValueAnimator.ofObject(new PositionEvaluator(), startValue, endValue); valueAnimator.setDuration(500); valueAnimator.setInterpolator(new AccelerateDecelerateInterpolator()); valueAnimator.addUpdateListener(new ValueAnimator.AnimatorUpdateListener() { @Override public void onAnimationUpdate(ValueAnimator animation) { CirclePosition circlePosition = (CirclePosition) animation.getAnimatedValue(); mControlCircleX = circlePosition.getX(); mControlCircleY = circlePosition.getY(); mProgressX = circlePosition.getProgressX(); mProgressY = circlePosition.getProgressY(); mTheta = circlePosition.getTheta(); if (mOnInnerCircleMoveListener != null) { float fractionX = mProgressX / 100f; float fractionY = mProgressY / 100f; mOnInnerCircleMoveListener.onInnerCircleMove(mControlCircleX, mControlCircleY, fractionX, fractionY); } invalidate(); } }); valueAnimator.addListener(new AnimatorListenerAdapter() { @Override public void onAnimationEnd(Animator animation) { super.onAnimationEnd(animation); mMoving = false; mTheta = 0; mProgressX = 0; mProgressY = 0; mControlCircleX = mCircleX; mControlCircleX = mCircleY; if (mOnInnerCircleMoveListener != null) { mOnInnerCircleMoveListener.onInnerCircleMove(mCircleX, mCircleY, 0f, 0f); } invalidate(); } }); valueAnimator.start(); } public void setOnInnerCircleMoveListener(OnInnerCircleMoveListener listener) { this.mOnInnerCircleMoveListener = listener; } public interface 
OnInnerCircleMoveListener { void onInnerCircleMove(float x, float y, float fractionX, float fractionY); } private static class CirclePosition { final private float x; final private float y; final private float progressX; final private float progressY; final private float theta; public CirclePosition(float x, float y, float progressX, float progressY, float theta) { this.x = x; this.y = y; this.progressX = progressX; this.progressY = progressY; this.theta = theta; } public float getX() { return x; } public float getY() { return y; } public float getProgressX() { return progressX; } public float getProgressY() { return progressY; } public float getTheta() { return theta; } } private static class PositionEvaluator implements TypeEvaluator<CirclePosition> { @Override public CirclePosition evaluate(float fraction, CirclePosition startValue, CirclePosition endValue) { float x = startValue.getX() + fraction * (endValue.getX() - startValue.getX()); float y = startValue.getY() + fraction * (endValue.getY() - startValue.getY()); float progressX = startValue.getProgressX() + fraction * (endValue.getProgressX() - startValue.getProgressX()); float progressY = startValue.getProgressY() + fraction * (endValue.getProgressY() - startValue.getProgressY()); float theta = startValue.getTheta() + fraction * (endValue.getTheta() - startValue.getTheta()); return new CirclePosition(x, y, progressX, progressY, theta); } } }
/* * Copyright 2010-2012 Luca Garulli (l.garulli--at--orientechnologies.com) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.orientechnologies.orient.test.database.auto; import com.orientechnologies.orient.core.db.ODatabaseRecordThreadLocal; import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx; import com.orientechnologies.orient.core.exception.OConcurrentModificationException; import com.orientechnologies.orient.core.metadata.schema.OSchema; import com.orientechnologies.orient.core.record.impl.ODocument; import com.orientechnologies.orient.core.record.impl.ORecordBytes; import com.orientechnologies.orient.core.tx.ORollbackException; import com.orientechnologies.orient.core.tx.OTransaction.TXTYPE; import com.orientechnologies.orient.enterprise.channel.binary.OResponseProcessingException; import org.testng.Assert; import org.testng.annotations.Optional; import org.testng.annotations.Parameters; import org.testng.annotations.Test; import java.io.IOException; import java.util.Collection; import java.util.HashSet; import java.util.concurrent.Callable; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; @Test(groups = "dictionary") public class TransactionOptimisticTest extends DocumentDBBaseTest { @Parameters(value = "url") public TransactionOptimisticTest(@Optional String iURL) { super(iURL); } @Test public void testTransactionOptimisticRollback() throws IOException { if 
(database.getClusterIdByName("binary") == -1) database.addCluster("binary"); long rec = database.countClusterElements("binary"); database.begin(); ORecordBytes recordBytes = new ORecordBytes("This is the first version".getBytes()); recordBytes.save("binary"); database.rollback(); Assert.assertEquals(database.countClusterElements("binary"), rec); } @Test(dependsOnMethods = "testTransactionOptimisticRollback") public void testTransactionOptimisticCommit() throws IOException { if (database.getClusterIdByName("binary") == -1) database.addCluster("binary"); long tot = database.countClusterElements("binary"); database.begin(); ORecordBytes recordBytes = new ORecordBytes("This is the first version".getBytes()); recordBytes.save("binary"); database.commit(); Assert.assertEquals(database.countClusterElements("binary"), tot + 1); } @Test(dependsOnMethods = "testTransactionOptimisticCommit") public void testTransactionOptimisticConcurrentException() throws IOException { if (database.getClusterIdByName("binary") == -1) database.addCluster("binary"); ODatabaseDocumentTx db2 = new ODatabaseDocumentTx(database.getURL()); db2.open("admin", "admin"); database.activateOnCurrentThread(); ORecordBytes record1 = new ORecordBytes("This is the first version".getBytes()); record1.save("binary"); try { database.begin(); // RE-READ THE RECORD record1.load(); ODatabaseRecordThreadLocal.INSTANCE.set(db2); ORecordBytes record2 = db2.load(record1.getIdentity()); record2.setDirty(); record2.fromStream("This is the second version".getBytes()); record2.save(); ODatabaseRecordThreadLocal.INSTANCE.set(database); record1.setDirty(); record1.fromStream("This is the third version".getBytes()); record1.save(); database.commit(); Assert.assertTrue(false); } catch (OResponseProcessingException e) { Assert.assertTrue(e.getCause() instanceof OConcurrentModificationException); database.rollback(); } catch (OConcurrentModificationException e) { Assert.assertTrue(true); database.rollback(); } finally { 
// NOTE(review): tail of the previous test's finally block — its method opens before this chunk.
database.close(); db2.activateOnCurrentThread(); db2.close(); } }

/**
 * Commits an optimistic transaction against a single database instance and
 * verifies that the record version counter advanced by exactly one and that
 * the committed payload is visible after reload.
 */
@Test(dependsOnMethods = "testTransactionOptimisticConcurrentException")
public void testTransactionOptimisticCacheMgmt1Db() throws IOException {
  if (database.getClusterIdByName("binary") == -1)
    database.addCluster("binary");

  ORecordBytes record = new ORecordBytes("This is the first version".getBytes());
  record.save();

  try {
    database.begin();

    // RE-READ THE RECORD
    record.load();
    int v1 = record.getRecordVersion().getCounter();
    record.setDirty();
    record.fromStream("This is the second version".getBytes());
    record.save();
    database.commit();

    record.reload();
    // exactly one version bump per commit
    Assert.assertEquals(record.getRecordVersion().getCounter(), v1 + 1);
    Assert.assertTrue(new String(record.toStream()).contains("second"));
  } finally {
    database.close();
  }
}

/**
 * Same as the 1Db variant, but the committed change is observed through a
 * second, independently opened database connection (db2) to check that the
 * update is visible across connections (cache management).
 */
@Test(dependsOnMethods = "testTransactionOptimisticCacheMgmt1Db")
public void testTransactionOptimisticCacheMgmt2Db() throws IOException {
  if (database.getClusterIdByName("binary") == -1)
    database.addCluster("binary");

  ODatabaseDocumentTx db2 = new ODatabaseDocumentTx(database.getURL());
  db2.open("admin", "admin");
  ORecordBytes record1 = new ORecordBytes("This is the first version".getBytes());
  record1.save();

  try {
    // bind the primary database back to this thread before the tx
    ODatabaseRecordThreadLocal.INSTANCE.set(database);
    database.begin();

    // RE-READ THE RECORD
    record1.load();
    int v1 = record1.getRecordVersion().getCounter();
    record1.setDirty();
    record1.fromStream("This is the second version".getBytes());
    record1.save();
    database.commit();

    // observe the commit through the second connection
    db2.activateOnCurrentThread();
    ORecordBytes record2 = db2.load(record1.getIdentity(), "*:-1", true);
    Assert.assertEquals(record2.getRecordVersion().getCounter(), v1 + 1);
    Assert.assertTrue(new String(record2.toStream()).contains("second"));
  } finally {
    database.activateOnCurrentThread();
    database.close();
    db2.activateOnCurrentThread();
    db2.close();
  }
}

/**
 * Inserts 1000 documents inside one optimistic transaction and verifies the
 * cluster element count grew by exactly 1000 after commit.
 */
@Test(dependsOnMethods = "testTransactionOptimisticCacheMgmt2Db")
public void testTransactionMultipleRecords() throws IOException {
  final OSchema schema = database.getMetadata().getSchema();
  if (!schema.existsClass("Account"))
    schema.createClass("Account");

  long totalAccounts = database.countClusterElements("Account");

  String json = "{ \"@class\": \"Account\", \"type\": \"Residence\", \"street\": \"Piazza di Spagna\"}";

  database.begin(TXTYPE.OPTIMISTIC);
  for (int g = 0; g < 1000; g++) {
    ODocument doc = new ODocument("Account");
    doc.fromJSON(json);
    doc.field("nr", g);
    doc.save();
  }
  database.commit();

  Assert.assertEquals(database.countClusterElements("Account"), totalAccounts + 1000);
  database.close();
}

/**
 * Builds a circular "following" graph (Jack -> Kim -> Teri -> Jack) in a
 * transaction, then reopens the database and walks the links to verify the
 * whole cycle was persisted. Note: only jack.save() is called explicitly;
 * the other documents are reachable from it.
 */
@SuppressWarnings("unchecked")
public void createGraphInTx() {
  final OSchema schema = database.getMetadata().getSchema();
  if (!schema.existsClass("Profile"))
    schema.createClass("Profile");

  database.begin();

  ODocument kim = new ODocument("Profile").field("name", "Kim").field("surname", "Bauer");
  ODocument teri = new ODocument("Profile").field("name", "Teri").field("surname", "Bauer");
  ODocument jack = new ODocument("Profile").field("name", "Jack").field("surname", "Bauer");

  ((HashSet<ODocument>) jack.field("following", new HashSet<ODocument>()).field("following")).add(kim);
  ((HashSet<ODocument>) kim.field("following", new HashSet<ODocument>()).field("following")).add(teri);
  ((HashSet<ODocument>) teri.field("following", new HashSet<ODocument>()).field("following")).add(jack);

  jack.save();

  database.commit();

  // reopen to force a fresh read of the persisted graph
  database.close();
  database.open("admin", "admin");

  ODocument loadedJack = database.load(jack.getIdentity());
  Assert.assertEquals(loadedJack.field("name"), "Jack");
  Collection<ODocument> jackFollowings = loadedJack.field("following");
  Assert.assertNotNull(jackFollowings);
  Assert.assertEquals(jackFollowings.size(), 1);

  ODocument loadedKim = jackFollowings.iterator().next();
  Assert.assertEquals(loadedKim.field("name"), "Kim");
  Collection<ODocument> kimFollowings = loadedKim.field("following");
  Assert.assertNotNull(kimFollowings);
  Assert.assertEquals(kimFollowings.size(), 1);

  ODocument loadedTeri = kimFollowings.iterator().next();
  Assert.assertEquals(loadedTeri.field("name"), "Teri");
  Collection<ODocument> teriFollowings = loadedTeri.field("following");
  Assert.assertNotNull(teriFollowings);
  Assert.assertEquals(teriFollowings.size(), 1);

  // cycle closes back on Jack
  Assert.assertEquals(teriFollowings.iterator().next().field("name"), "Jack");

  database.close();
}

/**
 * Nested begin/commit: saves one document per nesting level and uses a
 * second thread (separate connection) to assert that NOTHING is visible
 * outside the transaction until the outermost commit completes.
 */
public void testNestedTx() throws Exception {
  final ExecutorService executorService = Executors.newSingleThreadExecutor();

  // Runs on another thread/connection: the class must still appear empty
  // while the outer transaction is open.
  final Callable<Void> assertEmptyRecord = new Callable<Void>() {
    @Override
    public Void call() throws Exception {
      final ODatabaseDocumentTx db = new ODatabaseDocumentTx(database.getURL());
      db.open("admin", "admin");
      try {
        Assert.assertEquals(db.countClass("NestedTxClass"), 0);
      } finally {
        db.close();
      }
      return null;
    }
  };

  final OSchema schema = database.getMetadata().getSchema();
  if (!schema.existsClass("NestedTxClass"))
    schema.createClass("NestedTxClass");

  database.begin();
  final ODocument externalDocOne = new ODocument("NestedTxClass");
  externalDocOne.field("v", "val1");
  externalDocOne.save();

  Future assertFuture = executorService.submit(assertEmptyRecord);
  assertFuture.get();

  database.begin();
  final ODocument externalDocTwo = new ODocument("NestedTxClass");
  externalDocTwo.field("v", "val2");
  externalDocTwo.save();

  assertFuture = executorService.submit(assertEmptyRecord);
  assertFuture.get();

  // inner commit — still not visible to the other connection
  database.commit();

  assertFuture = executorService.submit(assertEmptyRecord);
  assertFuture.get();

  final ODocument externalDocThree = new ODocument("NestedTxClass");
  externalDocThree.field("v", "val3");
  externalDocThree.save();

  // outermost commit makes all three documents durable
  database.commit();

  Assert.assertTrue(!database.getTransaction().isActive());
  Assert.assertEquals(database.countClass("NestedTxClass"), 3);
}

/**
 * Nested tx rolled back by a version conflict: a record is modified outside
 * the transaction (bumping its version), then saved again inside it with the
 * stale version, so the outer commit must fail with a concurrent-modification
 * error and leave only the one pre-existing record.
 */
public void testNestedTxRollbackOne() throws Exception {
  final ExecutorService executorService = Executors.newSingleThreadExecutor();

  // Other-connection check: exactly the single pre-existing record visible.
  final Callable<Void> assertEmptyRecord = new Callable<Void>() {
    @Override
    public Void call() throws Exception {
      final ODatabaseDocumentTx db = new ODatabaseDocumentTx(database.getURL());
      db.open("admin", "admin");
      try {
        Assert.assertEquals(db.countClass("NestedTxRollbackOne"), 1);
      } finally {
        db.close();
      }
      return null;
    }
  };

  final OSchema schema = database.getMetadata().getSchema();
  if (!schema.existsClass("NestedTxRollbackOne"))
    schema.createClass("NestedTxRollbackOne");

  ODocument brokenDocOne = new ODocument("NestedTxRollbackOne");
  brokenDocOne.save();
  // reload a stale copy, then update it through a second loaded copy so the
  // stored version moves ahead of brokenDocOne's
  brokenDocOne = database.load(brokenDocOne.getIdentity(), "*:-1", true);

  ODocument brokenDocTwo = database.load(brokenDocOne.getIdentity(), "*:-1", true);
  brokenDocTwo.setDirty();
  brokenDocTwo.field("v", "vstr");
  brokenDocTwo.save();

  try {
    database.begin();
    final ODocument externalDocOne = new ODocument("NestedTxRollbackOne");
    externalDocOne.field("v", "val1");
    externalDocOne.save();

    Future assertFuture = executorService.submit(assertEmptyRecord);
    assertFuture.get();

    database.begin();
    ODocument externalDocTwo = new ODocument("NestedTxRollbackOne");
    externalDocTwo.field("v", "val2");
    externalDocTwo.save();

    assertFuture = executorService.submit(assertEmptyRecord);
    assertFuture.get();

    // save with the stale version — poisons the transaction
    brokenDocOne.setDirty();
    brokenDocOne.save();

    database.commit();

    assertFuture = executorService.submit(assertEmptyRecord);
    assertFuture.get();

    final ODocument externalDocThree = new ODocument("NestedTxRollbackOne");
    externalDocThree.field("v", "val3");
    externalDocThree.save();

    database.commit();
    Assert.fail();
  } catch (OConcurrentModificationException e) {
    database.rollback();
  } catch (OResponseProcessingException e) {
    database.rollback();
  }

  Assert.assertTrue(!database.getTransaction().isActive());
  Assert.assertEquals(database.countClass("NestedTxRollbackOne"), 1);
}

/**
 * Rolling back an inner (nested) transaction is not allowed: the explicit
 * inner rollback must abort the whole unit of work, any further commit
 * attempt fails with ORollbackException, and nothing is persisted.
 */
public void testNestedTxRollbackTwo() {
  final OSchema schema = database.getMetadata().getSchema();
  if (!schema.existsClass("NestedTxRollbackTwo"))
    schema.createClass("NestedTxRollbackTwo");

  database.begin();
  try {
    final ODocument externalDocOne = new ODocument("NestedTxRollbackTwo");
    externalDocOne.field("v", "val1");
    externalDocOne.save();

    database.begin();
    final ODocument externalDocTwo = new ODocument("NestedTxRollbackTwo");
    externalDocTwo.field("v", "val2");
    externalDocTwo.save();
    // rollback of the INNER tx
    database.rollback();

    database.begin();
    final ODocument externalDocFour = new ODocument("NestedTxRollbackTwo");
    externalDocFour.field("v", "val4");
    externalDocFour.save();
    database.commit();

    final ODocument externalDocThree = new ODocument("NestedTxRollbackTwo");
    externalDocThree.field("v", "val3");
    externalDocThree.save();

    database.commit();
    Assert.fail();
  } catch (ORollbackException e) {
    database.rollback();
  }

  Assert.assertTrue(!database.getTransaction().isActive());
  Assert.assertEquals(database.countClass("NestedTxRollbackTwo"), 0);
}
}
/**************************************************************** * Licensed to the Apache Software Foundation (ASF) under one * * or more contributor license agreements. See the NOTICE file * * distributed with this work for additional information * * regarding copyright ownership. The ASF licenses this file * * to you under the Apache License, Version 2.0 (the * * "License"); you may not use this file except in compliance * * with the License. You may obtain a copy of the License at * * * * http://www.apache.org/licenses/LICENSE-2.0 * * * * Unless required by applicable law or agreed to in writing, * * software distributed under the License is distributed on an * * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * * KIND, either express or implied. See the License for the * * specific language governing permissions and limitations * * under the License. * ****************************************************************/ package org.apache.james.jspf.executor; import java.util.Collections; import java.util.HashMap; import java.util.LinkedList; import java.util.Map; import org.apache.james.jspf.core.DNSLookupContinuation; import org.apache.james.jspf.core.DNSResponse; import org.apache.james.jspf.core.SPFChecker; import org.apache.james.jspf.core.SPFCheckerExceptionCatcher; import org.apache.james.jspf.core.SPFSession; import org.apache.james.jspf.core.exceptions.SPFResultException; import org.apache.james.jspf.core.exceptions.TimeoutException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Async implementation of SPFExecutor * */ public class StagedMultipleSPFExecutor implements SPFExecutor, Runnable { private static final Logger LOGGER = LoggerFactory.getLogger(StagedMultipleSPFExecutor.class); private static final String ATTRIBUTE_STAGED_EXECUTOR_CONTINUATION = "StagedMultipleSPFExecutor.continuation"; private static class ResponseQueueImpl extends LinkedList<IResponse> implements IResponseQueue { private static final long serialVersionUID 
= 5714025260393791651L; private int waitingThreads = 0; /** * @see org.apache.james.jspf.executor.IResponseQueue#insertResponse(org.apache.james.jspf.executor.IResponse) */ public synchronized void insertResponse(IResponse r) { addLast(r); notify(); } /** * @see org.apache.james.jspf.executor.IResponseQueue#removeResponse() */ public synchronized IResponse removeResponse() { if ( (size() - waitingThreads <= 0) ) { try { waitingThreads++; wait();} catch (InterruptedException e) {Thread.interrupted();} waitingThreads--; } return (IResponse)removeFirst(); } } // Use short as id because the id header is limited to 16 bit // From RFC1035 4.1.1. Header section format : // // ID A 16 bit identifier assigned by the program that // generates any kind of query. This identifier is copied // the corresponding reply and can be used by the requester // to match up replies to outstanding queries. // private static short id; private synchronized int nextId() { return id++; } private DNSAsynchLookupService dnsProbe; private Thread worker; private Map<Integer,SPFSession> sessions; private Map<Integer,FutureSPFResult>results; private ResponseQueueImpl responseQueue; public StagedMultipleSPFExecutor(DNSAsynchLookupService service) { this.dnsProbe = service; this.responseQueue = new ResponseQueueImpl(); this.sessions = Collections.synchronizedMap(new HashMap<Integer,SPFSession>()); this.results = Collections.synchronizedMap(new HashMap<Integer,FutureSPFResult>()); this.worker = new Thread(this); this.worker.setDaemon(true); this.worker.setName("SPFExecutor"); this.worker.start(); } /** * Execute the non-blocking part of the processing and returns. * If the working queue is full (50 pending responses) this method will not return * until the queue is again not full. 
* * @see org.apache.james.jspf.executor.SPFExecutor#execute(org.apache.james.jspf.core.SPFSession, org.apache.james.jspf.executor.FutureSPFResult) */ public void execute(SPFSession session, FutureSPFResult result) { execute(session, result, true); } public void execute(SPFSession session, FutureSPFResult result, boolean throttle) { SPFChecker checker; while ((checker = session.popChecker()) != null) { // only execute checkers we added (better recursivity) LOGGER.debug("Executing checker: {}", checker); try { DNSLookupContinuation cont = checker.checkSPF(session); // if the checker returns a continuation we return it if (cont != null) { invokeAsynchService(session, result, cont, throttle); return; } } catch (Exception e) { while (e != null) { while (checker == null || !(checker instanceof SPFCheckerExceptionCatcher)) { checker = session.popChecker(); } try { ((SPFCheckerExceptionCatcher) checker).onException(e, session); e = null; } catch (SPFResultException ex) { e = ex; } finally { checker = null; } } } } result.setSPFResult(session); } /** * throttle should be true only when the caller thread is the client and not the worker thread. * We could even remove the throttle parameter and check the currentThread. * This way the worker is never "blocked" while outside callers will be blocked if our * queue is too big (so this is not fully "asynchronous"). 
*/ private synchronized void invokeAsynchService(SPFSession session, FutureSPFResult result, DNSLookupContinuation cont, boolean throttle) { while (throttle && results.size() > 50) { try { this.wait(100); } catch (InterruptedException e) { } } int nextId = nextId(); sessions.put(new Integer(nextId), session); results.put(new Integer(nextId), result); session.setAttribute(ATTRIBUTE_STAGED_EXECUTOR_CONTINUATION, cont); dnsProbe.getRecordsAsynch(cont.getRequest(), nextId, responseQueue); } public void run() { while (true) { IResponse resp = responseQueue.removeResponse(); Integer respId = (Integer)resp.getId(); SPFSession session = sessions.remove(respId); FutureSPFResult result = results.remove(respId); DNSLookupContinuation cont = (DNSLookupContinuation) session.getAttribute(ATTRIBUTE_STAGED_EXECUTOR_CONTINUATION); DNSResponse response; if (resp.getException() != null) { response = new DNSResponse((TimeoutException) resp.getException()); } else { response = new DNSResponse(resp.getValue()); } try { cont = cont.getListener().onDNSResponse(response, session); if (cont != null) { invokeAsynchService(session, result, cont, false); } else { execute(session, result, false); } } catch (Exception e) { SPFChecker checker = null; while (e != null) { while (checker == null || !(checker instanceof SPFCheckerExceptionCatcher)) { checker = session.popChecker(); } try { ((SPFCheckerExceptionCatcher) checker).onException(e, session); e = null; } catch (SPFResultException ex) { e = ex; } finally { checker = null; } } execute(session, result, false); } } } }
/* * DBeaver - Universal Database Manager * Copyright (C) 2016-2016 Karl Griesser (fullref@gmail.com) * Copyright (C) 2010-2019 Serge Rider (serge@jkiss.org) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jkiss.dbeaver.ext.exasol.views; import org.eclipse.jface.dialogs.IDialogPage; import org.eclipse.jface.resource.ImageDescriptor; import org.eclipse.swt.SWT; import org.eclipse.swt.events.ModifyEvent; import org.eclipse.swt.events.ModifyListener; import org.eclipse.swt.events.SelectionAdapter; import org.eclipse.swt.events.SelectionEvent; import org.eclipse.swt.layout.GridData; import org.eclipse.swt.layout.GridLayout; import org.eclipse.swt.widgets.Button; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Label; import org.eclipse.swt.widgets.Text; import org.jkiss.dbeaver.ext.exasol.Activator; import org.jkiss.dbeaver.ext.exasol.ExasolConstants; import org.jkiss.dbeaver.ext.exasol.ExasolMessages; import org.jkiss.dbeaver.model.DBPDataSourceContainer; import org.jkiss.dbeaver.model.connection.DBPConnectionConfiguration; import org.jkiss.dbeaver.ui.ICompositeDialogPage; import org.jkiss.dbeaver.ui.UIUtils; import org.jkiss.dbeaver.ui.dialogs.connection.ClientHomesSelector; import org.jkiss.dbeaver.ui.dialogs.connection.ConnectionPageAbstract; import org.jkiss.dbeaver.ui.dialogs.connection.DriverPropertiesDialogPage; import org.jkiss.utils.CommonUtils; import java.util.Locale; public class ExasolConnectionPage extends ConnectionPageAbstract 
implements ICompositeDialogPage { private Label backupHostLabel; public ExasolConnectionPage() { } private Text hostText; private Text backupHostText; private Text portText; private Text usernameText; private Text passwordText; private ClientHomesSelector homesSelector; private Button useBackupHostList; private boolean showBackupHosts = false; private Button encryptCommunication; private static ImageDescriptor EXASOL_LOGO_IMG = Activator.getImageDescriptor("icons/exasol.png"); //$NON-NLS-1$ @Override public void dispose() { super.dispose(); } @Override public void createControl(Composite composite) { setImageDescriptor(EXASOL_LOGO_IMG); Composite control = new Composite(composite, SWT.NONE); control.setLayout(new GridLayout(1, false)); control.setLayoutData(new GridData(GridData.FILL_BOTH)); ModifyListener textListener = new ModifyListener() { @Override public void modifyText(ModifyEvent e) { evaluateURL(); } }; { Composite addrGroup = UIUtils.createControlGroup(control, ExasolMessages.label_database, 2, 0, 0); GridData gd = new GridData(GridData.FILL_HORIZONTAL); addrGroup.setLayoutData(gd); Label hostLabel = UIUtils.createControlLabel(addrGroup, ExasolMessages.label_host_list); hostLabel.setLayoutData(new GridData(GridData.HORIZONTAL_ALIGN_END)); hostText = new Text(addrGroup, SWT.BORDER); gd = new GridData(GridData.FILL_HORIZONTAL); gd.grabExcessHorizontalSpace = true; hostText.setLayoutData(gd); hostText.addModifyListener(textListener); backupHostLabel = UIUtils.createControlLabel(addrGroup, ExasolMessages.label_backup_host_list); gd = new GridData(GridData.HORIZONTAL_ALIGN_END); backupHostLabel.setLayoutData(gd); backupHostLabel.setEnabled(showBackupHosts); backupHostText = new Text(addrGroup, SWT.BORDER); gd = new GridData(GridData.FILL_HORIZONTAL); gd.grabExcessHorizontalSpace = true; backupHostText.setLayoutData(gd); backupHostText.addModifyListener(textListener); useBackupHostList = UIUtils.createLabelCheckbox(addrGroup, 
ExasolMessages.label_use_backup_host_list, showBackupHosts); useBackupHostList.addSelectionListener(new SelectionAdapter() { @Override public void widgetSelected(SelectionEvent e) { backupHostLabel.setEnabled(useBackupHostList.getSelection()); backupHostText.setEnabled(useBackupHostList.getSelection()); //reset text if disabled if (!useBackupHostList.getSelection()) backupHostText.setText(""); //$NON-NLS-1$ } }); Label portLabel = UIUtils.createControlLabel(addrGroup, ExasolMessages.dialog_connection_port); gd = new GridData(GridData.HORIZONTAL_ALIGN_END); portLabel.setLayoutData(gd); portText = new Text(addrGroup, SWT.BORDER); gd = new GridData(GridData.VERTICAL_ALIGN_BEGINNING); gd.widthHint = 40; portText.setLayoutData(gd); portText.addVerifyListener(UIUtils.getIntegerVerifyListener(Locale.getDefault())); portText.addModifyListener(textListener); encryptCommunication = UIUtils.createLabelCheckbox(addrGroup, ExasolMessages.label_encrypt, false); } { Composite addrGroup = UIUtils.createControlGroup(control, ExasolMessages.label_security, 2, 0, 0); GridData gd = new GridData(GridData.FILL_HORIZONTAL); addrGroup.setLayoutData(gd); Label usernameLabel = UIUtils.createControlLabel(addrGroup, ExasolMessages.dialog_connection_user_name); usernameLabel.setLayoutData(new GridData(GridData.HORIZONTAL_ALIGN_END)); usernameText = new Text(addrGroup, SWT.BORDER); gd = new GridData(GridData.HORIZONTAL_ALIGN_BEGINNING); gd.widthHint = 200; usernameText.setLayoutData(gd); usernameText.addModifyListener(textListener); Label passwordLabel = UIUtils.createControlLabel(addrGroup, ExasolMessages.dialog_connection_password); passwordLabel.setLayoutData(new GridData(GridData.HORIZONTAL_ALIGN_END)); Composite passPH = UIUtils.createPlaceholder(addrGroup, 2, 5); passwordText = new Text(passPH, SWT.BORDER | SWT.PASSWORD); gd = new GridData(GridData.HORIZONTAL_ALIGN_BEGINNING); gd.widthHint = 200; passwordText.setLayoutData(gd); passwordText.addModifyListener(textListener); 
createSavePasswordButton(passPH); } createDriverPanel(control); setControl(control); } @Override public boolean isComplete() { return hostText != null && portText != null && !CommonUtils.isEmpty(hostText.getText()) && !CommonUtils.isEmpty(portText.getText()); } @Override public void loadSettings() { super.loadSettings(); // Load values from new connection info DBPConnectionConfiguration connectionInfo = site.getActiveDataSource().getConnectionConfiguration(); if (hostText != null) { if (!CommonUtils.isEmpty(connectionInfo.getHostName())) { hostText.setText(connectionInfo.getHostName()); } else { hostText.setText(""); //$NON-NLS-1$ } } if (portText != null) { if (!CommonUtils.isEmpty(connectionInfo.getHostPort())) { portText.setText(String.valueOf(connectionInfo.getHostPort())); } else if (site.getDriver().getDefaultPort() != null) { portText.setText(site.getDriver().getDefaultPort()); } else { portText.setText("8563"); //$NON-NLS-1$ } } if (usernameText != null) { usernameText.setText(CommonUtils.notEmpty(connectionInfo.getUserName())); } if (passwordText != null) { passwordText.setText(CommonUtils.notEmpty(connectionInfo.getUserPassword())); } String backupHostText = connectionInfo.getProviderProperty(ExasolConstants.DRV_BACKUP_HOST_LIST); if (!CommonUtils.isEmpty(backupHostText)) { this.backupHostLabel.setEnabled(true); this.backupHostText.setText(backupHostText); this.backupHostText.setEnabled(true); this.useBackupHostList.setSelection(true); } else { this.backupHostLabel.setEnabled(false); this.backupHostText.setEnabled(false); this.useBackupHostList.setSelection(false); } String encryptComm = connectionInfo.getProviderProperty(ExasolConstants.DRV_ENCRYPT); if (encryptComm != null) { if (encryptComm.equals("1")) //$NON-NLS-1$ this.encryptCommunication.setEnabled(true); } } @Override public void saveSettings(DBPDataSourceContainer dataSource) { DBPConnectionConfiguration connectionInfo = dataSource.getConnectionConfiguration(); if (hostText != null) { 
connectionInfo.setHostName(hostText.getText().trim()); } if (portText != null) { connectionInfo.setHostPort(portText.getText().trim()); } if (usernameText != null) { connectionInfo.setUserName(usernameText.getText().trim()); } if (passwordText != null) { connectionInfo.setUserPassword(passwordText.getText()); } if (homesSelector != null) { connectionInfo.setClientHomeId(homesSelector.getSelectedHome()); } connectionInfo.setProviderProperty(ExasolConstants.DRV_BACKUP_HOST_LIST, backupHostText.getText()); if (this.encryptCommunication.getSelection()) connectionInfo.setProviderProperty(ExasolConstants.DRV_ENCRYPT, "1"); //$NON-NLS-1$ super.saveSettings(dataSource); } private void evaluateURL() { site.updateButtons(); } @Override public IDialogPage[] getSubPages(boolean extrasOnly) { return new IDialogPage[]{ new DriverPropertiesDialogPage(this) }; } }
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.hive.ql.exec.vector.expressions;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;

import java.sql.Timestamp;

import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterDecimalColGreaterEqualDecimalColumn;
import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterDecimalColLessDecimalScalar;
import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterDecimalScalarGreaterDecimalColumn;
import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterDoubleColumnBetween;
import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterDoubleColumnNotBetween;
import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterLongColEqualLongScalar;
import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterLongColGreaterLongColumn;
import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterLongColGreaterLongScalar;
import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterLongColLessLongColumn;
import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterLongColumnBetween;
import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterLongColumnNotBetween;
import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterLongScalarGreaterLongColumn;
import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterLongScalarLessLongColumn;
import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterStringColumnBetween;
import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterStringColumnNotBetween;
import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterDecimalColEqualDecimalScalar;
import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterDecimalColEqualDecimalColumn;
import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterDecimalScalarEqualDecimalColumn;
import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterTimestampColumnBetween;
import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterTimestampColumnNotBetween;
import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColAddLongScalar;
import org.apache.hadoop.hive.ql.exec.vector.util.VectorizedRowGroupGenUtil;
import org.junit.Assert;
import org.junit.Test;

/**
 * Unit tests for filter expressions.
 *
 * Each test builds a VectorizedRowBatch, runs a generated filter expression
 * over it, and checks the surviving row count (batch.size) and, when the
 * selection vector is in use, the selected row indices.
 */
public class TestVectorFilterExpressions {

  @Test
  public void testFilterLongColEqualLongScalar() {
    // seed 23 — the generated data apparently puts value 46 at row 1;
    // the filter keeps exactly that row
    VectorizedRowBatch vrg =
        VectorizedRowGroupGenUtil.getVectorizedRowBatch(1024, 1, 23);
    FilterLongColEqualLongScalar expr = new FilterLongColEqualLongScalar(0, 46);
    expr.evaluate(vrg);
    assertEquals(1, vrg.size);
    assertEquals(1, vrg.selected[0]);
  }

  /**
   * col0 > col1: basic case, null with selection vector in use, repeating
   * value, repeating null, and nulls on both inputs.
   */
  @Test
  public void testFilterLongColGreaterLongColumn() {
    int seed = 17;
    VectorizedRowBatch b = VectorizedRowGroupGenUtil.getVectorizedRowBatch(
        VectorizedRowBatch.DEFAULT_SIZE, 2, seed);
    LongColumnVector lcv0 = (LongColumnVector) b.cols[0];
    LongColumnVector lcv1 = (LongColumnVector) b.cols[1];
    b.size = 3;

    FilterLongColGreaterLongColumn expr = new FilterLongColGreaterLongColumn(0, 1);

    // Basic case
    lcv0.vector[0] = 10;
    lcv0.vector[1] = 10;
    lcv0.vector[2] = 10;
    lcv1.vector[0] = 20;
    lcv1.vector[1] = 1;
    lcv1.vector[2] = 7;
    expr.evaluate(b);
    assertEquals(2, b.size);
    assertEquals(1, b.selected[0]);
    assertEquals(2, b.selected[1]);

    // handle null with selected in use
    lcv0.noNulls = false;
    lcv0.isNull[1] = true;
    expr.evaluate(b);
    assertEquals(1, b.size);
    assertEquals(2, b.selected[0]);

    // handle repeating
    b.size = 3;
    b.selectedInUse = false;
    lcv0.isRepeating = true;
    lcv0.noNulls = true;
    expr.evaluate(b);
    assertEquals(2, b.size);

    // handle repeating null
    b.size = 3;
    b.selectedInUse = false;
    lcv0.isNull[0] = true;
    lcv0.noNulls = false;
    expr.evaluate(b);
    assertEquals(0, b.size);

    // handle null on both sizes (not repeating)
    b.size = 3;
    b.selectedInUse = false;
    lcv0.isRepeating = false;
    lcv1.noNulls = false;
    lcv1.isNull[2] = true;
    expr.evaluate(b);
    assertEquals(0, b.size);
  }

  /**
   * Exhaustively exercises the four null/repeating combinations of the
   * column-op-scalar filter template over the fixed batch [0, 1, 2, 3].
   */
  @Test
  public void testColOpScalarNumericFilterNullAndRepeatingLogic() {
    // No nulls, not repeating
    FilterLongColGreaterLongScalar f = new FilterLongColGreaterLongScalar(0, 1);
    VectorizedRowBatch batch = this.getSimpleLongBatch();
    batch.cols[0].noNulls = true;
    batch.cols[0].isRepeating = false;
    f.evaluate(batch);
    // only last 2 rows qualify
    Assert.assertEquals(2, batch.size);
    // show that their positions are recorded
    Assert.assertTrue(batch.selectedInUse);
    Assert.assertEquals(2, batch.selected[0]);
    Assert.assertEquals(3, batch.selected[1]);

    // make everything qualify and ensure selected is not in use
    f = new FilterLongColGreaterLongScalar(0, -1); // col > -1
    batch = getSimpleLongBatch();
    f.evaluate(batch);
    Assert.assertFalse(batch.selectedInUse);
    Assert.assertEquals(4, batch.size);

    // has nulls, not repeating
    batch = getSimpleLongBatch();
    f = new FilterLongColGreaterLongScalar(0, 1); // col > 1
    batch.cols[0].noNulls = false;
    batch.cols[0].isRepeating = false;
    batch.cols[0].isNull[3] = true;
    f.evaluate(batch);
    Assert.assertTrue(batch.selectedInUse);
    Assert.assertEquals(1, batch.size);
    Assert.assertEquals(2, batch.selected[0]);

    // no nulls, is repeating
    batch = getSimpleLongBatch();
    f = new FilterLongColGreaterLongScalar(0, -1); // col > -1
    batch.cols[0].noNulls = true;
    batch.cols[0].isRepeating = true;
    f.evaluate(batch);
    Assert.assertFalse(batch.selectedInUse);
    Assert.assertEquals(4, batch.size); // everything qualifies (repeated row 0 value 0 > -1)

    // has nulls, is repeating
    batch = getSimpleLongBatch();
    batch.cols[0].noNulls = false;
    batch.cols[0].isRepeating = true;
    batch.cols[0].isNull[0] = true;
    f.evaluate(batch);
    Assert.assertEquals(0, batch.size); // all values are null so none qualify
  }

  /** 4-row, 1-column batch holding values 0..3 in column 0. */
  private VectorizedRowBatch getSimpleLongBatch() {
    VectorizedRowBatch batch = VectorizedRowGroupGenUtil
        .getVectorizedRowBatch(4, 1, 1);
    LongColumnVector lcv0 = (LongColumnVector) batch.cols[0];

    lcv0.vector[0] = 0;
    lcv0.vector[1] = 1;
    lcv0.vector[2] = 2;
    lcv0.vector[3] = 3;

    return batch;
  }

  /**
   * col2 < col1 with a child expression (col2 = col0 + 10) that materializes
   * the left operand before the filter runs.
   */
  @Test
  public void testFilterLongColLessLongColumn() {
    int seed = 17;
    VectorizedRowBatch vrg = VectorizedRowGroupGenUtil.getVectorizedRowBatch(
        5, 3, seed);
    LongColumnVector lcv0 = (LongColumnVector) vrg.cols[0];
    LongColumnVector lcv1 = (LongColumnVector) vrg.cols[1];
    LongColumnVector lcv2 = (LongColumnVector) vrg.cols[2];
    FilterLongColLessLongColumn expr = new FilterLongColLessLongColumn(2, 1);
    LongColAddLongScalar childExpr = new LongColAddLongScalar(0, 10, 2);
    expr.setChildExpressions(new VectorExpression[] {childExpr});

    // Basic case
    lcv0.vector[0] = 10;
    lcv0.vector[1] = 20;
    lcv0.vector[2] = 9;
    lcv0.vector[3] = 20;
    lcv0.vector[4] = 10;
    lcv1.vector[0] = 20;
    lcv1.vector[1] = 10;
    lcv1.vector[2] = 20;
    lcv1.vector[3] = 10;
    lcv1.vector[4] = 20;
    expr.evaluate(vrg);
    // only row 2 satisfies (9 + 10) < 20
    assertEquals(1, vrg.size);
    assertEquals(2, vrg.selected[0]);
  }

  /**
   * scalar < col (and scalar > col) across: basic, nulls, nulls with a
   * pre-existing selection vector, repeating value, repeating null.
   */
  @Test
  public void testFilterLongScalarLessLongColumn() {
    int seed = 17;
    VectorizedRowBatch vrb = VectorizedRowGroupGenUtil.getVectorizedRowBatch(
        5, 2, seed);
    LongColumnVector lcv0 = (LongColumnVector) vrb.cols[0];
    FilterLongScalarLessLongColumn expr1 = new FilterLongScalarLessLongColumn(15, 0);

    // Basic case
    lcv0.vector[0] = 5;
    lcv0.vector[1] = 20;
    lcv0.vector[2] = 17;
    lcv0.vector[3] = 15;
    lcv0.vector[4] = 10;

    expr1.evaluate(vrb);

    assertEquals(2, vrb.size);
    assertTrue(vrb.selectedInUse);
    assertEquals(1, vrb.selected[0]);
    assertEquals(2, vrb.selected[1]);

    // chained filter narrows the existing selection further
    FilterLongScalarGreaterLongColumn expr2 = new FilterLongScalarGreaterLongColumn(18, 0);
    expr2.evaluate(vrb);
    assertEquals(1, vrb.size);
    assertTrue(vrb.selectedInUse);
    assertEquals(2, vrb.selected[0]);

    // With nulls
    VectorizedRowBatch vrb1 = VectorizedRowGroupGenUtil.getVectorizedRowBatch(
        5, 2, seed);

    lcv0 = (LongColumnVector) vrb1.cols[0];
    lcv0.vector[0] = 5;
    lcv0.vector[1] = 20;
    lcv0.vector[2] = 17;
    lcv0.vector[3] = 15;
    lcv0.vector[4] = 10;
    lcv0.noNulls = false;
    lcv0.isNull[0] = true;
    lcv0.isNull[2] = true;

    expr1.evaluate(vrb1);
    assertEquals(1, vrb1.size);
    assertTrue(vrb1.selectedInUse);
    assertEquals(1, vrb1.selected[0]);

    // With nulls and selected
    VectorizedRowBatch vrb2 = VectorizedRowGroupGenUtil.getVectorizedRowBatch(
        7, 2, seed);
    vrb2.selectedInUse = true;
    vrb2.selected[0] = 1;
    vrb2.selected[1] = 2;
    vrb2.selected[2] = 4;
    vrb2.size = 3;

    lcv0 = (LongColumnVector) vrb2.cols[0];
    lcv0.vector[0] = 5;
    lcv0.vector[1] = 20;
    lcv0.vector[2] = 17;
    lcv0.vector[3] = 15;
    lcv0.vector[4] = 10;
    lcv0.vector[5] = 19;
    lcv0.vector[6] = 21;
    lcv0.noNulls = false;
    lcv0.isNull[0] = true;
    lcv0.isNull[2] = true;
    lcv0.isNull[5] = true;

    expr1.evaluate(vrb2);
    assertEquals(1, vrb2.size);
    assertTrue(vrb2.selectedInUse);
    assertEquals(1, vrb2.selected[0]);

    // Repeating non null
    VectorizedRowBatch vrb3 = VectorizedRowGroupGenUtil.getVectorizedRowBatch(
        7, 2, seed);
    lcv0 = (LongColumnVector) vrb3.cols[0];

    lcv0.isRepeating = true;
    lcv0.vector[0] = 17;
    lcv0.vector[1] = 20;
    lcv0.vector[2] = 17;
    lcv0.vector[3] = 15;
    lcv0.vector[4] = 10;

    expr1.evaluate(vrb3);
    assertEquals(7, vrb3.size);
    assertFalse(vrb3.selectedInUse);
    assertTrue(lcv0.isRepeating);

    // Repeating null
    lcv0.noNulls = false;
    lcv0.vector[0] = 17;
    lcv0.isNull[0] = true;

    expr1.evaluate(vrb3);
    assertEquals(0, vrb3.size);
  }

  /**
   * col BETWEEN 15 AND 17 across the same null/selected/repeating matrix,
   * plus a check of the expression's getters and setters.
   */
  @Test
  public void testFilterLongBetween() {
    int seed = 17;
    VectorizedRowBatch vrb = VectorizedRowGroupGenUtil.getVectorizedRowBatch(
        5, 2, seed);
    LongColumnVector lcv0 = (LongColumnVector) vrb.cols[0];
    VectorExpression expr1 = new FilterLongColumnBetween(0, 15, 17);

    // Basic case
    lcv0.vector[0] = 5;
    lcv0.vector[1] = 20;
    lcv0.vector[2] = 17;
    lcv0.vector[3] = 15;
    lcv0.vector[4] = 10;

    expr1.evaluate(vrb);

    assertEquals(2, vrb.size);
    assertTrue(vrb.selectedInUse);
    assertEquals(2, vrb.selected[0]);
    assertEquals(3, vrb.selected[1]);

    // With nulls
    VectorizedRowBatch vrb1 = VectorizedRowGroupGenUtil.getVectorizedRowBatch(
        5, 2, seed);

    lcv0 = (LongColumnVector) vrb1.cols[0];
    lcv0.vector[0] = 5;
    lcv0.vector[1] = 20;
    lcv0.vector[2] = 17;
    lcv0.vector[3] = 15;
    lcv0.vector[4] = 10;
    lcv0.noNulls = false;
    lcv0.isNull[0] = true;
    lcv0.isNull[2] = true;

    expr1.evaluate(vrb1);
    assertEquals(1, vrb1.size);
    assertTrue(vrb1.selectedInUse);
    assertEquals(3, vrb1.selected[0]);

    // With nulls and selected
    VectorizedRowBatch vrb2 = VectorizedRowGroupGenUtil.getVectorizedRowBatch(
        7, 2, seed);
    vrb2.selectedInUse = true;
    vrb2.selected[0] = 1;
    vrb2.selected[1] = 2;
    vrb2.selected[2] = 4;
    vrb2.size = 3;

    lcv0 = (LongColumnVector) vrb2.cols[0];
    lcv0.vector[0] = 5;
    lcv0.vector[1] = 20;
    lcv0.vector[2] = 17;
    lcv0.vector[3] = 15;
    lcv0.vector[4] = 10;
    lcv0.vector[5] = 19;
    lcv0.vector[6] = 21;
    lcv0.noNulls = false;
    lcv0.isNull[0] = true;
    lcv0.isNull[2] = true;
    lcv0.isNull[5] = true;

    expr1.evaluate(vrb2);
    assertEquals(0, vrb2.size);

    // Repeating non null
    VectorizedRowBatch vrb3 = VectorizedRowGroupGenUtil.getVectorizedRowBatch(
        7, 2, seed);
    lcv0 = (LongColumnVector) vrb3.cols[0];

    lcv0.isRepeating = true;
    lcv0.vector[0] = 17;
    lcv0.vector[1] = 20;
    lcv0.vector[2] = 17;
    lcv0.vector[3] = 15;
    lcv0.vector[4] = 10;

    expr1.evaluate(vrb3);
    assertEquals(7, vrb3.size);
    assertFalse(vrb3.selectedInUse);
    assertTrue(lcv0.isRepeating);

    // Repeating null
    lcv0.noNulls = false;
    lcv0.vector[0] = 17;
    lcv0.isNull[0] = true;

    expr1.evaluate(vrb3);
    assertEquals(0, vrb3.size);

    // Test getters/setters
    FilterLongColumnBetween betweenExpr = (FilterLongColumnBetween) expr1;
    assertEquals(15, betweenExpr.getLeftValue());
    assertEquals(17, betweenExpr.getRightValue());
    assertEquals(0, betweenExpr.getColNum());

    betweenExpr.setColNum(1);
    assertEquals(1, betweenExpr.getColNum());

    betweenExpr.setLeftValue(2);
    assertEquals(2, betweenExpr.getLeftValue());

    betweenExpr.setRightValue(3);
    assertEquals(3, betweenExpr.getRightValue());
  }

  @Test
  public void testFilterLongNotBetween() {

    // Spot check only. null & repeating behavior are checked elsewhere for the same template.
    int seed = 17;
    VectorizedRowBatch vrb = VectorizedRowGroupGenUtil.getVectorizedRowBatch(
        5, 2, seed);
    LongColumnVector lcv0 = (LongColumnVector) vrb.cols[0];

    // Basic case
    lcv0.vector[0] = 5;
    lcv0.vector[1] = 20;
    lcv0.vector[2] = 17;
    lcv0.vector[3] = 15;
    lcv0.vector[4] = 10;

    VectorExpression expr = new FilterLongColumnNotBetween(0, 10, 20);
    expr.evaluate(vrb);

    // only row 0 (value 5) falls outside [10, 20]
    assertEquals(1, vrb.size);
    assertTrue(vrb.selectedInUse);
    assertEquals(0, vrb.selected[0]);
  }

  @Test
  public void testFilterDoubleBetween() {

    // Spot check only. null & repeating behavior are checked elsewhere for the same template.
    int seed = 17;
    VectorizedRowBatch vrb = VectorizedRowGroupGenUtil.getVectorizedRowBatch(
        5, 2, seed);
    DoubleColumnVector dcv0 = new DoubleColumnVector();
    vrb.cols[0] = dcv0;

    // Basic case
    dcv0.vector[0] = 5;
    dcv0.vector[1] = 20;
    dcv0.vector[2] = 17;
    dcv0.vector[3] = 15;
    dcv0.vector[4] = 10;

    VectorExpression expr = new FilterDoubleColumnBetween(0, 20, 21);
    expr.evaluate(vrb);

    // only row 1 (value 20) lies within [20, 21]
    assertEquals(1, vrb.size);
    assertTrue(vrb.selectedInUse);
    assertEquals(1, vrb.selected[0]);
  }

  @Test
  public void testFilterDoubleNotBetween() {

    // Spot check only. null & repeating behavior are checked elsewhere for the same template.
    int seed = 17;
    VectorizedRowBatch vrb = VectorizedRowGroupGenUtil.getVectorizedRowBatch(
        5, 2, seed);
    vrb.cols[0] = new DoubleColumnVector();
    DoubleColumnVector dcv = (DoubleColumnVector) vrb.cols[0];

    // Basic case
    dcv.vector[0] = 5;
    dcv.vector[1] = 20;
    dcv.vector[2] = 17;
    dcv.vector[3] = 15;
    dcv.vector[4] = 10;

    VectorExpression expr = new FilterDoubleColumnNotBetween(0, 10, 20);
    expr.evaluate(vrb);

    // only row 0 (value 5) falls outside [10, 20]
    assertEquals(1, vrb.size);
    assertTrue(vrb.selectedInUse);
    assertEquals(0, vrb.selected[0]);
  }

  // shared single-byte UTF-8 fixtures for the string filter tests
  static byte[] a = null;
  static byte[] b = null;
  static byte[] c = null;

  static {
    try {
      a = "a".getBytes("UTF-8");
      b = "b".getBytes("UTF-8");
      c = "c".getBytes("UTF-8");
    } catch (Exception e) {
      ; // won't happen
    }
  }

  @Test
  public void testFilterStringBetween() {
    int seed = 17;
    VectorizedRowBatch vrb = VectorizedRowGroupGenUtil.getVectorizedRowBatch(
        3, 2, seed);
    vrb.cols[0] = new BytesColumnVector();
    BytesColumnVector bcv = (BytesColumnVector) vrb.cols[0];

    bcv.initBuffer();
    bcv.setVal(0, a, 0, 1);
    bcv.setVal(1, b, 0, 1);
    bcv.setVal(2, c, 0, 1);

    VectorExpression expr = new FilterStringColumnBetween(0, b, c);

    // basic test: "b" and "c" lie within [b, c]
    expr.evaluate(vrb);

    assertEquals(2, vrb.size);
    assertTrue(vrb.selectedInUse);
    assertEquals(1, vrb.selected[0]);
    assertEquals(2, vrb.selected[1]);

    // nulls
    vrb.selectedInUse = false;
    vrb.size = 3;
bcv.noNulls = false; bcv.isNull[2] = true; expr.evaluate(vrb); assertEquals(1, vrb.size); assertEquals(1, vrb.selected[0]); assertTrue(vrb.selectedInUse); // repeating vrb.selectedInUse = false; vrb.size = 3; bcv.noNulls = true; bcv.isRepeating = true; expr.evaluate(vrb); assertEquals(0, vrb.size); // nulls and repeating vrb.selectedInUse = false; vrb.size = 3; bcv.noNulls = false; bcv.isRepeating = true; bcv.isNull[0] = true; bcv.setVal(0, b, 0, 1); expr.evaluate(vrb); assertEquals(0, vrb.size); } @Test public void testFilterStringNotBetween() { // Spot check only. Non-standard cases are checked for the same template in another test. int seed = 17; VectorizedRowBatch vrb = VectorizedRowGroupGenUtil.getVectorizedRowBatch( 3, 2, seed); vrb.cols[0] = new BytesColumnVector(); BytesColumnVector bcv = (BytesColumnVector) vrb.cols[0]; bcv.initBuffer(); bcv.setVal(0, a, 0, 1); bcv.setVal(1, b, 0, 1); bcv.setVal(2, c, 0, 1); VectorExpression expr = new FilterStringColumnNotBetween(0, b, c); expr.evaluate(vrb); assertEquals(1, vrb.size); assertTrue(vrb.selectedInUse); assertEquals(0, vrb.selected[0]); } @Test public void testFilterTimestampBetween() { VectorizedRowBatch vrb = new VectorizedRowBatch(1); vrb.cols[0] = new TimestampColumnVector(); TimestampColumnVector lcv0 = (TimestampColumnVector) vrb.cols[0]; Timestamp startTS = new Timestamp(0); // the epoch Timestamp endTS = Timestamp.valueOf("2013-11-05 00:00:00.000000000"); Timestamp ts0 = Timestamp.valueOf("1963-11-06 00:00:00.000"); lcv0.set(0, ts0); Timestamp ts1 = Timestamp.valueOf("1983-11-06 00:00:00.000"); lcv0.set(1, ts1); Timestamp ts2 = Timestamp.valueOf("2099-11-06 00:00:00.000"); lcv0.set(2, ts2); vrb.size = 3; VectorExpression expr1 = new FilterTimestampColumnBetween(0, startTS, endTS); expr1.evaluate(vrb); assertEquals(1, vrb.size); assertEquals(true, vrb.selectedInUse); assertEquals(1, vrb.selected[0]); } @Test public void testFilterTimestampNotBetween() { VectorizedRowBatch vrb = new 
VectorizedRowBatch(1); vrb.cols[0] = new TimestampColumnVector(); TimestampColumnVector lcv0 = (TimestampColumnVector) vrb.cols[0]; Timestamp startTS = Timestamp.valueOf("2013-11-05 00:00:00.000000000"); Timestamp endTS = Timestamp.valueOf("2013-11-05 00:00:00.000000010"); Timestamp ts0 = Timestamp.valueOf("2013-11-04 00:00:00.000000000"); lcv0.set(0, ts0); Timestamp ts1 = Timestamp.valueOf("2013-11-05 00:00:00.000000002"); lcv0.set(1, ts1); Timestamp ts2 = Timestamp.valueOf("2099-11-06 00:00:00.000"); lcv0.set(2, ts2); vrb.size = 3; VectorExpression expr1 = new FilterTimestampColumnNotBetween(0, startTS, endTS); expr1.evaluate(vrb); assertEquals(2, vrb.size); assertEquals(true, vrb.selectedInUse); assertEquals(0, vrb.selected[0]); assertEquals(2, vrb.selected[1]); } /** * Test the IN filter VectorExpression classes. */ @Test public void testFilterLongIn() { int seed = 17; VectorizedRowBatch vrb = VectorizedRowGroupGenUtil.getVectorizedRowBatch( 5, 2, seed); LongColumnVector lcv0 = (LongColumnVector) vrb.cols[0]; long[] inList = {5, 20}; FilterLongColumnInList f = new FilterLongColumnInList(0); f.setInListValues(inList); VectorExpression expr1 = f; // Basic case lcv0.vector[0] = 5; lcv0.vector[1] = 20; lcv0.vector[2] = 17; lcv0.vector[3] = 15; lcv0.vector[4] = 10; expr1.evaluate(vrb); assertEquals(2, vrb.size); assertTrue(vrb.selectedInUse); assertEquals(0, vrb.selected[0]); assertEquals(1, vrb.selected[1]); // With nulls VectorizedRowBatch vrb1 = VectorizedRowGroupGenUtil.getVectorizedRowBatch( 5, 2, seed); lcv0 = (LongColumnVector) vrb1.cols[0]; lcv0.vector[0] = 5; lcv0.vector[1] = 20; lcv0.vector[2] = 17; lcv0.vector[3] = 15; lcv0.vector[4] = 10; lcv0.noNulls = false; lcv0.isNull[0] = true; lcv0.isNull[2] = true; expr1.evaluate(vrb1); assertEquals(1, vrb1.size); assertTrue(vrb1.selectedInUse); assertEquals(1, vrb1.selected[0]); // With nulls and selected VectorizedRowBatch vrb2 = VectorizedRowGroupGenUtil.getVectorizedRowBatch( 7, 2, seed); vrb2.selectedInUse = 
true; vrb2.selected[0] = 1; vrb2.selected[1] = 2; vrb2.selected[2] = 4; vrb2.size = 3; lcv0 = (LongColumnVector) vrb2.cols[0]; lcv0.vector[0] = 5; lcv0.vector[1] = 20; lcv0.vector[2] = 17; lcv0.vector[3] = 15; lcv0.vector[4] = 10; lcv0.vector[5] = 19; lcv0.vector[6] = 21; lcv0.noNulls = false; lcv0.isNull[0] = true; lcv0.isNull[2] = true; lcv0.isNull[5] = true; expr1.evaluate(vrb2); assertEquals(1, vrb2.size); assertEquals(1, vrb2.selected[0]); // Repeating non null VectorizedRowBatch vrb3 = VectorizedRowGroupGenUtil.getVectorizedRowBatch( 7, 2, seed); lcv0 = (LongColumnVector) vrb3.cols[0]; lcv0.isRepeating = true; lcv0.vector[0] = 5; lcv0.vector[1] = 20; lcv0.vector[2] = 17; lcv0.vector[3] = 15; lcv0.vector[4] = 10; expr1.evaluate(vrb3); assertEquals(7, vrb3.size); assertFalse(vrb3.selectedInUse); assertTrue(lcv0.isRepeating); // Repeating null lcv0.noNulls = false; lcv0.vector[0] = 5; lcv0.isNull[0] = true; expr1.evaluate(vrb3); assertEquals(0, vrb3.size); } @Test public void testFilterDoubleIn() { int seed = 17; VectorizedRowBatch vrb = VectorizedRowGroupGenUtil.getVectorizedRowBatch( 5, 2, seed); DoubleColumnVector dcv0 = new DoubleColumnVector(); vrb.cols[0] = dcv0; double[] inList = {5.0, 20.2}; FilterDoubleColumnInList f = new FilterDoubleColumnInList(0); f.setInListValues(inList); VectorExpression expr1 = f; // Basic sanity check. Other cases are not skipped because it is similar to the case for Long. 
dcv0.vector[0] = 5.0; dcv0.vector[1] = 20.2; dcv0.vector[2] = 17.0; dcv0.vector[3] = 15.0; dcv0.vector[4] = 10.0; expr1.evaluate(vrb); assertEquals(2, vrb.size); assertTrue(vrb.selectedInUse); assertEquals(0, vrb.selected[0]); assertEquals(1, vrb.selected[1]); } @Test public void testFilterStringIn() { int seed = 17; VectorizedRowBatch vrb = VectorizedRowGroupGenUtil.getVectorizedRowBatch( 3, 2, seed); vrb.cols[0] = new BytesColumnVector(); BytesColumnVector bcv = (BytesColumnVector) vrb.cols[0]; bcv.initBuffer(); bcv.setVal(0, a, 0, 1); bcv.setVal(1, b, 0, 1); bcv.setVal(2, c, 0, 1); VectorExpression expr = new FilterStringColumnInList(0); byte[][] inList = {b, c}; ((FilterStringColumnInList) expr).setInListValues(inList); // basic test expr.evaluate(vrb); assertEquals(2, vrb.size); assertTrue(vrb.selectedInUse); assertEquals(1, vrb.selected[0]); assertEquals(2, vrb.selected[1]); // nulls vrb.selectedInUse = false; vrb.size = 3; bcv.noNulls = false; bcv.isNull[2] = true; expr.evaluate(vrb); assertEquals(1, vrb.size); assertEquals(1, vrb.selected[0]); assertTrue(vrb.selectedInUse); // repeating vrb.selectedInUse = false; vrb.size = 3; bcv.noNulls = true; bcv.isRepeating = true; expr.evaluate(vrb); assertEquals(0, vrb.size); // nulls and repeating vrb.selectedInUse = false; vrb.size = 3; bcv.noNulls = false; bcv.isRepeating = true; bcv.isNull[0] = true; bcv.setVal(0, b, 0, 1); expr.evaluate(vrb); assertEquals(0, vrb.size); } /** * This tests the template for Decimal Column-Scalar comparison filters, * called FilterDecimalColumnCompareScalar.txt. Only equal is tested for * multiple cases because the logic is the same for <, >, <=, >=, == and !=. 
*/ @Test public void testFilterDecimalColEqualDecimalScalar() { VectorizedRowBatch b = getVectorizedRowBatch1DecimalCol(); HiveDecimal scalar = HiveDecimal.create("-3.30"); VectorExpression expr = new FilterDecimalColEqualDecimalScalar(0, scalar); expr.evaluate(b); // check that right row(s) are selected assertTrue(b.selectedInUse); assertEquals(1, b.selected[0]); assertEquals(1, b.size); // try again with a null value b = getVectorizedRowBatch1DecimalCol(); b.cols[0].noNulls = false; b.cols[0].isNull[1] = true; expr.evaluate(b); // verify that no rows were selected assertEquals(0, b.size); // try the repeating case b = getVectorizedRowBatch1DecimalCol(); b.cols[0].isRepeating = true; expr.evaluate(b); // verify that no rows were selected assertEquals(0, b.size); // try the repeating null case b = getVectorizedRowBatch1DecimalCol(); b.cols[0].isRepeating = true; b.cols[0].noNulls = false; b.cols[0].isNull[0] = true; expr.evaluate(b); // verify that no rows were selected assertEquals(0, b.size); } /** * This tests the template for Decimal Scalar-Column comparison filters, * called FilterDecimalScalarCompareColumn.txt. Only equal is tested for multiple * cases because the logic is the same for <, >, <=, >=, == and !=. 
*/ @Test public void testFilterDecimalScalarEqualDecimalColumn() { VectorizedRowBatch b = getVectorizedRowBatch1DecimalCol(); HiveDecimal scalar = HiveDecimal.create("-3.30"); VectorExpression expr = new FilterDecimalScalarEqualDecimalColumn(scalar, 0); expr.evaluate(b); // check that right row(s) are selected assertTrue(b.selectedInUse); assertEquals(1, b.selected[0]); assertEquals(1, b.size); // try again with a null value b = getVectorizedRowBatch1DecimalCol(); b.cols[0].noNulls = false; b.cols[0].isNull[1] = true; expr.evaluate(b); // verify that no rows were selected assertEquals(0, b.size); // try the repeating case b = getVectorizedRowBatch1DecimalCol(); b.cols[0].isRepeating = true; expr.evaluate(b); // verify that no rows were selected assertEquals(0, b.size); // try the repeating null case b = getVectorizedRowBatch1DecimalCol(); b.cols[0].isRepeating = true; b.cols[0].noNulls = false; b.cols[0].isNull[0] = true; expr.evaluate(b); // verify that no rows were selected assertEquals(0, b.size); } /** * This tests the template for Decimal Column-Column comparison filters, * called FilterDecimalColumnCompareColumn.txt. Only equal is tested for multiple * cases because the logic is the same for <, >, <=, >=, == and !=. 
*/ @Test public void testFilterDecimalColumnEqualDecimalColumn() { VectorizedRowBatch b = getVectorizedRowBatch2DecimalCol(); VectorExpression expr = new FilterDecimalColEqualDecimalColumn(0, 1); expr.evaluate(b); // check that right row(s) are selected assertTrue(b.selectedInUse); assertEquals(1, b.selected[0]); assertEquals(1, b.size); // try again with a null value b = getVectorizedRowBatch2DecimalCol(); b.cols[0].noNulls = false; b.cols[0].isNull[1] = true; expr.evaluate(b); // verify that no rows were selected assertEquals(0, b.size); // try the repeating case b = getVectorizedRowBatch2DecimalCol(); b.cols[0].isRepeating = true; expr.evaluate(b); // verify that no rows were selected assertEquals(0, b.size); // try the repeating null case b = getVectorizedRowBatch2DecimalCol(); b.cols[0].isRepeating = true; b.cols[0].noNulls = false; b.cols[0].isNull[0] = true; expr.evaluate(b); // verify that no rows were selected assertEquals(0, b.size); // try nulls on both sides b = getVectorizedRowBatch2DecimalCol(); b.cols[0].noNulls = false; b.cols[0].isNull[0] = true; b.cols[1].noNulls = false; b.cols[1].isNull[2] = true; expr.evaluate(b); assertEquals(1, b.size); // second of three was selected // try repeating on both sides b = getVectorizedRowBatch2DecimalCol(); b.cols[0].isRepeating = true; b.cols[1].isRepeating = true; expr.evaluate(b); // verify that no rows were selected assertEquals(0, b.size); } /** * Spot check col < scalar for decimal. */ @Test public void testFilterDecimalColLessScalar() { VectorizedRowBatch b = getVectorizedRowBatch1DecimalCol(); HiveDecimal scalar = HiveDecimal.create("0"); VectorExpression expr = new FilterDecimalColLessDecimalScalar(0, scalar); expr.evaluate(b); // check that right row(s) are selected assertTrue(b.selectedInUse); assertEquals(1, b.selected[0]); assertEquals(1, b.size); } /** * Spot check scalar > col for decimal. 
*/ @Test public void testFilterDecimalScalarGreaterThanColumn() { VectorizedRowBatch b = getVectorizedRowBatch1DecimalCol(); HiveDecimal scalar = HiveDecimal.create("0"); VectorExpression expr = new FilterDecimalScalarGreaterDecimalColumn(scalar, 0); expr.evaluate(b); // check that right row(s) are selected assertTrue(b.selectedInUse); assertEquals(1, b.selected[0]); assertEquals(1, b.size); } /** * Spot check col >= col for decimal. */ @Test public void testFilterDecimalColGreaterEqualCol() { VectorizedRowBatch b = getVectorizedRowBatch2DecimalCol(); VectorExpression expr = new FilterDecimalColGreaterEqualDecimalColumn(0, 1); expr.evaluate(b); // check that right row(s) are selected assertTrue(b.selectedInUse); assertEquals(0, b.selected[0]); assertEquals(1, b.selected[1]); assertEquals(2, b.size); } private VectorizedRowBatch getVectorizedRowBatch1DecimalCol() { VectorizedRowBatch b = new VectorizedRowBatch(1); DecimalColumnVector v0; b.cols[0] = v0 = new DecimalColumnVector(18, 2); v0.vector[0].set(HiveDecimal.create("1.20")); v0.vector[1].set(HiveDecimal.create("-3.30")); v0.vector[2].set(HiveDecimal.create("0")); b.size = 3; return b; } private VectorizedRowBatch getVectorizedRowBatch2DecimalCol() { VectorizedRowBatch b = new VectorizedRowBatch(2); DecimalColumnVector v0, v1; b.cols[0] = v0 = new DecimalColumnVector(18, 2); v0.vector[0].set(HiveDecimal.create("1.20")); v0.vector[1].set(HiveDecimal.create("-3.30")); v0.vector[2].set(HiveDecimal.create("0")); b.cols[1] = v1 = new DecimalColumnVector(18, 2); v1.vector[0].set(HiveDecimal.create("-1.00")); v1.vector[1].set(HiveDecimal.create("-3.30")); v1.vector[2].set(HiveDecimal.create("10.00")); b.size = 3; return b; } }
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.activemq.artemis.tests.performance.storage;

import java.io.File;
import java.util.Collection;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.LinkedBlockingDeque;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.locks.ReentrantReadWriteLock;

import org.apache.activemq.artemis.api.config.ActiveMQDefaultConfiguration;
import org.apache.activemq.artemis.api.core.SimpleString;
import org.apache.activemq.artemis.core.paging.PagingManager;
import org.apache.activemq.artemis.core.paging.PagingStore;
import org.apache.activemq.artemis.core.paging.cursor.PageCursorProvider;
import org.apache.activemq.artemis.core.paging.impl.Page;
import org.apache.activemq.artemis.core.persistence.OperationContext;
import org.apache.activemq.artemis.core.persistence.StorageManager;
import org.apache.activemq.artemis.core.replication.ReplicationManager;
import org.apache.activemq.artemis.core.server.ActiveMQServer;
import org.apache.activemq.artemis.core.server.JournalType;
import org.apache.activemq.artemis.core.server.RouteContextList;
import org.apache.activemq.artemis.core.server.ServerMessage;
import org.apache.activemq.artemis.core.server.impl.ServerMessageImpl;
import org.apache.activemq.artemis.core.settings.impl.AddressFullMessagePolicy;
import org.apache.activemq.artemis.core.settings.impl.AddressSettings;
import org.apache.activemq.artemis.core.transaction.Transaction;
import org.apache.activemq.artemis.tests.util.ActiveMQTestBase;
import org.junit.Assert;
import org.junit.Test;

/**
 * Performance/stress test that hammers the journal {@link StorageManager} with many
 * concurrent writer threads plus one concurrent deleter thread, measuring the wall-clock
 * time of the write phase and asserting that no thread observed an error.
 */
public class PersistMultiThreadTest extends ActiveMQTestBase {

   // Scratch directory for the journal/bindings/paging/large-message stores.
   final String DIRECTORY = "./target/journaltmp";

   // Stub paging store handed to every message so storage calls never trigger real paging.
   FakePagingStore fakePagingStore = new FakePagingStore();

   @Test
   public void testMultipleWrites() throws Exception {
      deleteDirectory(new File(DIRECTORY));
      ActiveMQServer server = createServer(true);
      server.getConfiguration().setJournalCompactMinFiles(ActiveMQDefaultConfiguration.getDefaultJournalCompactMinFiles());
      server.getConfiguration().setJournalCompactPercentage(ActiveMQDefaultConfiguration.getDefaultJournalCompactPercentage());
      server.getConfiguration().setJournalDirectory(DIRECTORY + "/journal");
      server.getConfiguration().setBindingsDirectory(DIRECTORY + "/bindings");
      server.getConfiguration().setPagingDirectory(DIRECTORY + "/paging");
      server.getConfiguration().setLargeMessagesDirectory(DIRECTORY + "/largemessage");
      server.getConfiguration().setJournalFileSize(10 * 1024 * 1024);
      server.getConfiguration().setJournalMinFiles(2);
      server.getConfiguration().setJournalType(JournalType.ASYNCIO);

      server.start();

      StorageManager storage = server.getStorageManager();

      long msgID = storage.generateID();
      System.out.println("msgID=" + msgID);

      int NUMBER_OF_THREADS = 50;
      int NUMBER_OF_MESSAGES = 5000;

      MyThread[] threads = new MyThread[NUMBER_OF_THREADS];

      final CountDownLatch alignFlag = new CountDownLatch(NUMBER_OF_THREADS);
      final CountDownLatch startFlag = new CountDownLatch(1);
      final CountDownLatch finishFlag = new CountDownLatch(NUMBER_OF_THREADS);

      // Each writer stores 10 messages per iteration, so the deleter must consume
      // NUMBER_OF_MESSAGES * NUMBER_OF_THREADS * 10 ids in total.
      MyDeleteThread deleteThread = new MyDeleteThread("deleteThread", storage, NUMBER_OF_MESSAGES * NUMBER_OF_THREADS * 10);
      deleteThread.start();

      for (int i = 0; i < threads.length; i++) {
         threads[i] = new MyThread("writer::" + i, storage, NUMBER_OF_MESSAGES, alignFlag, startFlag, finishFlag);
      }

      for (MyThread t : threads) {
         t.start();
      }

      // Wait until every writer is ready before opening the gate, so the timed
      // window contains only the storage work.
      alignFlag.await();

      long startTime = System.currentTimeMillis();
      startFlag.countDown();

      // I'm using a countDown to avoid measuring time spent on thread context from join.
      // i.e. i want to measure as soon as the loops are done
      finishFlag.await();
      long endtime = System.currentTimeMillis();

      System.out.println("Time:: " + (endtime - startTime));

      for (MyThread t : threads) {
         t.join();
         Assert.assertEquals(0, t.errors.get());
      }

      deleteThread.join();
      Assert.assertEquals(0, deleteThread.errors.get());
   }

   // Message ids written by the writer threads, consumed by the delete thread.
   LinkedBlockingDeque<Long> deletes = new LinkedBlockingDeque<>();

   /**
    * Writer thread: stores {@code numberOfMessages} batches of 10 transactional
    * messages each, waits for each commit to complete, then queues the ids for deletion.
    */
   class MyThread extends Thread {

      final StorageManager storage;
      final int numberOfMessages;
      final AtomicInteger errors = new AtomicInteger(0);

      final CountDownLatch align;
      final CountDownLatch start;
      final CountDownLatch finish;

      MyThread(String name,
               StorageManager storage,
               int numberOfMessages,
               CountDownLatch align,
               CountDownLatch start,
               CountDownLatch finish) {
         super(name);
         this.storage = storage;
         this.numberOfMessages = numberOfMessages;
         this.align = align;
         this.start = start;
         this.finish = finish;
      }

      @Override
      public void run() {
         try {
            align.countDown();
            start.await();

            long id = storage.generateID();
            long txID = storage.generateID();

            // each thread will store a single message that will never be deleted, trying to force compacting to happen
            storeMessage(txID, id);
            storage.commit(txID);

            OperationContext ctx = storage.getContext();

            for (int i = 0; i < numberOfMessages; i++) {
               txID = storage.generateID();

               long[] messageID = new long[10];

               for (int msgI = 0; msgI < 10; msgI++) {
                  id = storage.generateID();
                  messageID[msgI] = id;
                  storeMessage(txID, id);
               }

               storage.commit(txID);
               ctx.waitCompletion();

               // only hand the ids to the deleter after the commit is durable
               for (long deleteID : messageID) {
                  deletes.add(deleteID);
               }
            }
         } catch (Exception e) {
            e.printStackTrace();
            errors.incrementAndGet();
         } finally {
            finish.countDown();
         }
      }

      // Stores one ~10K transactional message with a reference on queue 1.
      private void storeMessage(long txID, long id) throws Exception {
         ServerMessage message = new ServerMessageImpl(id, 10 * 1024);
         message.setPagingStore(fakePagingStore);

         message.getBodyBuffer().writeBytes(new byte[104]);
         message.putStringProperty("hello", "" + id);

         storage.storeMessageTransactional(txID, message);
         storage.storeReferenceTransactional(txID, 1, id);
         message.decrementRefCount();
      }
   }

   /**
    * Deleter thread: drains {@link #deletes}, acknowledging and deleting each
    * message until {@code numberOfMessages} ids have been processed.
    */
   class MyDeleteThread extends Thread {

      final StorageManager storage;
      final int numberOfMessages;
      final AtomicInteger errors = new AtomicInteger(0);

      MyDeleteThread(String name, StorageManager storage, int numberOfMessages) {
         super(name);
         this.storage = storage;
         this.numberOfMessages = numberOfMessages;
      }

      @Override
      public void run() {
         long deletesNr = 0;
         try {
            for (int i = 0; i < numberOfMessages; i++) {
               if (i % 1000 == 0) {
                  // periodic throttle point, currently disabled
                  //                  storage.getContext().waitCompletion();
                  //                  deletesNr = 0;
                  //                  Thread.sleep(200);
               }
               deletesNr++;
               Long deleteID = deletes.poll(10, TimeUnit.MINUTES);
               if (deleteID == null) {
                  System.err.println("Couldn't poll delete info");
                  errors.incrementAndGet();
                  break;
               }

               storage.storeAcknowledge(1, deleteID);
               storage.deleteMessage(deleteID);
            }
         } catch (Exception e) {
            e.printStackTrace(System.out);
            errors.incrementAndGet();
         } finally {
            System.err.println("Finished the delete loop!!!! deleted " + deletesNr);
         }
      }
   }

   /** No-op {@link PagingStore} so stored messages never interact with real paging. */
   class FakePagingStore implements PagingStore {

      @Override
      public SimpleString getAddress() {
         return null;
      }

      @Override
      public int getNumberOfPages() {
         return 0;
      }

      @Override
      public int getCurrentWritingPage() {
         return 0;
      }

      @Override
      public SimpleString getStoreName() {
         return null;
      }

      @Override
      public File getFolder() {
         return null;
      }

      @Override
      public AddressFullMessagePolicy getAddressFullMessagePolicy() {
         return null;
      }

      @Override
      public long getFirstPage() {
         return 0;
      }

      @Override
      public long getPageSizeBytes() {
         return 0;
      }

      @Override
      public long getAddressSize() {
         return 0;
      }

      @Override
      public long getMaxSize() {
         return 0;
      }

      @Override
      public boolean isFull() {
         return false;
      }

      @Override
      public boolean isRejectingMessages() {
         return false;
      }

      @Override
      public void applySetting(AddressSettings addressSettings) {
      }

      @Override
      public boolean isPaging() {
         return false;
      }

      @Override
      public void sync() throws Exception {
      }

      @Override
      public void ioSync() throws Exception {
      }

      @Override
      public boolean page(ServerMessage message,
                          Transaction tx,
                          RouteContextList listCtx,
                          ReentrantReadWriteLock.ReadLock readLock) throws Exception {
         return false;
      }

      @Override
      public Page createPage(int page) throws Exception {
         return null;
      }

      @Override
      public boolean checkPageFileExists(int page) throws Exception {
         return false;
      }

      @Override
      public PagingManager getPagingManager() {
         return null;
      }

      @Override
      public PageCursorProvider getCursorProvider() {
         return null;
      }

      @Override
      public void processReload() throws Exception {
      }

      @Override
      public Page depage() throws Exception {
         return null;
      }

      @Override
      public void forceAnotherPage() throws Exception {
      }

      @Override
      public Page getCurrentPage() {
         return null;
      }

      @Override
      public boolean startPaging() throws Exception {
         return false;
      }

      @Override
      public void stopPaging() throws Exception {
      }

      @Override
      public void addSize(int size) {
      }

      @Override
      public boolean checkMemory(Runnable runnable) {
         return false;
      }

      @Override
      public boolean lock(long timeout) {
         return false;
      }

      @Override
      public void unlock() {
      }

      @Override
      public void flushExecutors() {
      }

      @Override
      public Collection<Integer> getCurrentIds() throws Exception {
         return null;
      }

      @Override
      public void sendPages(ReplicationManager replicator, Collection<Integer> pageIds) throws Exception {
      }

      @Override
      public void disableCleanup() {
      }

      @Override
      public void enableCleanup() {
      }

      @Override
      public void start() throws Exception {
      }

      @Override
      public void stop() throws Exception {
      }

      @Override
      public boolean isStarted() {
         return false;
      }

      @Override
      public boolean checkReleasedMemory() {
         return true;
      }
   }
}
package sh.isaac.api.coordinate;

import org.eclipse.collections.api.set.ImmutableSet;
import org.jvnet.hk2.annotations.Service;
import sh.isaac.api.Get;
import sh.isaac.api.StaticIsaacCache;
import sh.isaac.api.collections.jsr166y.ConcurrentReferenceHashMap;
import sh.isaac.api.component.concept.ConceptSpecification;
import sh.isaac.api.externalizable.ByteArrayDataBuffer;
import sh.isaac.api.marshal.Marshaler;
import sh.isaac.api.marshal.Unmarshaler;

//This class is not treated as a service, however, it needs the annotation, so that the reset() gets fired at appropriate times.
/**
 * Immutable value object pairing a time with a path concept nid, forming a
 * position on a STAMP path. Instances are interned through a weak-reference
 * cache ({@link #SINGLETONS}) by the static {@code make(...)} factories.
 */
@Service
public final class StampPositionImmutable
        implements StampPosition, Comparable<StampPosition>, ImmutableCoordinate, StaticIsaacCache {

    // Weak interning cache: both keys and values are weakly referenced so unused
    // positions can be garbage-collected.
    private static final ConcurrentReferenceHashMap<StampPositionImmutable, StampPositionImmutable> SINGLETONS =
            new ConcurrentReferenceHashMap<>(ConcurrentReferenceHashMap.ReferenceType.WEAK,
                    ConcurrentReferenceHashMap.ReferenceType.WEAK);

    // Serialization format version written/read by marshal()/make(ByteArrayDataBuffer).
    public static final int marshalVersion = 1;

    /** The time. */
    private final long time;

    // Nid of the concept identifying the path this position lies on.
    private final int pathForPositionNid;

    // Lazily-initialized cache for getPathOrigins(); transient so it is not marshalled.
    private transient StampPathImmutable stampPath;

    private StampPositionImmutable() {
        // No arg constructor for HK2 managed instance
        // This instance just enables reset functionality...
        // Sentinel values; this instance is never used as a real position.
        this.time = Long.MIN_VALUE;
        this.pathForPositionNid = Integer.MAX_VALUE;
    }

    /** Clears the interning cache (invoked via the StaticIsaacCache contract). */
    @Override
    public void reset() {
        SINGLETONS.clear();
    }

    /**
     * Instantiates a new stamp position impl.
     *
     * @param time the time
     * @param pathForPositionNid the path nid
     */
    private StampPositionImmutable(long time, int pathForPositionNid) {
        this.time = time;
        this.pathForPositionNid = pathForPositionNid;
    }

    // Interning factory: returns the cached instance for (time, path nid) if present.
    public static StampPositionImmutable make(long time, int pathForPositionNid) {
        return SINGLETONS.computeIfAbsent(new StampPositionImmutable(time, pathForPositionNid),
                stampPositionImmutable -> stampPositionImmutable);
    }

    // Convenience factory taking the path as a ConceptSpecification.
    public static StampPositionImmutable make(long time, ConceptSpecification pathForPositionSpecification) {
        return SINGLETONS.computeIfAbsent(new StampPositionImmutable(time, pathForPositionSpecification.getNid()),
                stampPositionImmutable -> stampPositionImmutable);
    }

    // Writes version, time, then path nid; must stay in sync with the @Unmarshaler below.
    @Override
    @Marshaler
    public void marshal(ByteArrayDataBuffer out) {
        out.putInt(marshalVersion);
        out.putLong(this.time);
        out.putNid(this.pathForPositionNid);
    }

    // Reads the format written by marshal(). NOTE(review): returns a fresh instance
    // rather than going through the SINGLETONS interning cache — presumably intentional,
    // but worth confirming against the other coordinate classes.
    @Unmarshaler
    public static StampPositionImmutable make(ByteArrayDataBuffer in) {
        int objectMarshalVersion = in.getInt();
        switch (objectMarshalVersion) {
            case marshalVersion:
                return new StampPositionImmutable(in.getLong(), in.getNid());
            default:
                throw new UnsupportedOperationException("Unsupported version: " + objectMarshalVersion);
        }
    }

    /**
     * Gets the time.
     *
     * @return the time
     */
    @Override
    public long getTime() {
        return this.time;
    }

    @Override
    public StampPositionImmutable toStampPositionImmutable() {
        return this;
    }

    /**
     * Compare to. Orders by time first, then by path nid.
     *
     * @param o the o
     * @return the int
     */
    @Override
    public int compareTo(StampPosition o) {
        final int comparison = Long.compare(this.time, o.getTime());

        if (comparison != 0) {
            return comparison;
        }

        return Integer.compare(this.pathForPositionNid, o.getPathForPositionNid());
    }

    @Override
    public int getPathForPositionNid() {
        return this.pathForPositionNid;
    }

    /**
     * Gets the stamp path concept nid.
     *
     * @return the stamp path concept nid
     */
    public ConceptSpecification getPathForPositionConcept() {
        return Get.conceptSpecification(this.pathForPositionNid);
    }

    /**
     * Equals. Compares against any {@link StampPosition} implementation,
     * by time and path nid.
     *
     * @param obj the obj
     * @return true, if successful
     */
    @Override
    public boolean equals(Object obj) {
        if (obj == null) {
            return false;
        }

        if (!(obj instanceof StampPosition)) {
            return false;
        }

        final StampPosition other = (StampPosition) obj;

        if (this.time != other.getTime()) {
            return false;
        }

        return this.pathForPositionNid == other.getPathForPositionNid();
    }

    /**
     * Hash code.
     *
     * @return the int
     */
    @Override
    public int hashCode() {
        int hash = 7;

        hash = 83 * hash + (int) (this.time ^ (this.time >>> 32));
        hash = 83 * hash + Integer.hashCode(this.pathForPositionNid);
        return hash;
    }

    /**
     * To string. Renders Long.MAX_VALUE as "latest" and Long.MIN_VALUE as "CANCELED".
     *
     * @return the string
     */
    @Override
    public String toString() {
        final StringBuilder sb = new StringBuilder();

        sb.append("StampPosition:{");

        if (this.time == Long.MAX_VALUE) {
            sb.append("latest");
        } else if (this.time == Long.MIN_VALUE) {
            sb.append("CANCELED");
        } else {
            sb.append(getTimeAsInstant());
        }

        sb.append(" on '")
                .append(Get.conceptDescriptionText(this.pathForPositionNid))
                .append("' path}");
        return sb.toString();
    }

    // Lazily resolves this position's path and returns its origins.
    // NOTE(review): the lazy init of stampPath is unsynchronized — a race can only
    // cause redundant StampPathImmutable.make calls, assuming make() is idempotent; confirm.
    public ImmutableSet<StampPositionImmutable> getPathOrigins() {
        if (this.stampPath == null) {
            this.stampPath = StampPathImmutable.make(getPathForPositionNid());
        }
        return this.stampPath.getPathOrigins();
    }
}
/* * Copyright 2000-2016 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.openapi.vfs.newvfs.persistent; import com.intellij.concurrency.JobSchedulerImpl; import com.intellij.openapi.application.Application; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.components.ApplicationComponent; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.util.Comparing; import com.intellij.openapi.util.LowMemoryWatcher; import com.intellij.openapi.util.ShutDownTracker; import com.intellij.openapi.util.io.*; import com.intellij.openapi.util.text.StringUtil; import com.intellij.openapi.vfs.*; import com.intellij.openapi.vfs.ex.temp.TempFileSystem; import com.intellij.openapi.vfs.newvfs.*; import com.intellij.openapi.vfs.newvfs.events.*; import com.intellij.openapi.vfs.newvfs.impl.*; import com.intellij.util.*; import com.intellij.util.containers.ConcurrentIntObjectMap; import com.intellij.util.containers.ContainerUtil; import com.intellij.util.containers.EmptyIntHashSet; import com.intellij.util.io.ReplicatorInputStream; import com.intellij.util.io.URLUtil; import com.intellij.util.messages.MessageBus; import gnu.trove.*; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.jetbrains.annotations.TestOnly; import java.io.*; import java.util.*; import java.util.concurrent.atomic.AtomicBoolean; /** * @author max */ 
/**
 * Persistent VFS implementation backed by {@link FSRecords}: file hierarchy, names,
 * attributes and (optionally) content are persisted in local record storage, and all
 * structural changes are broadcast as VFS events over the application message bus.
 */
public class PersistentFSImpl extends PersistentFS implements ApplicationComponent {
  private static final Logger LOG = Logger.getInstance("#com.intellij.openapi.vfs.newvfs.persistent.PersistentFS");

  private final MessageBus myEventBus;

  // root url -> root entry; uses the platform path-hashing strategy for case handling
  private final Map<String, VirtualFileSystemEntry> myRoots =
    ContainerUtil.newConcurrentMap(10, 0.4f, JobSchedulerImpl.CORES_COUNT, FileUtil.PATH_HASHING_STRATEGY);
  private final ConcurrentIntObjectMap<VirtualFileSystemEntry> myRootsById =
    ContainerUtil.createConcurrentIntObjectMap(10, 0.4f, JobSchedulerImpl.CORES_COUNT);

  // FS roots must be in this map too. findFileById() relies on this.
  private final ConcurrentIntObjectMap<VirtualFileSystemEntry> myIdToDirCache = ContainerUtil.createConcurrentIntObjectMap();
  // Guards reads/writes of persisted file content and the MUST_RELOAD_CONTENT flag.
  private final Object myInputLock = new Object();

  // Ensures FSRecords.dispose() runs exactly once (component disposal vs. shutdown hook).
  private final AtomicBoolean myShutDown = new AtomicBoolean(false);
  @SuppressWarnings({"FieldCanBeLocal", "unused"})
  private final LowMemoryWatcher myWatcher = LowMemoryWatcher.register(this::clearIdCache);
  private volatile int myStructureModificationCount;

  public PersistentFSImpl(@NotNull MessageBus bus) {
    myEventBus = bus;
    ShutDownTracker.getInstance().registerShutdownTask(this::performShutdown);
  }

  @Override
  public void initComponent() {
    FSRecords.connect();
  }

  @Override
  public void disposeComponent() {
    performShutdown();
  }

  private void performShutdown() {
    if (myShutDown.compareAndSet(false, true)) {
      LOG.info("VFS dispose started");
      FSRecords.dispose();
      LOG.info("VFS dispose completed");
    }
  }

  @Override
  @NonNls
  @NotNull
  public String getComponentName() {
    return "app.component.PersistentFS";
  }

  @Override
  public boolean areChildrenLoaded(@NotNull final VirtualFile dir) {
    return areChildrenLoaded(getFileId(dir));
  }

  @Override
  public long getCreationTimestamp() {
    return FSRecords.getCreationTimestamp();
  }

  @NotNull
  private static NewVirtualFileSystem getDelegate(@NotNull VirtualFile file) {
    return (NewVirtualFileSystem)file.getFileSystem();
  }

  @Override
  public boolean wereChildrenAccessed(@NotNull final VirtualFile dir) {
    return FSRecords.wereChildrenAccessed(getFileId(dir));
  }

  @Override
  @NotNull
  public String[] list(@NotNull final VirtualFile file) {
    int id = getFileId(file);
    // Persist children from the delegate FS on first access; afterwards serve from records.
    FSRecords.NameId[] nameIds = FSRecords.listAll(id);
    if (!areChildrenLoaded(id)) {
      nameIds = persistAllChildren(file, id, nameIds);
    }
    return ContainerUtil.map2Array(nameIds, String.class, id1 -> id1.name.toString());
  }

  @Override
  @NotNull
  public String[] listPersisted(@NotNull VirtualFile parent) {
    return listPersisted(FSRecords.list(getFileId(parent)));
  }

  @NotNull
  private static String[] listPersisted(@NotNull int[] childrenIds) {
    String[] names = ArrayUtil.newStringArray(childrenIds.length);
    for (int i = 0; i < childrenIds.length; i++) {
      names[i] = FSRecords.getName(childrenIds[i]);
    }
    return names;
  }

  /**
   * Merges the delegate file system's current child list into the persisted child
   * records for {@code id}: already-persisted children are kept, new ones get fresh
   * records, and the directory is then marked as cached.
   */
  @NotNull
  private static FSRecords.NameId[] persistAllChildren(@NotNull final VirtualFile file, final int id, @NotNull FSRecords.NameId[] current) {
    final NewVirtualFileSystem fs = replaceWithNativeFS(getDelegate(file));

    String[] delegateNames = VfsUtil.filterNames(fs.list(file));
    // Delegate reports nothing but records have children: keep the persisted view.
    if (delegateNames.length == 0 && current.length > 0) {
      return current;
    }

    Set<String> toAdd = ContainerUtil.newHashSet(delegateNames);
    for (FSRecords.NameId nameId : current) {
      toAdd.remove(nameId.name.toString());
    }

    final TIntArrayList childrenIds = new TIntArrayList(current.length + toAdd.size());
    final List<FSRecords.NameId> nameIds = ContainerUtil.newArrayListWithCapacity(current.length + toAdd.size());
    for (FSRecords.NameId nameId : current) {
      childrenIds.add(nameId.id);
      nameIds.add(nameId);
    }
    for (String newName : toAdd) {
      FakeVirtualFile child = new FakeVirtualFile(file, newName);
      FileAttributes attributes = fs.getAttributes(child);
      if (attributes != null) {
        int childId = createAndFillRecord(fs, child, id, attributes);
        childrenIds.add(childId);
        nameIds.add(new FSRecords.NameId(childId, FileNameCache.storeName(newName), newName));
      }
    }

    FSRecords.updateList(id, childrenIds.toNativeArray());
    setChildrenCached(id);

    return nameIds.toArray(new FSRecords.NameId[nameIds.size()]);
  }

  private static void setChildrenCached(int id) {
    int flags = FSRecords.getFlags(id);
    FSRecords.setFlags(id, flags | CHILDREN_CACHED_FLAG, true);
  }

  @Override
  @NotNull
  public FSRecords.NameId[] listAll(@NotNull VirtualFile parent) {
    final int parentId = getFileId(parent);

    FSRecords.NameId[] nameIds = FSRecords.listAll(parentId);
    if (!areChildrenLoaded(parentId)) {
      return persistAllChildren(parent, parentId, nameIds);
    }

    return nameIds;
  }

  private static boolean areChildrenLoaded(final int parentId) {
    return BitUtil.isSet(FSRecords.getFlags(parentId), CHILDREN_CACHED_FLAG);
  }

  @Override
  @Nullable
  public DataInputStream readAttribute(@NotNull final VirtualFile file, @NotNull final FileAttribute att) {
    return FSRecords.readAttributeWithLock(getFileId(file), att);
  }

  @Override
  @NotNull
  public DataOutputStream writeAttribute(@NotNull final VirtualFile file, @NotNull final FileAttribute att) {
    return FSRecords.writeAttribute(getFileId(file), att);
  }

  @Nullable
  private static DataInputStream readContent(@NotNull VirtualFile file) {
    return FSRecords.readContent(getFileId(file));
  }

  @Nullable
  private static DataInputStream readContentById(int contentId) {
    return FSRecords.readContentById(contentId);
  }

  @NotNull
  private static DataOutputStream writeContent(@NotNull VirtualFile file, boolean readOnly) {
    return FSRecords.writeContent(getFileId(file), readOnly);
  }

  private static void writeContent(@NotNull VirtualFile file, ByteSequence content, boolean readOnly) throws IOException {
    FSRecords.writeContent(getFileId(file), content, readOnly);
  }

  @Override
  public int storeUnlinkedContent(@NotNull byte[] bytes) {
    return FSRecords.storeUnlinkedContent(bytes);
  }

  @Override
  public int getModificationCount(@NotNull final VirtualFile file) {
    return FSRecords.getModCount(getFileId(file));
  }

  @Override
  public int getModificationCount() {
    return FSRecords.getLocalModCount();
  }

  @Override
  public int getStructureModificationCount() {
    return myStructureModificationCount;
  }

  public void incStructuralModificationCount() {
    myStructureModificationCount++;
  }

  @Override
  public int getFilesystemModificationCount() {
    return FSRecords.getModCount();
  }

  /**
   * Writes name/parent/attributes into record {@code id}.
   *
   * @return false when the record already carries the same name (or, for the empty
   *         root name, when children are already loaded) and nothing was written.
   */
  private static boolean writeAttributesToRecord(final int id, final int parentId, @NotNull VirtualFile file, @NotNull NewVirtualFileSystem fs, @NotNull FileAttributes attributes) {
    assert id > 0 : id;
    String name = file.getName();
    if (!name.isEmpty()) {
      if (namesEqual(fs, name, FSRecords.getNameSequence(id))) return false; // TODO: Handle root attributes change.
    }
    else {
      if (areChildrenLoaded(id)) return false; // TODO: hack
    }

    FSRecords.writeAttributesToRecord(id, parentId, attributes, name);

    return true;
  }

  @Override
  public int getFileAttributes(int id) {
    assert id > 0;
    //noinspection MagicConstant
    return FSRecords.getFlags(id);
  }

  @Override
  public boolean isDirectory(@NotNull final VirtualFile file) {
    return isDirectory(getFileAttributes(getFileId(file)));
  }

  private static boolean namesEqual(@NotNull VirtualFileSystem fs, @NotNull CharSequence n1, CharSequence n2) {
    return Comparing.equal(n1, n2, fs.isCaseSensitive());
  }

  @Override
  public boolean exists(@NotNull final VirtualFile fileOrDirectory) {
    return ((VirtualFileWithId)fileOrDirectory).getId() > 0;
  }

  @Override
  public long getTimeStamp(@NotNull final VirtualFile file) {
    return FSRecords.getTimestamp(getFileId(file));
  }

  @Override
  public void setTimeStamp(@NotNull final VirtualFile file, final long modStamp) throws IOException {
    final int id = getFileId(file);
    FSRecords.setTimestamp(id, modStamp);
    getDelegate(file).setTimeStamp(file, modStamp);
  }

  /** Returns the positive record id of {@code file}, or throws if the file is invalid (id <= 0). */
  private static int getFileId(@NotNull VirtualFile file) {
    final int id = ((VirtualFileWithId)file).getId();
    if (id <= 0) {
      throw new InvalidVirtualFileAccessException(file);
    }
    return id;
  }

  @Override
  public boolean isSymLink(@NotNull VirtualFile file) {
    return isSymLink(getFileAttributes(getFileId(file)));
  }

  @Override
  public String resolveSymLink(@NotNull VirtualFile file) {
    throw new UnsupportedOperationException();
  }

  @Override
  public boolean isWritable(@NotNull VirtualFile file) {
    return !BitUtil.isSet(getFileAttributes(getFileId(file)), IS_READ_ONLY);
  }

  @Override
  public boolean isHidden(@NotNull VirtualFile file) {
    return BitUtil.isSet(getFileAttributes(getFileId(file)), IS_HIDDEN);
  }

  @Override
  public void setWritable(@NotNull final VirtualFile file, final boolean writableFlag) throws IOException {
    getDelegate(file).setWritable(file, writableFlag);
    boolean oldWritable = isWritable(file);
    if (oldWritable != writableFlag) {
      processEvent(new VFilePropertyChangeEvent(this, file, VirtualFile.PROP_WRITABLE, oldWritable, writableFlag, false));
    }
  }

  /**
   * Finds (or persists) the record id of a child named {@code childName} under {@code parent}.
   * Returns 0 when the delegate FS reports no such child.
   */
  @Override
  public int getId(@NotNull VirtualFile parent, @NotNull String childName, @NotNull NewVirtualFileSystem fs) {
    int parentId = getFileId(parent);
    int[] children = FSRecords.list(parentId);

    if (children.length > 0) {
      // fast path, check that some child has same nameId as given name, this avoid O(N) on retrieving names for processing non-cached children
      int nameId = FSRecords.getNameId(childName);
      for (final int childId : children) {
        if (nameId == FSRecords.getNameId(childId)) {
          return childId;
        }
      }
      // for case sensitive system the above check is exhaustive in consistent state of vfs
    }

    for (final int childId : children) {
      if (namesEqual(fs, childName, FSRecords.getNameSequence(childId))) return childId;
    }

    final VirtualFile fake = new FakeVirtualFile(parent, childName);
    final FileAttributes attributes = fs.getAttributes(fake);
    if (attributes != null) {
      final int child = createAndFillRecord(fs, fake, parentId, attributes);
      FSRecords.updateList(parentId, ArrayUtil.append(children, child));
      return child;
    }

    return 0;
  }

  @Override
  public long getLength(@NotNull VirtualFile file) {
    long len;
    if (mustReloadContent(file)) {
      len = reloadLengthFromDelegate(file, getDelegate(file));
    }
    else {
      len = getLastRecordedLength(file);
    }
    return len;
  }
  @Override
  public long getLastRecordedLength(@NotNull VirtualFile file) {
    int id = getFileId(file);
    return FSRecords.getLength(id);
  }

  @NotNull
  @Override
  public VirtualFile copyFile(Object requestor, @NotNull VirtualFile file, @NotNull VirtualFile parent, @NotNull String name) throws IOException {
    getDelegate(file).copyFile(requestor, file, parent, name);
    processEvent(new VFileCopyEvent(requestor, file, parent, name));

    final VirtualFile child = parent.findChild(name);
    if (child == null) {
      throw new IOException("Cannot create child");
    }
    return child;
  }

  @NotNull
  @Override
  public VirtualFile createChildDirectory(Object requestor, @NotNull VirtualFile parent, @NotNull String dir) throws IOException {
    getDelegate(parent).createChildDirectory(requestor, parent, dir);
    processEvent(new VFileCreateEvent(requestor, parent, dir, true, false));

    final VirtualFile child = parent.findChild(dir);
    if (child == null) {
      throw new IOException("Cannot create child directory '" + dir + "' at " + parent.getPath());
    }
    return child;
  }

  @NotNull
  @Override
  public VirtualFile createChildFile(Object requestor, @NotNull VirtualFile parent, @NotNull String file) throws IOException {
    getDelegate(parent).createChildFile(requestor, parent, file);
    processEvent(new VFileCreateEvent(requestor, parent, file, false, false));

    final VirtualFile child = parent.findChild(file);
    if (child == null) {
      throw new IOException("Cannot create child file '" + file + "' at " + parent.getPath());
    }
    return child;
  }

  @Override
  public void deleteFile(final Object requestor, @NotNull final VirtualFile file) throws IOException {
    final NewVirtualFileSystem delegate = getDelegate(file);
    delegate.deleteFile(requestor, file);
    // Only publish the delete event once the delegate confirms the file is gone.
    if (!delegate.exists(file)) {
      processEvent(new VFileDeleteEvent(requestor, file, false));
    }
  }

  @Override
  public void renameFile(final Object requestor, @NotNull VirtualFile file, @NotNull String newName) throws IOException {
    getDelegate(file).renameFile(requestor, file, newName);
    String oldName = file.getName();
    if (!newName.equals(oldName)) {
      processEvent(new VFilePropertyChangeEvent(requestor, file, VirtualFile.PROP_NAME, oldName, newName, false));
    }
  }

  @Override
  @NotNull
  public byte[] contentsToByteArray(@NotNull final VirtualFile file) throws IOException {
    return contentsToByteArray(file, true);
  }

  /**
   * Returns the file content, serving from the persisted content cache when it is
   * valid, otherwise loading from the delegate FS and (conditionally) re-caching.
   */
  @Override
  @NotNull
  public byte[] contentsToByteArray(@NotNull final VirtualFile file, boolean cacheContent) throws IOException {
    InputStream contentStream = null;
    boolean reloadFromDelegate;
    boolean outdated;
    int fileId;
    long length = -1L;
    synchronized (myInputLock) {
      fileId = getFileId(file);
      outdated = checkFlag(fileId, MUST_RELOAD_CONTENT) || (length = FSRecords.getLength(fileId)) == -1L;
      reloadFromDelegate = outdated || (contentStream = readContent(file)) == null;
    }

    if (reloadFromDelegate) {
      final NewVirtualFileSystem delegate = getDelegate(file);

      final byte[] content;
      if (outdated) {
        // in this case, file can have out-of-date length. so, update it first (it's needed for correct contentsToByteArray() work)
        // see IDEA-90813 for possible bugs
        FSRecords.setLength(fileId, delegate.getLength(file));
        content = delegate.contentsToByteArray(file);
      }
      else {
        // a bit of optimization
        content = delegate.contentsToByteArray(file);
        FSRecords.setLength(fileId, content.length);
      }

      Application application = ApplicationManager.getApplication();
      // we should cache every local files content
      // because the local history feature is currently depends on this cache,
      // perforce offline mode as well
      if ((!delegate.isReadOnly() ||
           // do not cache archive content unless asked
           cacheContent && !application.isInternal() && !application.isUnitTestMode()) &&
          content.length <= PersistentFSConstants.FILE_LENGTH_TO_CACHE_THRESHOLD) {
        synchronized (myInputLock) {
          writeContent(file, new ByteSequence(content), delegate.isReadOnly());
          setFlag(file, MUST_RELOAD_CONTENT, false);
        }
      }

      return content;
    }
    else {
      try {
        assert length >= 0 : file;
        return FileUtil.loadBytes(contentStream, (int)length);
      }
      catch (IOException e) {
        FSRecords.handleError(e);
        return ArrayUtil.EMPTY_BYTE_ARRAY;
      }
    }
  }

  @Override
  @NotNull
  public byte[] contentsToByteArray(int contentId) throws IOException {
    final DataInputStream stream = readContentById(contentId);
    assert stream != null : contentId;
    return FileUtil.loadBytes(stream);
  }

  @Override
  @NotNull
  public InputStream getInputStream(@NotNull final VirtualFile file) throws IOException {
    synchronized (myInputLock) {
      InputStream contentStream;
      if (mustReloadContent(file) || (contentStream = readContent(file)) == null) {
        NewVirtualFileSystem delegate = getDelegate(file);
        long len = reloadLengthFromDelegate(file, delegate);
        InputStream nativeStream = delegate.getInputStream(file);

        // Too large to cache: hand back the delegate stream directly.
        if (len > PersistentFSConstants.FILE_LENGTH_TO_CACHE_THRESHOLD) return nativeStream;
        return createReplicator(file, nativeStream, len, delegate.isReadOnly());
      }
      else {
        return contentStream;
      }
    }
  }

  private static long reloadLengthFromDelegate(@NotNull VirtualFile file, @NotNull NewVirtualFileSystem delegate) {
    final long len = delegate.getLength(file);
    FSRecords.setLength(getFileId(file), len);
    return len;
  }

  /**
   * Wraps {@code nativeStream} so that everything the caller reads is replicated into
   * the persistent content storage when the stream is closed.
   */
  private InputStream createReplicator(@NotNull final VirtualFile file,
                                       final InputStream nativeStream,
                                       final long fileLength,
                                       final boolean readOnly) throws IOException {
    if (nativeStream instanceof BufferExposingByteArrayInputStream) {
      // optimization
      BufferExposingByteArrayInputStream byteStream = (BufferExposingByteArrayInputStream)nativeStream;
      byte[] bytes = byteStream.getInternalBuffer();
      storeContentToStorage(fileLength, file, readOnly, bytes, bytes.length);
      return nativeStream;
    }

    @SuppressWarnings("IOResourceOpenedButNotSafelyClosed")
    final BufferExposingByteArrayOutputStream cache = new BufferExposingByteArrayOutputStream((int)fileLength);
    return new ReplicatorInputStream(nativeStream, cache) {
      @Override
      public void close() throws IOException {
        super.close();
        storeContentToStorage(fileLength, file, readOnly, cache.getInternalBuffer(), cache.size());
      }
    };
  }

  private void storeContentToStorage(long fileLength,
                                     @NotNull VirtualFile file,
                                     boolean readOnly, @NotNull byte[] bytes, int bytesLength) throws IOException {
    synchronized (myInputLock) {
      // A partial read (caller closed early) must not be cached as the full content.
      if (bytesLength == fileLength) {
        writeContent(file, new ByteSequence(bytes, 0, bytesLength), readOnly);
        setFlag(file, MUST_RELOAD_CONTENT, false);
      }
      else {
        setFlag(file, MUST_RELOAD_CONTENT, true);
      }
    }
  }

  private static boolean mustReloadContent(@NotNull VirtualFile file) {
    int fileId = getFileId(file);
    return checkFlag(fileId, MUST_RELOAD_CONTENT) || FSRecords.getLength(fileId) == -1L;
  }

  /**
   * Returns a buffering stream; on close it publishes the content-change event, writes
   * the bytes both to the delegate FS and to persistent content storage, and updates
   * the modification stamp.
   */
  @Override
  @NotNull
  public OutputStream getOutputStream(@NotNull final VirtualFile file,
                                      final Object requestor,
                                      final long modStamp,
                                      final long timeStamp) throws IOException {
    return new ByteArrayOutputStream() {
      private boolean closed; // protection against user calling .close() twice

      @Override
      public void close() throws IOException {
        if (closed) return;
        super.close();

        ApplicationManager.getApplication().assertWriteAccessAllowed();

        VFileContentChangeEvent event = new VFileContentChangeEvent(requestor, file, file.getModificationStamp(), modStamp, false);
        List<VFileContentChangeEvent> events = Collections.singletonList(event);

        BulkFileListener publisher = myEventBus.syncPublisher(VirtualFileManager.VFS_CHANGES);
        publisher.before(events);

        NewVirtualFileSystem delegate = getDelegate(file);
        OutputStream ioFileStream = delegate.getOutputStream(file, requestor, modStamp, timeStamp);
        // FSRecords.ContentOutputStream already buffered, no need to wrap in BufferedStream
        OutputStream persistenceStream = writeContent(file, delegate.isReadOnly());

        try {
          persistenceStream.write(buf, 0, count);
        }
        finally {
          try {
            ioFileStream.write(buf, 0, count);
          }
          finally {
            closed = true;

            persistenceStream.close();
            ioFileStream.close();
            executeTouch(file, false, event.getModificationStamp());
            publisher.after(events);
          }
        }
      }
    };
  }

  @Override
  public int acquireContent(@NotNull VirtualFile file) {
    return FSRecords.acquireFileContent(getFileId(file));
  }

  @Override
  public void releaseContent(int contentId) {
    FSRecords.releaseContent(contentId);
  }

  @Override
  public int getCurrentContentId(@NotNull VirtualFile file) {
    return FSRecords.getContentId(getFileId(file));
  }

  @Override
  public void moveFile(final Object requestor, @NotNull final VirtualFile file, @NotNull final VirtualFile newParent) throws IOException {
    getDelegate(file).moveFile(requestor, file, newParent);
    processEvent(new VFileMoveEvent(requestor, file, newParent));
  }

  private void processEvent(@NotNull VFileEvent event) {
    processEvents(Collections.singletonList(event));
  }

  // Pairs a deletion event with its index in the original event list.
  private static class EventWrapper {
    private final VFileDeleteEvent event;
    private final int id;

    private EventWrapper(final VFileDeleteEvent event, final int id) {
      this.event = event;
      this.id = id;
    }
  }

  // Orders deletions shallow-first so ancestors are seen before their descendants.
  // NOTE(review): subtraction-based comparator; safe only because file depths are
  // small non-negative ints — Integer.compare would be the defensive choice.
  @NotNull
  private static final Comparator<EventWrapper> DEPTH_COMPARATOR = (o1, o2) -> o1.event.getFileDepth() - o2.event.getFileDepth();

  /**
   * Drops invalid events, and drops deletion events for files that live under a
   * directory which is itself being deleted in the same batch.
   */
  @NotNull
  private static List<VFileEvent> validateEvents(@NotNull List<VFileEvent> events) {
    final List<EventWrapper> deletionEvents = ContainerUtil.newArrayList();
    for (int i = 0, size = events.size(); i < size; i++) {
      final VFileEvent event = events.get(i);
      if (event instanceof VFileDeleteEvent && event.isValid()) {
        deletionEvents.add(new EventWrapper((VFileDeleteEvent)event, i));
      }
    }

    final TIntHashSet invalidIDs;
    if (deletionEvents.isEmpty()) {
      invalidIDs = EmptyIntHashSet.INSTANCE;
    }
    else {
      ContainerUtil.quickSort(deletionEvents, DEPTH_COMPARATOR);

      invalidIDs = new TIntHashSet(deletionEvents.size());
      final Set<VirtualFile> dirsToBeDeleted = new THashSet<>(deletionEvents.size());
      nextEvent:
      for (EventWrapper wrapper : deletionEvents) {
        final VirtualFile candidate = wrapper.event.getFile();
        VirtualFile parent = candidate;
        while (parent != null) {
          if (dirsToBeDeleted.contains(parent)) {
            invalidIDs.add(wrapper.id);
            continue nextEvent;
          }
          parent = parent.getParent();
        }

        if (candidate.isDirectory()) {
          dirsToBeDeleted.add(candidate);
        }
      }
    }

    final List<VFileEvent> filtered = new ArrayList<>(events.size() - invalidIDs.size());
    for (int i = 0, size = events.size(); i < size; i++) {
      final VFileEvent event = events.get(i);
      if (event.isValid() && !(event instanceof VFileDeleteEvent && invalidIDs.contains(i))) {
        filtered.add(event);
      }
    }
    return filtered;
  }

  @Override
  public void processEvents(@NotNull List<VFileEvent> events) {
    ApplicationManager.getApplication().assertWriteAccessAllowed();

    List<VFileEvent> validated = validateEvents(events);

    BulkFileListener publisher = myEventBus.syncPublisher(VirtualFileManager.VFS_CHANGES);
    publisher.before(validated);

    // Create/delete events are grouped per parent so each parent's child list is
    // rewritten once; all other events are applied immediately.
    THashMap<VirtualFile, List<VFileEvent>> parentToChildrenEventsChanges = null;
    for (VFileEvent event : validated) {
      VirtualFile changedParent = null;
      if (event instanceof VFileCreateEvent) {
        changedParent = ((VFileCreateEvent)event).getParent();
        ((VFileCreateEvent)event).resetCache();
      }
      else if (event instanceof VFileDeleteEvent) {
        changedParent = ((VFileDeleteEvent)event).getFile().getParent();
      }

      if (changedParent != null) {
        if (parentToChildrenEventsChanges == null) parentToChildrenEventsChanges = new THashMap<>();
        List<VFileEvent> parentChildrenChanges = parentToChildrenEventsChanges.get(changedParent);
        if (parentChildrenChanges == null) {
          parentToChildrenEventsChanges.put(changedParent, parentChildrenChanges = new SmartList<>());
        }
        parentChildrenChanges.add(event);
      }
      else {
        applyEvent(event);
      }
    }

    if (parentToChildrenEventsChanges != null) {
      parentToChildrenEventsChanges.forEachEntry((parent, childrenEvents) -> {
        applyChildrenChangeEvents(parent, childrenEvents);
        return true;
      });
      parentToChildrenEventsChanges.clear();
    }

    publisher.after(validated);
  }

  /** Applies a batch of create/delete events that all share the same {@code parent}. */
  private void applyChildrenChangeEvents(@NotNull VirtualFile parent, @NotNull List<VFileEvent> events) {
    final NewVirtualFileSystem delegate = getDelegate(parent);
    TIntArrayList childrenIdsUpdated = new TIntArrayList();
    final int parentId = getFileId(parent);
    assert parentId != 0;
    TIntHashSet parentChildrenIds = new TIntHashSet(FSRecords.list(parentId));

    boolean hasRemovedChildren = false;

    // Deleted children are recorded with negated ids to distinguish them below.
    List<VirtualFile> childrenToBeUpdated = new SmartList<>();
    for (VFileEvent event : events) {
      if (event instanceof VFileCreateEvent) {
        String name = ((VFileCreateEvent)event).getChildName();
        final VirtualFile fake = new FakeVirtualFile(parent, name);
        final FileAttributes attributes = delegate.getAttributes(fake);

        if (attributes != null) {
          final int childId = createAndFillRecord(delegate, fake, parentId, attributes);
          assert parent instanceof VirtualDirectoryImpl : parent;
          final VirtualDirectoryImpl dir = (VirtualDirectoryImpl)parent;
          VirtualFileSystemEntry child = dir.createChild(name, childId, dir.getFileSystem());
          childrenToBeUpdated.add(child);
          childrenIdsUpdated.add(childId);
          parentChildrenIds.add(childId);
        }
      }
      else if (event instanceof VFileDeleteEvent) {
        VirtualFile file = ((VFileDeleteEvent)event).getFile();
        if (!file.exists()) {
          LOG.error("Deleting a file, which does not exist: " + file.getPath());
          continue;
        }

        hasRemovedChildren = true;
        int id = getFileId(file);

        childrenToBeUpdated.add(file);
        childrenIdsUpdated.add(-id);
        parentChildrenIds.remove(id);
      }
    }

    FSRecords.updateList(parentId, parentChildrenIds.toArray());

    if (hasRemovedChildren) clearIdCache();
    VirtualDirectoryImpl parentImpl = (VirtualDirectoryImpl)parent;

    for (int i = 0, len = childrenIdsUpdated.size(); i < len; ++i) {
      final int childId = childrenIdsUpdated.get(i);
      final VirtualFile childFile = childrenToBeUpdated.get(i);

      if (childId > 0) {
        parentImpl.addChild((VirtualFileSystemEntry)childFile);
      }
      else {
        FSRecords.deleteRecordRecursively(-childId);
        parentImpl.removeChild(childFile);
        invalidateSubtree(childFile);
      }
    }
  }

  /**
   * Finds or lazily creates the root entry for {@code basePath} on file system
   * {@code fs}, registering it in myRoots / myRootsById / myIdToDirCache.
   */
  @Override
  @Nullable
  public VirtualFileSystemEntry findRoot(@NotNull final String basePath, @NotNull NewVirtualFileSystem fs) {
    if (basePath.isEmpty()) {
      LOG.error("Invalid root, fs=" + fs);
      return null;
    }

    String rootUrl = normalizeRootUrl(basePath, fs);

    VirtualFileSystemEntry root = myRoots.get(rootUrl);
    if (root != null) return root;

    String rootName;
    if (fs instanceof ArchiveFileSystem) {
      VirtualFile localFile = ((ArchiveFileSystem)fs).findLocalByRootPath(basePath);
      if (localFile == null) return null;
      rootName = localFile.getName();
    }
    else {
      rootName = basePath;
    }

    FileAttributes attributes = fs.getAttributes(new StubVirtualFile() {
      @NotNull @Override public String getPath() { return basePath; }
      @Nullable @Override public VirtualFile getParent() { return null; }
    });
    if (attributes == null || !attributes.isDirectory()) {
      return null;
    }

    int rootId = FSRecords.findRootRecord(rootUrl);

    VfsData.Segment segment = VfsData.getSegment(rootId, true);
    VfsData.DirectoryData directoryData = new VfsData.DirectoryData();
    VirtualFileSystemEntry newRoot = new FsRoot(rootId, segment, directoryData, fs, rootName, StringUtil.trimEnd(basePath, "/"));

    boolean mark;
    synchronized (myRoots) {
      // Double-checked under the lock: another thread may have registered the root.
      root = myRoots.get(rootUrl);
      if (root != null) return root;

      try {
        VfsData.initFile(rootId, segment, -1, directoryData);
      }
      catch (VfsData.FileAlreadyCreatedException e) {
        for (Map.Entry<String, VirtualFileSystemEntry> entry : myRoots.entrySet()) {
          final VirtualFileSystemEntry existingRoot = entry.getValue();
          if (Math.abs(existingRoot.getId()) == rootId) {
            throw new RuntimeException("Duplicate FS roots: " + rootUrl + " and " + entry.getKey() + ", id=" + rootId + ", valid=" + existingRoot.isValid(), e);
          }
        }
        throw new RuntimeException("No root duplication, roots=" + Arrays.toString(FSRecords.listAll(1)), e);
      }
      incStructuralModificationCount();
      mark = writeAttributesToRecord(rootId, 0, newRoot, fs, attributes);

      myRoots.put(rootUrl, newRoot);
      myRootsById.put(rootId, newRoot);
      myIdToDirCache.put(rootId, newRoot);
    }

    if (!mark && attributes.lastModified != FSRecords.getTimestamp(rootId)) {
      newRoot.markDirtyRecursively();
    }

    LOG.assertTrue(rootId == newRoot.getId(), "root=" + newRoot + " expected=" + rootId + " actual=" + newRoot.getId());

    return newRoot;
  }
  /** Builds the canonical "protocol://normalized-path" root url (no trailing slash). */
  @NotNull
  private static String normalizeRootUrl(@NotNull String basePath, @NotNull NewVirtualFileSystem fs) {
    // need to protect against relative path of the form "/x/../y"
    String normalized = VfsImplUtil.normalize(fs, FileUtil.toCanonicalPath(basePath));
    String protocol = fs.getProtocol();
    StringBuilder result = new StringBuilder(protocol.length() + URLUtil.SCHEME_SEPARATOR.length() + normalized.length());
    result.append(protocol).append(URLUtil.SCHEME_SEPARATOR).append(normalized);
    return StringUtil.endsWithChar(result, '/') ? UriUtil.trimTrailingSlashes(result.toString()) : result.toString();
  }

  @Override
  public void clearIdCache() {
    // remove all except myRootsById contents
    for (Iterator<ConcurrentIntObjectMap.IntEntry<VirtualFileSystemEntry>> iterator = myIdToDirCache.entries().iterator(); iterator.hasNext(); ) {
      ConcurrentIntObjectMap.IntEntry<VirtualFileSystemEntry> entry = iterator.next();
      int id = entry.getKey();
      if (!myRootsById.containsKey(id)) {
        iterator.remove();
      }
    }
  }

  @Override
  @Nullable
  public NewVirtualFile findFileById(final int id) {
    return findFileById(id, false);
  }

  @Override
  public NewVirtualFile findFileByIdIfCached(final int id) {
    return findFileById(id, true);
  }

  /**
   * Resolves a record id to a file entry by walking down from the nearest cached
   * ancestor (or root), caching intermediate directories on the way.
   */
  @Nullable
  private VirtualFileSystemEntry findFileById(int id, boolean cachedOnly) {
    VirtualFileSystemEntry cached = myIdToDirCache.get(id);
    if (cached != null) return cached;

    TIntArrayList parents = FSRecords.getParents(id, myIdToDirCache);
    // the last element of the parents is either a root or already cached element
    int parentId = parents.get(parents.size() - 1);
    VirtualFileSystemEntry result = myIdToDirCache.get(parentId);

    for (int i=parents.size() - 2; i>=0; i--) {
      if (!(result instanceof VirtualDirectoryImpl)) {
        return null;
      }

      parentId = parents.get(i);
      result = ((VirtualDirectoryImpl)result).findChildById(parentId, cachedOnly);
      if (result instanceof VirtualDirectoryImpl) {
        VirtualFileSystemEntry old = myIdToDirCache.putIfAbsent(parentId, result);
        if (old != null) result = old;
      }
    }

    return result;
  }

  @Override
  @NotNull
  public VirtualFile[] getRoots() {
    Collection<VirtualFileSystemEntry> roots = myRoots.values();
    return ArrayUtil.stripTrailingNulls(VfsUtilCore.toVirtualFileArray(roots));
  }

  @Override
  @NotNull
  public VirtualFile[] getRoots(@NotNull final NewVirtualFileSystem fs) {
    final List<VirtualFile> roots = new ArrayList<>();

    for (NewVirtualFile root : myRoots.values()) {
      if (root.getFileSystem() == fs) {
        roots.add(root);
      }
    }

    return VfsUtilCore.toVirtualFileArray(roots);
  }

  @Override
  @NotNull
  public VirtualFile[] getLocalRoots() {
    List<VirtualFile> roots = ContainerUtil.newSmartList();

    for (NewVirtualFile root : myRoots.values()) {
      if (root.isInLocalFileSystem() && !(root.getFileSystem() instanceof TempFileSystem)) {
        roots.add(root);
      }
    }
    return VfsUtilCore.toVirtualFileArray(roots);
  }

  /**
   * Dispatches a single validated event to the matching execute* helper.
   * Returns the created entry for create/copy events, null otherwise.
   */
  private VirtualFileSystemEntry applyEvent(@NotNull VFileEvent event) {
    if (LOG.isDebugEnabled()) {
      LOG.debug("Applying " + event);
    }
    try {
      if (event instanceof VFileCreateEvent) {
        final VFileCreateEvent createEvent = (VFileCreateEvent)event;
        return executeCreateChild(createEvent.getParent(), createEvent.getChildName());
      }
      else if (event instanceof VFileDeleteEvent) {
        final VFileDeleteEvent deleteEvent = (VFileDeleteEvent)event;
        executeDelete(deleteEvent.getFile());
      }
      else if (event instanceof VFileContentChangeEvent) {
        final VFileContentChangeEvent contentUpdateEvent = (VFileContentChangeEvent)event;
        executeTouch(contentUpdateEvent.getFile(), contentUpdateEvent.isFromRefresh(), contentUpdateEvent.getModificationStamp());
      }
      else if (event instanceof VFileCopyEvent) {
        final VFileCopyEvent copyEvent = (VFileCopyEvent)event;
        return executeCreateChild(copyEvent.getNewParent(), copyEvent.getNewChildName());
      }
      else if (event instanceof VFileMoveEvent) {
        final VFileMoveEvent moveEvent = (VFileMoveEvent)event;
        executeMove(moveEvent.getFile(), moveEvent.getNewParent());
      }
      else if (event instanceof VFilePropertyChangeEvent) {
        final VFilePropertyChangeEvent propertyChangeEvent = (VFilePropertyChangeEvent)event;
        VirtualFile file = propertyChangeEvent.getFile();
        Object newValue = propertyChangeEvent.getNewValue();
        if (VirtualFile.PROP_NAME.equals(propertyChangeEvent.getPropertyName())) {
          executeRename(file, (String)newValue);
        }
        else if (VirtualFile.PROP_WRITABLE.equals(propertyChangeEvent.getPropertyName())) {
          executeSetWritable(file, ((Boolean)newValue).booleanValue());
          if (LOG.isDebugEnabled()) {
            LOG.debug("File " + file + " writable=" + file.isWritable() + " id=" + getFileId(file));
          }
        }
        else if (VirtualFile.PROP_HIDDEN.equals(propertyChangeEvent.getPropertyName())) {
          executeSetHidden(file, ((Boolean)newValue).booleanValue());
        }
        else if (VirtualFile.PROP_SYMLINK_TARGET.equals(propertyChangeEvent.getPropertyName())) {
          executeSetTarget(file, (String)newValue);
          markForContentReloadRecursively(getFileId(file));
        }
      }
    }
    catch (Exception e) {
      // Exception applying single event should not prevent other events from applying.
      LOG.error(e);
    }
    return null;
  }

  @NotNull
  @NonNls
  public String toString() {
    return "PersistentFS";
  }

  private static VirtualFileSystemEntry executeCreateChild(@NotNull VirtualFile parent, @NotNull String name) {
    final NewVirtualFileSystem delegate = getDelegate(parent);
    final VirtualFile fake = new FakeVirtualFile(parent, name);
    final FileAttributes attributes = delegate.getAttributes(fake);
    if (attributes != null) {
      final int parentId = getFileId(parent);
      final int childId = createAndFillRecord(delegate, fake, parentId, attributes);
      appendIdToParentList(parentId, childId);
      assert parent instanceof VirtualDirectoryImpl : parent;
      final VirtualDirectoryImpl dir = (VirtualDirectoryImpl)parent;
      VirtualFileSystemEntry child = dir.createChild(name, childId, dir.getFileSystem());
      dir.addChild(child);
      return child;
    }
    return null;
  }

  private static int createAndFillRecord(@NotNull NewVirtualFileSystem delegateSystem,
                                         @NotNull VirtualFile delegateFile,
                                         int parentId,
                                         @NotNull FileAttributes attributes) {
    final int childId = FSRecords.createRecord();
    writeAttributesToRecord(childId, parentId, delegateFile, delegateSystem, attributes);
    return childId;
  }

  private static void appendIdToParentList(final int parentId, final int childId) {
    int[] childrenList = FSRecords.list(parentId);
    childrenList = ArrayUtil.append(childrenList, childId);
    FSRecords.updateList(parentId, childrenList);
  }

  /** Removes the file from its parent (or the root tables), deletes its records recursively and invalidates the cached subtree. */
  private void executeDelete(@NotNull VirtualFile file) {
    if (!file.exists()) {
      LOG.error("Deleting a file, which does not exist: " + file.getPath());
      return;
    }
    clearIdCache();

    int id = getFileId(file);

    final VirtualFile parent = file.getParent();
    final int parentId = parent == null ? 0 : getFileId(parent);

    if (parentId == 0) {
      // No parent: this is a root; drop it from all root registries.
      String rootUrl = normalizeRootUrl(file.getPath(), (NewVirtualFileSystem)file.getFileSystem());
      synchronized (myRoots) {
        myRoots.remove(rootUrl);
        myRootsById.remove(id);
        myIdToDirCache.remove(id);
        FSRecords.deleteRootRecord(id);
      }
    }
    else {
      removeIdFromParentList(parentId, id, parent, file);
      VirtualDirectoryImpl directory = (VirtualDirectoryImpl)file.getParent();
      assert directory != null : file;
      directory.removeChild(file);
    }

    FSRecords.deleteRecordRecursively(id);

    invalidateSubtree(file);
    incStructuralModificationCount();
  }

  private static void invalidateSubtree(@NotNull VirtualFile file) {
    final VirtualFileSystemEntry impl = (VirtualFileSystemEntry)file;
    impl.invalidate();
    for (VirtualFile child : impl.getCachedChildren()) {
      invalidateSubtree(child);
    }
  }

  private static void removeIdFromParentList(final int parentId, final int id, @NotNull VirtualFile parent, VirtualFile file) {
    int[] childList = FSRecords.list(parentId);

    int index = ArrayUtil.indexOf(childList, id);
    if (index == -1) {
      throw new RuntimeException("Cannot find child (" + id + ")" + file
                                 + "\n\tin (" + parentId + ")" + parent
                                 + "\n\tactual children:" + Arrays.toString(childList));
    }
    childList = ArrayUtil.remove(childList, index);
    FSRecords.updateList(parentId, childList);
  }

  private static void executeRename(@NotNull VirtualFile file, @NotNull final String newName) {
    final int id = getFileId(file);
    FSRecords.setName(id, newName);
    ((VirtualFileSystemEntry)file).setNewName(newName);
  }

  private static void executeSetWritable(@NotNull VirtualFile file, boolean writableFlag) {
    setFlag(file, IS_READ_ONLY, !writableFlag);
    ((VirtualFileSystemEntry)file).updateProperty(VirtualFile.PROP_WRITABLE, writableFlag);
  }

  private static void executeSetHidden(@NotNull VirtualFile file, boolean hiddenFlag) {
    setFlag(file, IS_HIDDEN, hiddenFlag);
    ((VirtualFileSystemEntry)file).updateProperty(VirtualFile.PROP_HIDDEN, hiddenFlag);
  }

  private static void executeSetTarget(@NotNull VirtualFile file, String target) {
    ((VirtualFileSystemEntry)file).setLinkTarget(target);
  }

  private static void setFlag(@NotNull VirtualFile file, int mask, boolean value) {
    setFlag(getFileId(file), mask, value);
  }

  /** Sets or clears {@code mask} in the record flags; writes only when the value actually changes. */
  private static void setFlag(final int id, final int mask, final boolean value) {
    int oldFlags = FSRecords.getFlags(id);
    int flags = value ? oldFlags | mask : oldFlags & ~mask;

    if (oldFlags != flags) {
      FSRecords.setFlags(id, flags, true);
    }
  }

  private static boolean checkFlag(int fileId, int mask) {
    return BitUtil.isSet(FSRecords.getFlags(fileId), mask);
  }

  /** Refreshes persisted length/timestamp from the delegate and updates the modification stamp. */
  private static void executeTouch(@NotNull VirtualFile file, boolean reloadContentFromDelegate, long newModificationStamp) {
    if (reloadContentFromDelegate) {
      setFlag(file, MUST_RELOAD_CONTENT, true);
    }

    final NewVirtualFileSystem delegate = getDelegate(file);
    final FileAttributes attributes = delegate.getAttributes(file);
    FSRecords.setLength(getFileId(file), attributes != null ? attributes.length : DEFAULT_LENGTH);
    FSRecords.setTimestamp(getFileId(file), attributes != null ? attributes.lastModified : DEFAULT_TIMESTAMP);

    ((VirtualFileSystemEntry)file).setModificationStamp(newModificationStamp);
  }

  private void executeMove(@NotNull VirtualFile file, @NotNull VirtualFile newParent) {
    clearIdCache();

    final int fileId = getFileId(file);
    final int newParentId = getFileId(newParent);
    final int oldParentId = getFileId(file.getParent());

    removeIdFromParentList(oldParentId, fileId, file.getParent(), file);
    FSRecords.setParent(fileId, newParentId);
    appendIdToParentList(newParentId, fileId);
    ((VirtualFileSystemEntry)file).setParent(newParent);
  }

  @Override
  public String getName(int id) {
    assert id > 0;
    return FSRecords.getName(id);
  }

  @TestOnly
  public void cleanPersistedContents() {
    final int[] roots = FSRecords.listRoots();
    for (int root : roots) {
      markForContentReloadRecursively(root);
    }
  }

  private void markForContentReloadRecursively(int id) {
    if (isDirectory(getFileAttributes(id))) {
      for (int child : FSRecords.list(id)) {
        markForContentReloadRecursively(child);
      }
    }
    else {
      setFlag(id, MUST_RELOAD_CONTENT, true);
    }
  }

  /** Immutable root directory entry: fixed name and path, refuses rename/reparent. */
  private static class FsRoot extends VirtualDirectoryImpl {
    private final String myName;
    private final String myPathBeforeSlash;

    private FsRoot(int id, VfsData.Segment segment, VfsData.DirectoryData data, NewVirtualFileSystem fs, String name, String pathBeforeSlash) {
      super(id, segment, data, null, fs);
      myName = name;
      myPathBeforeSlash = pathBeforeSlash;
    }

    @NotNull
    @Override
    public CharSequence getNameSequence() {
      return myName;
    }

    @Override
    protected char[] appendPathOnFileSystem(int pathLength, int[] position) {
      char[] chars = new char[pathLength + myPathBeforeSlash.length()];
      position[0] = copyString(chars, position[0], myPathBeforeSlash);
      return chars;
    }

    @Override
    public void setNewName(@NotNull String newName) {
      throw new IncorrectOperationException();
    }

    @Override
    public final void setParent(@NotNull VirtualFile newParent) {
      throw new IncorrectOperationException();
    }

    @NotNull
    @Override
    public String getPath() { return
myPathBeforeSlash + '/'; } @NotNull @Override public String getUrl() { return getFileSystem().getProtocol() + "://" + getPath(); } } }
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.tools.rumen; import java.util.Set; import java.util.TreeSet; import org.codehaus.jackson.annotate.JsonAnySetter; // HACK ALERT!!! This "should" have have two subclasses, which might be called // LoggedMapTaskAttempt and LoggedReduceTaskAttempt, but // the Jackson implementation of JSON doesn't handle a // superclass-valued field. /** * A {@link LoggedTaskAttempt} represents an attempt to run an hadoop task in a * hadoop job. Note that a task can have several attempts. * * All of the public methods are simply accessors for the instance variables we * want to write out in the JSON files. 
* */ public class LoggedTaskAttempt implements DeepCompare { String attemptID; Pre21JobHistoryConstants.Values result; long startTime = -1L; long finishTime = -1L; String hostName; long hdfsBytesRead = -1L; long hdfsBytesWritten = -1L; long fileBytesRead = -1L; long fileBytesWritten = -1L; long mapInputRecords = -1L; long mapInputBytes = -1L; long mapOutputBytes = -1L; long mapOutputRecords = -1L; long combineInputRecords = -1L; long reduceInputGroups = -1L; long reduceInputRecords = -1L; long reduceShuffleBytes = -1L; long reduceOutputRecords = -1L; long spilledRecords = -1L; long shuffleFinished = -1L; long sortFinished = -1L; LoggedLocation location; LoggedTaskAttempt() { super(); } static private Set<String> alreadySeenAnySetterAttributes = new TreeSet<String>(); @SuppressWarnings("unused") // for input parameter ignored. @JsonAnySetter public void setUnknownAttribute(String attributeName, Object ignored) { if (!alreadySeenAnySetterAttributes.contains(attributeName)) { alreadySeenAnySetterAttributes.add(attributeName); System.err.println("In LoggedJob, we saw the unknown attribute " + attributeName + "."); } } public long getShuffleFinished() { return shuffleFinished; } void setShuffleFinished(long shuffleFinished) { this.shuffleFinished = shuffleFinished; } public long getSortFinished() { return sortFinished; } void setSortFinished(long sortFinished) { this.sortFinished = sortFinished; } public String getAttemptID() { return attemptID; } void setAttemptID(String attemptID) { this.attemptID = attemptID; } public Pre21JobHistoryConstants.Values getResult() { return result; } void setResult(Pre21JobHistoryConstants.Values result) { this.result = result; } public long getStartTime() { return startTime; } void setStartTime(long startTime) { this.startTime = startTime; } public long getFinishTime() { return finishTime; } void setFinishTime(long finishTime) { this.finishTime = finishTime; } public String getHostName() { return hostName; } void setHostName(String 
hostName) { this.hostName = (hostName == null) ? null : hostName.intern(); } public long getHdfsBytesRead() { return hdfsBytesRead; } void setHdfsBytesRead(long hdfsBytesRead) { this.hdfsBytesRead = hdfsBytesRead; } public long getHdfsBytesWritten() { return hdfsBytesWritten; } void setHdfsBytesWritten(long hdfsBytesWritten) { this.hdfsBytesWritten = hdfsBytesWritten; } public long getFileBytesRead() { return fileBytesRead; } void setFileBytesRead(long fileBytesRead) { this.fileBytesRead = fileBytesRead; } public long getFileBytesWritten() { return fileBytesWritten; } void setFileBytesWritten(long fileBytesWritten) { this.fileBytesWritten = fileBytesWritten; } public long getMapInputRecords() { return mapInputRecords; } void setMapInputRecords(long mapInputRecords) { this.mapInputRecords = mapInputRecords; } public long getMapOutputBytes() { return mapOutputBytes; } void setMapOutputBytes(long mapOutputBytes) { this.mapOutputBytes = mapOutputBytes; } public long getMapOutputRecords() { return mapOutputRecords; } void setMapOutputRecords(long mapOutputRecords) { this.mapOutputRecords = mapOutputRecords; } public long getCombineInputRecords() { return combineInputRecords; } void setCombineInputRecords(long combineInputRecords) { this.combineInputRecords = combineInputRecords; } public long getReduceInputGroups() { return reduceInputGroups; } void setReduceInputGroups(long reduceInputGroups) { this.reduceInputGroups = reduceInputGroups; } public long getReduceInputRecords() { return reduceInputRecords; } void setReduceInputRecords(long reduceInputRecords) { this.reduceInputRecords = reduceInputRecords; } public long getReduceShuffleBytes() { return reduceShuffleBytes; } void setReduceShuffleBytes(long reduceShuffleBytes) { this.reduceShuffleBytes = reduceShuffleBytes; } public long getReduceOutputRecords() { return reduceOutputRecords; } void setReduceOutputRecords(long reduceOutputRecords) { this.reduceOutputRecords = reduceOutputRecords; } public long 
getSpilledRecords() { return spilledRecords; } void setSpilledRecords(long spilledRecords) { this.spilledRecords = spilledRecords; } public LoggedLocation getLocation() { return location; } void setLocation(LoggedLocation location) { this.location = location; } public long getMapInputBytes() { return mapInputBytes; } void setMapInputBytes(long mapInputBytes) { this.mapInputBytes = mapInputBytes; } private void compare1(String c1, String c2, TreePath loc, String eltname) throws DeepInequalityException { if (c1 == null && c2 == null) { return; } if (c1 == null || c2 == null || !c1.equals(c2)) { throw new DeepInequalityException(eltname + " miscompared", new TreePath( loc, eltname)); } } private void compare1(long c1, long c2, TreePath loc, String eltname) throws DeepInequalityException { if (c1 != c2) { throw new DeepInequalityException(eltname + " miscompared", new TreePath( loc, eltname)); } } private void compare1(Pre21JobHistoryConstants.Values c1, Pre21JobHistoryConstants.Values c2, TreePath loc, String eltname) throws DeepInequalityException { if (c1 != c2) { throw new DeepInequalityException(eltname + " miscompared", new TreePath( loc, eltname)); } } private void compare1(LoggedLocation c1, LoggedLocation c2, TreePath loc, String eltname) throws DeepInequalityException { if (c1 == null && c2 == null) { return; } TreePath recurse = new TreePath(loc, eltname); if (c1 == null || c2 == null) { throw new DeepInequalityException(eltname + " miscompared", recurse); } c1.deepCompare(c2, recurse); } public void deepCompare(DeepCompare comparand, TreePath loc) throws DeepInequalityException { if (!(comparand instanceof LoggedTaskAttempt)) { throw new DeepInequalityException("comparand has wrong type", loc); } LoggedTaskAttempt other = (LoggedTaskAttempt) comparand; compare1(attemptID, other.attemptID, loc, "attemptID"); compare1(result, other.result, loc, "result"); compare1(startTime, other.startTime, loc, "startTime"); compare1(finishTime, other.finishTime, loc, 
"finishTime"); compare1(hostName, other.hostName, loc, "hostName"); compare1(hdfsBytesRead, other.hdfsBytesRead, loc, "hdfsBytesRead"); compare1(hdfsBytesWritten, other.hdfsBytesWritten, loc, "hdfsBytesWritten"); compare1(fileBytesRead, other.fileBytesRead, loc, "fileBytesRead"); compare1(fileBytesWritten, other.fileBytesWritten, loc, "fileBytesWritten"); compare1(mapInputBytes, other.mapInputBytes, loc, "mapInputBytes"); compare1(mapInputRecords, other.mapInputRecords, loc, "mapInputRecords"); compare1(mapOutputBytes, other.mapOutputBytes, loc, "mapOutputBytes"); compare1(mapOutputRecords, other.mapOutputRecords, loc, "mapOutputRecords"); compare1(combineInputRecords, other.combineInputRecords, loc, "combineInputRecords"); compare1(reduceInputGroups, other.reduceInputGroups, loc, "reduceInputGroups"); compare1(reduceInputRecords, other.reduceInputRecords, loc, "reduceInputRecords"); compare1(reduceShuffleBytes, other.reduceShuffleBytes, loc, "reduceShuffleBytes"); compare1(reduceOutputRecords, other.reduceOutputRecords, loc, "reduceOutputRecords"); compare1(spilledRecords, other.spilledRecords, loc, "spilledRecords"); compare1(shuffleFinished, other.shuffleFinished, loc, "shuffleFinished"); compare1(sortFinished, other.sortFinished, loc, "sortFinished"); compare1(location, other.location, loc, "location"); } }
/* * The MIT License * * Copyright (c) 2009 The Broad Institute * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ package picard.sam; import htsjdk.samtools.util.Histogram; import picard.analysis.MergeableMetricBase; /** * Metrics that are calculated during the process of marking duplicates * within a stream of SAMRecords. */ public class DuplicationMetrics extends MergeableMetricBase { /** * The library on which the duplicate marking was performed. */ @MergeByAssertEquals public String LIBRARY; /** * The number of mapped reads examined which did not have a mapped mate pair, * either because the read is unpaired, or the read is paired to an unmapped mate. */ @MergeByAdding public long UNPAIRED_READS_EXAMINED; /** * The number of mapped read pairs examined. 
(Primary, non-supplemental) */ @MergeByAdding public long READ_PAIRS_EXAMINED; /** * The number of reads that were either secondary or supplementary */ @MergeByAdding public long SECONDARY_OR_SUPPLEMENTARY_RDS; /** * The total number of unmapped reads examined. (Primary, non-supplemental) */ @MergeByAdding public long UNMAPPED_READS; /** * The number of fragments that were marked as duplicates. */ @MergeByAdding public long UNPAIRED_READ_DUPLICATES; /** * The number of read pairs that were marked as duplicates. */ @MergeByAdding public long READ_PAIR_DUPLICATES; /** * The number of read pairs duplicates that were caused by optical duplication. * Value is always < READ_PAIR_DUPLICATES, which counts all duplicates regardless of source. */ @MergeByAdding public long READ_PAIR_OPTICAL_DUPLICATES; /** * The fraction of mapped sequence that is marked as duplicate. */ @NoMergingIsDerived public Double PERCENT_DUPLICATION; /** * The estimated number of unique molecules in the library based on PE duplication. */ @NoMergingIsDerived public Long ESTIMATED_LIBRARY_SIZE; /** * Fills in the ESTIMATED_LIBRARY_SIZE based on the paired read data examined where * possible and the PERCENT_DUPLICATION. */ @Override public void calculateDerivedFields() { this.ESTIMATED_LIBRARY_SIZE = estimateLibrarySize(this.READ_PAIRS_EXAMINED - this.READ_PAIR_OPTICAL_DUPLICATES, this.READ_PAIRS_EXAMINED - this.READ_PAIR_DUPLICATES); PERCENT_DUPLICATION = (UNPAIRED_READ_DUPLICATES + READ_PAIR_DUPLICATES * 2) / (double) (UNPAIRED_READS_EXAMINED + READ_PAIRS_EXAMINED * 2); } /** * Fills in the ESTIMATED_LIBRARY_SIZE based on the paired read data examined where * possible and the PERCENT_DUPLICATION. * <p> * Deprecated, use {@link #calculateDerivedFields()} instead. */ @Deprecated public void calculateDerivedMetrics() { this.calculateDerivedFields(); } /** * Estimates the size of a library based on the number of paired end molecules observed * and the number of unique pairs observed. 
* <p> * Based on the Lander-Waterman equation that states: * C/X = 1 - exp( -N/X ) * where * X = number of distinct molecules in library * N = number of read pairs * C = number of distinct fragments observed in read pairs */ public static Long estimateLibrarySize(final long readPairs, final long uniqueReadPairs) { final long readPairDuplicates = readPairs - uniqueReadPairs; if (readPairs > 0 && readPairDuplicates > 0) { double m = 1.0; double M = 100.0; if (uniqueReadPairs >= readPairs || f(m * uniqueReadPairs, uniqueReadPairs, readPairs) < 0) { throw new IllegalStateException("Invalid values for pairs and unique pairs: " + readPairs + ", " + uniqueReadPairs); } // find value of M, large enough to act as other side for bisection method while (f(M * uniqueReadPairs, uniqueReadPairs, readPairs) > 0) { M *= 10.0; } // use bisection method (no more than 40 times) to find solution for (int i = 0; i < 40; i++) { double r = (m + M) / 2.0; double u = f(r * uniqueReadPairs, uniqueReadPairs, readPairs); if (u == 0) { break; } else if (u > 0) { m = r; } else if (u < 0) { M = r; } } return (long) (uniqueReadPairs * (m + M) / 2.0); } else { return null; } } /** * Method that is used in the computation of estimated library size. */ private static double f(double x, double c, double n) { return c / x - 1 + Math.exp(-n / x); } /** * Estimates the ROI (return on investment) that one would see if a library was sequenced to * x higher coverage than the observed coverage. * * @param estimatedLibrarySize the estimated number of molecules in the library * @param x the multiple of sequencing to be simulated (i.e. how many X sequencing) * @param pairs the number of pairs observed in the actual sequencing * @param uniquePairs the number of unique pairs observed in the actual sequencing * @return a number z <= x that estimates if you had pairs*x as your sequencing then you * would observe uniquePairs*z unique pairs. 
*/ public static double estimateRoi(long estimatedLibrarySize, double x, long pairs, long uniquePairs) { return estimatedLibrarySize * (1 - Math.exp(-(x * pairs) / estimatedLibrarySize)) / uniquePairs; } /** * Calculates a histogram using the estimateRoi method to estimate the effective yield * doing x sequencing for x=1..10. */ public Histogram<Double> calculateRoiHistogram() { if (ESTIMATED_LIBRARY_SIZE == null) { try { calculateDerivedFields(); if (ESTIMATED_LIBRARY_SIZE == null) { return null; } } catch (IllegalStateException ise) { return null; } } long uniquePairs = READ_PAIRS_EXAMINED - READ_PAIR_DUPLICATES; Histogram<Double> histo = new Histogram<>(); for (double x = 1; x <= 100; x += 1) { histo.increment(x, estimateRoi(ESTIMATED_LIBRARY_SIZE, x, READ_PAIRS_EXAMINED, uniquePairs)); } histo.setValueLabel("CoverageMult"); return histo; } // Main method used for debugging the derived metrics // Usage = DuplicationMetrics READ_PAIRS READ_PAIR_DUPLICATES public static void main(String[] args) { DuplicationMetrics m = new DuplicationMetrics(); m.READ_PAIRS_EXAMINED = Integer.parseInt(args[0]); m.READ_PAIR_DUPLICATES = Integer.parseInt(args[1]); m.calculateDerivedFields(); System.out.println("Percent Duplication: " + m.PERCENT_DUPLICATION); System.out.println("Est. Library Size : " + m.ESTIMATED_LIBRARY_SIZE); System.out.println(); System.out.println("X Seq\tX Unique"); for (Histogram.Bin<Double> bin : m.calculateRoiHistogram().values()) { System.out.println(bin.getId() + "\t" + bin.getValue()); } } }
/** * This class is generated by jOOQ */ package io.cattle.platform.core.model.tables.records; /** * This class is generated by jOOQ. */ @javax.annotation.Generated(value = { "http://www.jooq.org", "3.3.0" }, comments = "This class is generated by jOOQ") @java.lang.SuppressWarnings({ "all", "unchecked", "rawtypes" }) @javax.persistence.Entity @javax.persistence.Table(name = "ip_address", schema = "cattle") public class IpAddressRecord extends org.jooq.impl.UpdatableRecordImpl<io.cattle.platform.core.model.tables.records.IpAddressRecord> implements io.cattle.platform.db.jooq.utils.TableRecordJaxb, org.jooq.Record17<java.lang.Long, java.lang.String, java.lang.Long, java.lang.String, java.lang.String, java.lang.String, java.lang.String, java.util.Date, java.util.Date, java.util.Date, java.util.Map<String,Object>, java.lang.String, java.lang.Long, java.lang.Long, java.lang.Boolean, java.lang.String, java.lang.String>, io.cattle.platform.core.model.IpAddress { private static final long serialVersionUID = -2052657121; /** * Setter for <code>cattle.ip_address.id</code>. */ @Override public void setId(java.lang.Long value) { setValue(0, value); } /** * Getter for <code>cattle.ip_address.id</code>. */ @javax.persistence.Id @javax.persistence.Column(name = "id", unique = true, nullable = false, precision = 19) @Override public java.lang.Long getId() { return (java.lang.Long) getValue(0); } /** * Setter for <code>cattle.ip_address.name</code>. */ @Override public void setName(java.lang.String value) { setValue(1, value); } /** * Getter for <code>cattle.ip_address.name</code>. */ @javax.persistence.Column(name = "name", length = 255) @Override public java.lang.String getName() { return (java.lang.String) getValue(1); } /** * Setter for <code>cattle.ip_address.account_id</code>. */ @Override public void setAccountId(java.lang.Long value) { setValue(2, value); } /** * Getter for <code>cattle.ip_address.account_id</code>. 
*/ @javax.persistence.Column(name = "account_id", precision = 19) @Override public java.lang.Long getAccountId() { return (java.lang.Long) getValue(2); } /** * Setter for <code>cattle.ip_address.kind</code>. */ @Override public void setKind(java.lang.String value) { setValue(3, value); } /** * Getter for <code>cattle.ip_address.kind</code>. */ @javax.persistence.Column(name = "kind", nullable = false, length = 255) @Override public java.lang.String getKind() { return (java.lang.String) getValue(3); } /** * Setter for <code>cattle.ip_address.uuid</code>. */ @Override public void setUuid(java.lang.String value) { setValue(4, value); } /** * Getter for <code>cattle.ip_address.uuid</code>. */ @javax.persistence.Column(name = "uuid", unique = true, nullable = false, length = 128) @Override public java.lang.String getUuid() { return (java.lang.String) getValue(4); } /** * Setter for <code>cattle.ip_address.description</code>. */ @Override public void setDescription(java.lang.String value) { setValue(5, value); } /** * Getter for <code>cattle.ip_address.description</code>. */ @javax.persistence.Column(name = "description", length = 1024) @Override public java.lang.String getDescription() { return (java.lang.String) getValue(5); } /** * Setter for <code>cattle.ip_address.state</code>. */ @Override public void setState(java.lang.String value) { setValue(6, value); } /** * Getter for <code>cattle.ip_address.state</code>. */ @javax.persistence.Column(name = "state", nullable = false, length = 128) @Override public java.lang.String getState() { return (java.lang.String) getValue(6); } /** * Setter for <code>cattle.ip_address.created</code>. */ @Override public void setCreated(java.util.Date value) { setValue(7, value); } /** * Getter for <code>cattle.ip_address.created</code>. */ @javax.persistence.Column(name = "created") @Override public java.util.Date getCreated() { return (java.util.Date) getValue(7); } /** * Setter for <code>cattle.ip_address.removed</code>. 
*/ @Override public void setRemoved(java.util.Date value) { setValue(8, value); } /** * Getter for <code>cattle.ip_address.removed</code>. */ @javax.persistence.Column(name = "removed") @Override public java.util.Date getRemoved() { return (java.util.Date) getValue(8); } /** * Setter for <code>cattle.ip_address.remove_time</code>. */ @Override public void setRemoveTime(java.util.Date value) { setValue(9, value); } /** * Getter for <code>cattle.ip_address.remove_time</code>. */ @javax.persistence.Column(name = "remove_time") @Override public java.util.Date getRemoveTime() { return (java.util.Date) getValue(9); } /** * Setter for <code>cattle.ip_address.data</code>. */ @Override public void setData(java.util.Map<String,Object> value) { setValue(10, value); } /** * Getter for <code>cattle.ip_address.data</code>. */ @javax.persistence.Column(name = "data", length = 16777215) @Override public java.util.Map<String,Object> getData() { return (java.util.Map<String,Object>) getValue(10); } /** * Setter for <code>cattle.ip_address.address</code>. */ @Override public void setAddress(java.lang.String value) { setValue(11, value); } /** * Getter for <code>cattle.ip_address.address</code>. */ @javax.persistence.Column(name = "address", length = 255) @Override public java.lang.String getAddress() { return (java.lang.String) getValue(11); } /** * Setter for <code>cattle.ip_address.subnet_id</code>. */ @Override public void setSubnetId(java.lang.Long value) { setValue(12, value); } /** * Getter for <code>cattle.ip_address.subnet_id</code>. */ @javax.persistence.Column(name = "subnet_id", precision = 19) @Override public java.lang.Long getSubnetId() { return (java.lang.Long) getValue(12); } /** * Setter for <code>cattle.ip_address.network_id</code>. */ @Override public void setNetworkId(java.lang.Long value) { setValue(13, value); } /** * Getter for <code>cattle.ip_address.network_id</code>. 
*/ @javax.persistence.Column(name = "network_id", precision = 19) @Override public java.lang.Long getNetworkId() { return (java.lang.Long) getValue(13); } /** * Setter for <code>cattle.ip_address.is_public</code>. */ @Override public void setIsPublic(java.lang.Boolean value) { setValue(14, value); } /** * Getter for <code>cattle.ip_address.is_public</code>. */ @javax.persistence.Column(name = "is_public", nullable = false, precision = 1) @Override public java.lang.Boolean getIsPublic() { return (java.lang.Boolean) getValue(14); } /** * Setter for <code>cattle.ip_address.role</code>. */ @Override public void setRole(java.lang.String value) { setValue(15, value); } /** * Getter for <code>cattle.ip_address.role</code>. */ @javax.persistence.Column(name = "role", length = 128) @Override public java.lang.String getRole() { return (java.lang.String) getValue(15); } /** * Setter for <code>cattle.ip_address.hostname</code>. */ @Override public void setHostname(java.lang.String value) { setValue(16, value); } /** * Getter for <code>cattle.ip_address.hostname</code>. 
*/ @javax.persistence.Column(name = "hostname", length = 255) @Override public java.lang.String getHostname() { return (java.lang.String) getValue(16); } // ------------------------------------------------------------------------- // Primary key information // ------------------------------------------------------------------------- /** * {@inheritDoc} */ @Override public org.jooq.Record1<java.lang.Long> key() { return (org.jooq.Record1) super.key(); } // ------------------------------------------------------------------------- // Record17 type implementation // ------------------------------------------------------------------------- /** * {@inheritDoc} */ @Override public org.jooq.Row17<java.lang.Long, java.lang.String, java.lang.Long, java.lang.String, java.lang.String, java.lang.String, java.lang.String, java.util.Date, java.util.Date, java.util.Date, java.util.Map<String,Object>, java.lang.String, java.lang.Long, java.lang.Long, java.lang.Boolean, java.lang.String, java.lang.String> fieldsRow() { return (org.jooq.Row17) super.fieldsRow(); } /** * {@inheritDoc} */ @Override public org.jooq.Row17<java.lang.Long, java.lang.String, java.lang.Long, java.lang.String, java.lang.String, java.lang.String, java.lang.String, java.util.Date, java.util.Date, java.util.Date, java.util.Map<String,Object>, java.lang.String, java.lang.Long, java.lang.Long, java.lang.Boolean, java.lang.String, java.lang.String> valuesRow() { return (org.jooq.Row17) super.valuesRow(); } /** * {@inheritDoc} */ @Override public org.jooq.Field<java.lang.Long> field1() { return io.cattle.platform.core.model.tables.IpAddressTable.IP_ADDRESS.ID; } /** * {@inheritDoc} */ @Override public org.jooq.Field<java.lang.String> field2() { return io.cattle.platform.core.model.tables.IpAddressTable.IP_ADDRESS.NAME; } /** * {@inheritDoc} */ @Override public org.jooq.Field<java.lang.Long> field3() { return io.cattle.platform.core.model.tables.IpAddressTable.IP_ADDRESS.ACCOUNT_ID; } /** * {@inheritDoc} */ @Override 
public org.jooq.Field<java.lang.String> field4() { return io.cattle.platform.core.model.tables.IpAddressTable.IP_ADDRESS.KIND; } /** * {@inheritDoc} */ @Override public org.jooq.Field<java.lang.String> field5() { return io.cattle.platform.core.model.tables.IpAddressTable.IP_ADDRESS.UUID; } /** * {@inheritDoc} */ @Override public org.jooq.Field<java.lang.String> field6() { return io.cattle.platform.core.model.tables.IpAddressTable.IP_ADDRESS.DESCRIPTION; } /** * {@inheritDoc} */ @Override public org.jooq.Field<java.lang.String> field7() { return io.cattle.platform.core.model.tables.IpAddressTable.IP_ADDRESS.STATE; } /** * {@inheritDoc} */ @Override public org.jooq.Field<java.util.Date> field8() { return io.cattle.platform.core.model.tables.IpAddressTable.IP_ADDRESS.CREATED; } /** * {@inheritDoc} */ @Override public org.jooq.Field<java.util.Date> field9() { return io.cattle.platform.core.model.tables.IpAddressTable.IP_ADDRESS.REMOVED; } /** * {@inheritDoc} */ @Override public org.jooq.Field<java.util.Date> field10() { return io.cattle.platform.core.model.tables.IpAddressTable.IP_ADDRESS.REMOVE_TIME; } /** * {@inheritDoc} */ @Override public org.jooq.Field<java.util.Map<String,Object>> field11() { return io.cattle.platform.core.model.tables.IpAddressTable.IP_ADDRESS.DATA; } /** * {@inheritDoc} */ @Override public org.jooq.Field<java.lang.String> field12() { return io.cattle.platform.core.model.tables.IpAddressTable.IP_ADDRESS.ADDRESS; } /** * {@inheritDoc} */ @Override public org.jooq.Field<java.lang.Long> field13() { return io.cattle.platform.core.model.tables.IpAddressTable.IP_ADDRESS.SUBNET_ID; } /** * {@inheritDoc} */ @Override public org.jooq.Field<java.lang.Long> field14() { return io.cattle.platform.core.model.tables.IpAddressTable.IP_ADDRESS.NETWORK_ID; } /** * {@inheritDoc} */ @Override public org.jooq.Field<java.lang.Boolean> field15() { return io.cattle.platform.core.model.tables.IpAddressTable.IP_ADDRESS.IS_PUBLIC; } /** * {@inheritDoc} */ @Override public 
org.jooq.Field<java.lang.String> field16() { return io.cattle.platform.core.model.tables.IpAddressTable.IP_ADDRESS.ROLE; } /** * {@inheritDoc} */ @Override public org.jooq.Field<java.lang.String> field17() { return io.cattle.platform.core.model.tables.IpAddressTable.IP_ADDRESS.HOSTNAME; } /** * {@inheritDoc} */ @Override public java.lang.Long value1() { return getId(); } /** * {@inheritDoc} */ @Override public java.lang.String value2() { return getName(); } /** * {@inheritDoc} */ @Override public java.lang.Long value3() { return getAccountId(); } /** * {@inheritDoc} */ @Override public java.lang.String value4() { return getKind(); } /** * {@inheritDoc} */ @Override public java.lang.String value5() { return getUuid(); } /** * {@inheritDoc} */ @Override public java.lang.String value6() { return getDescription(); } /** * {@inheritDoc} */ @Override public java.lang.String value7() { return getState(); } /** * {@inheritDoc} */ @Override public java.util.Date value8() { return getCreated(); } /** * {@inheritDoc} */ @Override public java.util.Date value9() { return getRemoved(); } /** * {@inheritDoc} */ @Override public java.util.Date value10() { return getRemoveTime(); } /** * {@inheritDoc} */ @Override public java.util.Map<String,Object> value11() { return getData(); } /** * {@inheritDoc} */ @Override public java.lang.String value12() { return getAddress(); } /** * {@inheritDoc} */ @Override public java.lang.Long value13() { return getSubnetId(); } /** * {@inheritDoc} */ @Override public java.lang.Long value14() { return getNetworkId(); } /** * {@inheritDoc} */ @Override public java.lang.Boolean value15() { return getIsPublic(); } /** * {@inheritDoc} */ @Override public java.lang.String value16() { return getRole(); } /** * {@inheritDoc} */ @Override public java.lang.String value17() { return getHostname(); } /** * {@inheritDoc} */ @Override public IpAddressRecord value1(java.lang.Long value) { setId(value); return this; } /** * {@inheritDoc} */ @Override public 
IpAddressRecord value2(java.lang.String value) { setName(value); return this; } /** * {@inheritDoc} */ @Override public IpAddressRecord value3(java.lang.Long value) { setAccountId(value); return this; } /** * {@inheritDoc} */ @Override public IpAddressRecord value4(java.lang.String value) { setKind(value); return this; } /** * {@inheritDoc} */ @Override public IpAddressRecord value5(java.lang.String value) { setUuid(value); return this; } /** * {@inheritDoc} */ @Override public IpAddressRecord value6(java.lang.String value) { setDescription(value); return this; } /** * {@inheritDoc} */ @Override public IpAddressRecord value7(java.lang.String value) { setState(value); return this; } /** * {@inheritDoc} */ @Override public IpAddressRecord value8(java.util.Date value) { setCreated(value); return this; } /** * {@inheritDoc} */ @Override public IpAddressRecord value9(java.util.Date value) { setRemoved(value); return this; } /** * {@inheritDoc} */ @Override public IpAddressRecord value10(java.util.Date value) { setRemoveTime(value); return this; } /** * {@inheritDoc} */ @Override public IpAddressRecord value11(java.util.Map<String,Object> value) { setData(value); return this; } /** * {@inheritDoc} */ @Override public IpAddressRecord value12(java.lang.String value) { setAddress(value); return this; } /** * {@inheritDoc} */ @Override public IpAddressRecord value13(java.lang.Long value) { setSubnetId(value); return this; } /** * {@inheritDoc} */ @Override public IpAddressRecord value14(java.lang.Long value) { setNetworkId(value); return this; } /** * {@inheritDoc} */ @Override public IpAddressRecord value15(java.lang.Boolean value) { setIsPublic(value); return this; } /** * {@inheritDoc} */ @Override public IpAddressRecord value16(java.lang.String value) { setRole(value); return this; } /** * {@inheritDoc} */ @Override public IpAddressRecord value17(java.lang.String value) { setHostname(value); return this; } /** * {@inheritDoc} */ @Override public IpAddressRecord 
values(java.lang.Long value1, java.lang.String value2, java.lang.Long value3, java.lang.String value4, java.lang.String value5, java.lang.String value6, java.lang.String value7, java.util.Date value8, java.util.Date value9, java.util.Date value10, java.util.Map<String,Object> value11, java.lang.String value12, java.lang.Long value13, java.lang.Long value14, java.lang.Boolean value15, java.lang.String value16, java.lang.String value17) { return this; } // ------------------------------------------------------------------------- // FROM and INTO // ------------------------------------------------------------------------- /** * {@inheritDoc} */ @Override public void from(io.cattle.platform.core.model.IpAddress from) { setId(from.getId()); setName(from.getName()); setAccountId(from.getAccountId()); setKind(from.getKind()); setUuid(from.getUuid()); setDescription(from.getDescription()); setState(from.getState()); setCreated(from.getCreated()); setRemoved(from.getRemoved()); setRemoveTime(from.getRemoveTime()); setData(from.getData()); setAddress(from.getAddress()); setSubnetId(from.getSubnetId()); setNetworkId(from.getNetworkId()); setIsPublic(from.getIsPublic()); setRole(from.getRole()); setHostname(from.getHostname()); } /** * {@inheritDoc} */ @Override public <E extends io.cattle.platform.core.model.IpAddress> E into(E into) { into.from(this); return into; } // ------------------------------------------------------------------------- // Constructors // ------------------------------------------------------------------------- /** * Create a detached IpAddressRecord */ public IpAddressRecord() { super(io.cattle.platform.core.model.tables.IpAddressTable.IP_ADDRESS); } /** * Create a detached, initialised IpAddressRecord */ public IpAddressRecord(java.lang.Long id, java.lang.String name, java.lang.Long accountId, java.lang.String kind, java.lang.String uuid, java.lang.String description, java.lang.String state, java.util.Date created, java.util.Date removed, 
java.util.Date removeTime, java.util.Map<String,Object> data, java.lang.String address, java.lang.Long subnetId, java.lang.Long networkId, java.lang.Boolean isPublic, java.lang.String role, java.lang.String hostname) { super(io.cattle.platform.core.model.tables.IpAddressTable.IP_ADDRESS); setValue(0, id); setValue(1, name); setValue(2, accountId); setValue(3, kind); setValue(4, uuid); setValue(5, description); setValue(6, state); setValue(7, created); setValue(8, removed); setValue(9, removeTime); setValue(10, data); setValue(11, address); setValue(12, subnetId); setValue(13, networkId); setValue(14, isPublic); setValue(15, role); setValue(16, hostname); } }
// Copyright 2014 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.packages;

import static com.google.devtools.build.lib.packages.BuildType.TRISTATE;
import static com.google.devtools.build.lib.packages.Type.BOOLEAN;

import com.google.common.base.Preconditions;
import com.google.common.base.Predicate;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;
import com.google.devtools.build.lib.cmdline.Label;
import com.google.devtools.build.lib.syntax.Dict;
import com.google.devtools.build.lib.syntax.EvalException;
import com.google.devtools.build.lib.syntax.Location;
import com.google.devtools.build.lib.util.Pair;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.annotation.Nullable;

/**
 * Utility functions over Targets that don't really belong in the base {@link
 * Target} interface.
 */
public final class TargetUtils {

  // *_test / test_suite attribute that used to specify constraint keywords.
  private static final String CONSTRAINTS_ATTR = "tags";

  // We don't want to pollute the execution info with random things, and we also need to reserve
  // some internal tags that we don't allow to be set on targets. We also don't want to
  // exhaustively enumerate all the legal values here. Right now, only a ~small set of tags is
  // recognized by Bazel.
  private static boolean legalExecInfoKeys(String tag) {
    return tag.startsWith("block-")
        || tag.startsWith("requires-")
        || tag.startsWith("no-")
        || tag.startsWith("supports-")
        || tag.startsWith("disable-")
        || tag.equals("local")
        || tag.startsWith("cpu:");
  }

  private TargetUtils() {} // Uninstantiable.

  /** Returns true iff the rule class name names a {@code *_test} rule. */
  public static boolean isTestRuleName(String name) {
    return name.endsWith("_test");
  }

  /** Returns true iff the rule class name is {@code test_suite}. */
  public static boolean isTestSuiteRuleName(String name) {
    return name.equals("test_suite");
  }

  /**
   * Returns true iff {@code target} is a {@code *_test} rule; excludes {@code
   * test_suite}.
   */
  public static boolean isTestRule(Target target) {
    return (target instanceof Rule) && isTestRuleName(((Rule) target).getRuleClass());
  }

  /**
   * Returns true iff {@code target} is a {@code test_suite} rule.
   */
  public static boolean isTestSuiteRule(Target target) {
    return target instanceof Rule && isTestSuiteRuleName(((Rule) target).getRuleClass());
  }

  /** Returns true iff {@code target} is an {@code alias} rule. */
  public static boolean isAlias(Target target) {
    if (!(target instanceof Rule)) {
      return false;
    }
    Rule rule = (Rule) target;
    // Only the native alias rule counts, not a Starlark rule that happens to share the name.
    return !rule.getRuleClassObject().isSkylark() && rule.getRuleClass().equals("alias");
  }

  /**
   * Returns true iff {@code target} is a {@code *_test} or {@code test_suite}.
   */
  public static boolean isTestOrTestSuiteRule(Target target) {
    return isTestRule(target) || isTestSuiteRule(target);
  }

  /**
   * Returns true if {@code target} has "manual" in the tags attribute and thus should be ignored by
   * command-line wildcards or by test_suite $implicit_tests attribute.
   */
  public static boolean hasManualTag(Target target) {
    return (target instanceof Rule) && hasConstraint((Rule) target, "manual");
  }

  /**
   * Returns true if test marked as "exclusive" by the appropriate keyword
   * in the tags attribute.
   *
   * Method assumes that passed target is a test rule, so usually it should be
   * used only after isTestRule() or isTestOrTestSuiteRule(). Behavior is
   * undefined otherwise.
   */
  public static boolean isExclusiveTestRule(Rule rule) {
    return hasConstraint(rule, "exclusive");
  }

  /**
   * Returns true if test marked as "local" by the appropriate keyword
   * in the tags attribute.
   *
   * Method assumes that passed target is a test rule, so usually it should be
   * used only after isTestRule() or isTestOrTestSuiteRule(). Behavior is
   * undefined otherwise.
   */
  public static boolean isLocalTestRule(Rule rule) {
    return hasConstraint(rule, "local")
        || NonconfigurableAttributeMapper.of(rule).get("local", Type.BOOLEAN);
  }

  /**
   * Returns true if the rule is a test or test suite and is local or exclusive.
   * Wraps the above calls into one generic check safely applicable to any rule.
   */
  public static boolean isTestRuleAndRunsLocally(Rule rule) {
    return isTestOrTestSuiteRule(rule) && (isLocalTestRule(rule) || isExclusiveTestRule(rule));
  }

  /**
   * Returns true if test marked as "external" by the appropriate keyword
   * in the tags attribute.
   *
   * Method assumes that passed target is a test rule, so usually it should be
   * used only after isTestRule() or isTestOrTestSuiteRule(). Behavior is
   * undefined otherwise.
   */
  public static boolean isExternalTestRule(Rule rule) {
    return hasConstraint(rule, "external");
  }

  /**
   * Returns the value of a nonconfigurable string-list attribute.
   *
   * <p>Precondition: {@code target} is a {@link Rule}.
   */
  public static List<String> getStringListAttr(Target target, String attrName) {
    Preconditions.checkArgument(target instanceof Rule);
    return NonconfigurableAttributeMapper.of((Rule) target).get(attrName, Type.STRING_LIST);
  }

  /**
   * Returns the value of a nonconfigurable string attribute.
   *
   * <p>Precondition: {@code target} is a {@link Rule}.
   */
  public static String getStringAttr(Target target, String attrName) {
    Preconditions.checkArgument(target instanceof Rule);
    return NonconfigurableAttributeMapper.of((Rule) target).get(attrName, Type.STRING);
  }

  /**
   * Returns all values of the named attribute rendered as strings (one per configured value).
   *
   * <p>Precondition: {@code target} is a {@link Rule}. The returned iterable may hold null
   * elements for null attribute values.
   */
  public static Iterable<String> getAttrAsString(Target target, String attrName) {
    Preconditions.checkArgument(target instanceof Rule);
    List<String> values = new ArrayList<>(); // May hold null values.
    Attribute attribute = ((Rule) target).getAttributeDefinition(attrName);
    if (attribute != null) {
      Type<?> attributeType = attribute.getType();
      for (Object attrValue :
          AggregatingAttributeMapper.of((Rule) target)
              .visitAttribute(attribute.getName(), attributeType)) {

        // Ugly hack to maintain backward 'attr' query compatibility for BOOLEAN and TRISTATE
        // attributes. These are internally stored as actual Boolean or TriState objects but were
        // historically queried as integers. To maintain compatibility, we inspect their actual
        // value and return the integer equivalent represented as a String. This code is the
        // opposite of the code in BooleanType and TriStateType respectively.
        if (attributeType == BOOLEAN) {
          values.add(Type.BOOLEAN.cast(attrValue) ? "1" : "0");
        } else if (attributeType == TRISTATE) {
          switch (BuildType.TRISTATE.cast(attrValue)) {
            case AUTO:
              values.add("-1");
              break;
            case NO:
              values.add("0");
              break;
            case YES:
              values.add("1");
              break;
            default:
              throw new AssertionError("This can't happen!");
          }
        } else {
          values.add(attrValue == null ? null : attrValue.toString());
        }
      }
    }
    return values;
  }

  /**
   * If the given target is a rule, returns its {@code deprecation} value, or null if unset.
   */
  @Nullable
  public static String getDeprecation(Target target) {
    if (!(target instanceof Rule)) {
      return null;
    }
    Rule rule = (Rule) target;
    return (rule.isAttrDefined("deprecation", Type.STRING))
        ? NonconfigurableAttributeMapper.of(rule).get("deprecation", Type.STRING)
        : null;
  }

  /**
   * Checks whether specified constraint keyword is present in the
   * tags attribute of the test or test suite rule.
   *
   * Method assumes that provided rule is a test or a test suite. Behavior is
   * undefined otherwise.
   */
  private static boolean hasConstraint(Rule rule, String keyword) {
    return NonconfigurableAttributeMapper.of(rule).get(CONSTRAINTS_ATTR, Type.STRING_LIST)
        .contains(keyword);
  }

  /**
   * Returns the execution info from the tags declared on the target. These include only some tags
   * {@link #legalExecInfoKeys} as keys with empty values.
   */
  public static Map<String, String> getExecutionInfo(Rule rule) {
    // tags may contain duplicate values.
    Map<String, String> map = new HashMap<>();
    for (String tag :
        NonconfigurableAttributeMapper.of(rule).get(CONSTRAINTS_ATTR, Type.STRING_LIST)) {
      if (legalExecInfoKeys(tag)) {
        map.put(tag, "");
      }
    }
    return ImmutableMap.copyOf(map);
  }

  /**
   * Returns the execution info from the tags declared on the target. These include only some tags
   * {@link #legalExecInfoKeys} as keys with empty values.
   *
   * @param rule a rule instance to get tags from
   * @param allowTagsPropagation if set to true, tags will be propagated from a target to the
   *     actions' execution requirements, for more details {@see
   *     SkylarkSemanticOptions#experimentalAllowTagsPropagation}
   */
  public static ImmutableMap<String, String> getExecutionInfo(
      Rule rule, boolean allowTagsPropagation) {
    if (allowTagsPropagation) {
      return ImmutableMap.copyOf(getExecutionInfo(rule));
    } else {
      return ImmutableMap.of();
    }
  }

  /**
   * Returns the execution info, obtained from the rule's tags and the execution requirements
   * provided. Only supported tags are included into the execution info, see {@link
   * #legalExecInfoKeys}.
   *
   * @param executionRequirementsUnchecked execution_requirements of a rule, expected to be of a
   *     {@code Dict<String, String>} type, null or {@link
   *     com.google.devtools.build.lib.syntax.Runtime#NONE}
   * @param rule a rule instance to get tags from
   * @param allowTagsPropagation if set to true, tags will be propagated from a target to the
   *     actions' execution requirements, for more details {@see
   *     SkylarkSemanticOptions#experimentalAllowTagsPropagation}
   */
  public static ImmutableMap<String, String> getFilteredExecutionInfo(
      Object executionRequirementsUnchecked, Rule rule, boolean allowTagsPropagation)
      throws EvalException {
    Map<String, String> checkedExecutionRequirements =
        TargetUtils.filter(
            Dict.castSkylarkDictOrNoneToDict(
                executionRequirementsUnchecked, String.class, String.class,
                "execution_requirements"));

    Map<String, String> executionInfoBuilder = new HashMap<>();
    // adding filtered execution requirements to the execution info map
    executionInfoBuilder.putAll(checkedExecutionRequirements);

    if (allowTagsPropagation) {
      Map<String, String> checkedTags = getExecutionInfo(rule);
      // merging filtered tags to the execution info map avoiding duplicates
      checkedTags.forEach(executionInfoBuilder::putIfAbsent);
    }

    return ImmutableMap.copyOf(executionInfoBuilder);
  }

  /**
   * Returns the execution info. These include execution requirement tags ('block-*', 'requires-*',
   * 'no-*', 'supports-*', 'disable-*', 'local', and 'cpu:*') as keys with empty values.
   */
  public static Map<String, String> filter(Map<String, String> executionInfo) {
    return Maps.filterKeys(executionInfo, TargetUtils::legalExecInfoKeys);
  }

  /**
   * Returns the language part of the rule name (e.g. "foo" for foo_test or foo_binary).
   *
   * <p>In practice this is the part before the "_", if any, otherwise the entire rule class name.
   *
   * <p>Precondition: isTestRule(target) || isRunnableNonTestRule(target).
   */
  public static String getRuleLanguage(Target target) {
    return getRuleLanguage(((Rule) target).getRuleClass());
  }

  /**
   * Returns the language part of the rule name (e.g. "foo" for foo_test or foo_binary).
   *
   * <p>In practice this is the part before the "_", if any, otherwise the entire rule class name.
   */
  public static String getRuleLanguage(String ruleClass) {
    int index = ruleClass.lastIndexOf('_');
    // Chop off "_binary" or "_test".
    return index != -1 ? ruleClass.substring(0, index) : ruleClass;
  }

  /**
   * Returns true iff {@code label} appears in an explicitly-set attribute of {@code rule} or in
   * its visibility declaration.
   */
  private static boolean isExplicitDependency(Rule rule, Label label) {
    if (rule.getVisibility().getDependencyLabels().contains(label)) {
      return true;
    }

    for (AttributeMap.DepEdge depEdge : AggregatingAttributeMapper.of(rule).visitLabels()) {
      if (rule.isAttributeValueExplicitlySpecified(depEdge.getAttribute())
          && label.equals(depEdge.getLabel())) {
        return true;
      }
    }
    return false;
  }

  /**
   * Returns a predicate to be used for test tag filtering, i.e., that only accepts tests that match
   * all of the required tags and none of the excluded tags.
   */
  public static Predicate<Target> tagFilter(List<String> tagFilterList) {
    Pair<Collection<String>, Collection<String>> tagLists =
        TestTargetUtils.sortTagsBySense(tagFilterList);
    final Collection<String> requiredTags = tagLists.first;
    final Collection<String> excludedTags = tagLists.second;
    return input -> {
      if (requiredTags.isEmpty() && excludedTags.isEmpty()) {
        return true;
      }

      if (!(input instanceof Rule)) {
        return requiredTags.isEmpty();
      }
      // Note that test_tags are those originating from the XX_test rule, whereas the requiredTags
      // and excludedTags originate from the command line or test_suite rule.
      // TODO(ulfjack): getRuleTags is inconsistent with TestFunction and other places that use
      // tags + size, but consistent with TestSuite.
      return TestTargetUtils.testMatchesFilters(
          ((Rule) input).getRuleTags(), requiredTags, excludedTags, false);
    };
  }

  /**
   * Return {@link Location} for {@link Target} target, if it should not be null.
   */
  public static Location getLocationMaybe(Target target) {
    return (target instanceof Rule) || (target instanceof InputFile) ? target.getLocation() : null;
  }

  /**
   * Return nicely formatted error message that {@link Label} label that was pointed to by {@link
   * Target} target did not exist, due to {@link NoSuchThingException} e.
   */
  public static String formatMissingEdge(
      @Nullable Target target, Label label, NoSuchThingException e, @Nullable Attribute attr) {
    // instanceof returns false if target is null (which is exploited here)
    if (target instanceof Rule) {
      Rule rule = (Rule) target;
      if (isExplicitDependency(rule, label)) {
        return String.format("%s and referenced by '%s'", e.getMessage(), target.getLabel());
      } else {
        String additionalInfo = "";
        if (attr != null && !Strings.isNullOrEmpty(attr.getDoc())) {
          additionalInfo =
              String.format(
                  "\nDocumentation for implicit attribute %s of rules of type %s:\n%s",
                  attr.getPublicName(), rule.getRuleClass(), attr.getDoc());
        }
        // N.B. If you see this error message in one of our integration tests during development of
        // a change that adds a new implicit dependency when running Blaze, maybe you forgot to add
        // a new mock target to the integration test's setup.
        return String.format(
            "every rule of type %s implicitly depends upon the target '%s', but "
                + "this target could not be found because of: %s%s",
            rule.getRuleClass(), label, e.getMessage(), additionalInfo);
      }
    } else if (target instanceof InputFile) {
      return e.getMessage()
          + " (this is usually caused by a missing package group in the"
          + " package-level visibility declaration)";
    } else {
      if (target != null) {
        return String.format(
            "in target '%s', no such label '%s': %s", target.getLabel(), label, e.getMessage());
      }
      return e.getMessage();
    }
  }

  /** As {@link #formatMissingEdge(Target, Label, NoSuchThingException, Attribute)} with no
   * attribute context. */
  public static String formatMissingEdge(
      @Nullable Target target, Label label, NoSuchThingException e) {
    return formatMissingEdge(target, label, e, null);
  }
}
/**
 * Most of the code in the Qalingo project is copyrighted Hoteia and licensed
 * under the Apache License Version 2.0 (release version 0.8.0)
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Copyright (c) Hoteia, 2012-2014
 * http://www.hoteia.com - http://twitter.com/hoteia - contact@hoteia.com
 *
 */
package org.hoteia.qalingo.core.solr.bean;

import java.util.ArrayList;
import java.util.Date;
import java.util.List;

import org.apache.solr.client.solrj.beans.Field;

/**
 * Solr document bean for a product-marketing entry. Fields annotated with
 * {@link Field} are bound to/from the Solr index by SolrJ.
 *
 * <p>Identity ({@link #equals(Object)}/{@link #hashCode()}) is based on
 * {@code code} and {@code id} only.
 */
public class ProductMarketingSolr {

    @Field
    private Long id;

    @Field
    private String description;

    // NOTE(review): not annotated with @Field, so this flag is not bound
    // to/from the Solr index — confirm this is intentional.
    private Boolean isDefault;

    @Field
    private String code;

    @Field
    private String name;

    @Field
    private String defaultCategoryCode;

    @Field
    private String defaultProductSkuCode;

    @Field
    private Float price;

    @Field
    private List<String> catalogCode = new ArrayList<String>();

    @Field
    private List<String> catalogCategories = new ArrayList<String>();

    @Field
    private List<String> tags = new ArrayList<String>();

    @Field("datecreate")
    private Date dateCreate;

    @Field("dateupdate")
    private Date dateUpdate;

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public Long getId() {
        return id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    public String getDescription() {
        return description;
    }

    public void setDescription(String description) {
        this.description = description;
    }

    public Boolean getIsDefault() {
        return isDefault;
    }

    public void setIsDefault(Boolean isDefault) {
        this.isDefault = isDefault;
    }

    public String getCode() {
        return code;
    }

    public void setCode(String code) {
        this.code = code;
    }

    public String getDefaultCategoryCode() {
        return defaultCategoryCode;
    }

    public void setDefaultCategoryCode(String defaultCategoryCode) {
        this.defaultCategoryCode = defaultCategoryCode;
    }

    public String getDefaultProductSkuCode() {
        return defaultProductSkuCode;
    }

    public void setDefaultProductSkuCode(String defaultProductSkuCode) {
        this.defaultProductSkuCode = defaultProductSkuCode;
    }

    public Float getPrice() {
        return price;
    }

    public void setPrice(Float price) {
        this.price = price;
    }

    public List<String> getCatalogCode() {
        return catalogCode;
    }

    public void setCatalogCode(List<String> catalogCode) {
        this.catalogCode = catalogCode;
    }

    /**
     * Adds a catalog code if not already present; guards against a null list
     * (a setter may have nulled the field).
     */
    public void addCatalogCode(String catalogCodeToAdd) {
        if (this.catalogCode == null) {
            this.catalogCode = new ArrayList<String>();
        }
        if (!this.catalogCode.contains(catalogCodeToAdd)) {
            this.catalogCode.add(catalogCodeToAdd);
        }
    }

    public List<String> getCatalogCategories() {
        return catalogCategories;
    }

    public void setCatalogCategories(List<String> catalogCategories) {
        this.catalogCategories = catalogCategories;
    }

    /**
     * Adds a catalog category code if not already present; guards against a
     * null list (a setter may have nulled the field).
     */
    public void addCatalogCategories(String catalogCategoryCode) {
        if (this.catalogCategories == null) {
            this.catalogCategories = new ArrayList<String>();
        }
        if (!this.catalogCategories.contains(catalogCategoryCode)) {
            this.catalogCategories.add(catalogCategoryCode);
        }
    }

    public List<String> getTags() {
        return tags;
    }

    public void setTags(List<String> tags) {
        this.tags = tags;
    }

    /**
     * Adds a tag if not already present; guards against a null list (a setter
     * may have nulled the field).
     */
    public void addTags(String tag) {
        if (this.tags == null) {
            this.tags = new ArrayList<String>();
        }
        if (!this.tags.contains(tag)) {
            this.tags.add(tag);
        }
    }

    public Date getDateCreate() {
        return dateCreate;
    }

    public void setDateCreate(Date dateCreate) {
        this.dateCreate = dateCreate;
    }

    public Date getDateUpdate() {
        return dateUpdate;
    }

    public void setDateUpdate(Date dateUpdate) {
        this.dateUpdate = dateUpdate;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + ((code == null) ? 0 : code.hashCode());
        result = prime * result + ((id == null) ? 0 : id.hashCode());
        return result;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        ProductMarketingSolr other = (ProductMarketingSolr) obj;
        if (code == null) {
            if (other.code != null)
                return false;
        } else if (!code.equals(other.code))
            return false;
        if (id == null) {
            if (other.id != null)
                return false;
        } else if (!id.equals(other.id))
            return false;
        return true;
    }

    // Fix: the previous version omitted the "tags" field; it is now appended
    // so the rendering covers every indexed field.
    @Override
    public String toString() {
        return "ProductMarketingSolr [id=" + id + ", description=" + description + ", isDefault=" + isDefault
                + ", code=" + code + ", name=" + name + ", defaultCategoryCode=" + defaultCategoryCode
                + ", defaultProductSkuCode=" + defaultProductSkuCode + ", catalogCode=" + catalogCode
                + ", price=" + price + ", dateCreate=" + dateCreate + ", dateUpdate=" + dateUpdate
                + ", catalogCategories=" + catalogCategories + ", tags=" + tags + "]";
    }

}
/* * Copyright 2004,2005 The Apache Software Foundation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.rampart.util; import org.apache.axiom.om.OMAbstractFactory; import org.apache.axiom.om.OMAttribute; import org.apache.axiom.om.OMElement; import org.apache.axiom.om.OMFactory; import org.apache.axiom.om.OMNamespace; import org.apache.axiom.om.OMNode; import org.apache.axiom.om.OMXMLBuilderFactory; import org.apache.axiom.om.OMXMLParserWrapper; import org.apache.axiom.soap.SOAPEnvelope; import org.apache.axiom.soap.SOAPHeader; import org.apache.axiom.soap.SOAPHeaderBlock; import org.apache.axiom.soap.SOAPModelBuilder; import org.apache.rampart.handler.WSSHandlerConstants; import org.apache.ws.security.WSSecurityException; import org.apache.xml.security.utils.XMLUtils; import org.w3c.dom.DOMConfiguration; import org.w3c.dom.Document; import org.w3c.dom.Element; import javax.xml.namespace.QName; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.stream.FactoryConfigurationError; import javax.xml.stream.XMLStreamReader; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.util.ArrayList; import java.util.Iterator; /** * Utility class for the Axis2-WSS4J Module */ public class Axis2Util { private static ThreadLocal doomTacker = new ThreadLocal(); public static boolean isUseDOOM() { Object value = doomTacker.get(); return (value != null); } public static void useDOOM(boolean isDOOMRequired) { //TODO Enable this 
when we have DOOM fixed to be able to flow in and out of Axis2 // if(isDOOMRequired) { // if(!isUseDOOM()) { // System.setProperty(OMAbstractFactory.SOAP11_FACTORY_NAME_PROPERTY, SOAP11Factory.class.getName()); // System.setProperty(OMAbstractFactory.SOAP12_FACTORY_NAME_PROPERTY, SOAP12Factory.class.getName()); // System.setProperty(OMAbstractFactory.OM_FACTORY_NAME_PROPERTY, OMDOMFactory.class.getName()); // doomTacker.set(new Object()); // } // } else { // System.getProperties().remove(OMAbstractFactory.SOAP11_FACTORY_NAME_PROPERTY); // System.getProperties().remove(OMAbstractFactory.SOAP12_FACTORY_NAME_PROPERTY); // System.getProperties().remove(OMAbstractFactory.OM_FACTORY_NAME_PROPERTY); // doomTacker.set(null); // } } /** * Creates a DOM Document using the SOAP Envelope. * @param env An org.apache.axiom.soap.SOAPEnvelope instance * @return Returns the DOM Document of the given SOAP Envelope. */ public static Document getDocumentFromSOAPEnvelope(SOAPEnvelope env, boolean useDoom) throws WSSecurityException { try { if(env instanceof Element) { Element element = (Element)env; Document document = element.getOwnerDocument(); // For outgoing messages, Axis2 only creates the SOAPEnvelope, but no document. If // the Axiom implementation also supports DOM, then the envelope (seen as a DOM // element) will have an owner document, but the document and the envelope have no // parent-child relationship. On the other hand, the input expected by WSS4J is // a document with the envelope as document element. Therefore we need to set the // envelope as document element on the owner document. if (element.getParentNode() != document) { document.appendChild(element); } // If the Axiom implementation supports DOM, then it is possible/likely that the // DOM API was used to create the object model (or parts of it). In this case, the // object model is not necessarily well formed with respect to namespaces because // DOM doesn't generate namespace declarations automatically. 
This is an issue // because WSS4J/Santuario expects that all namespace declarations are present. // If this is not the case, then signature values or encryptions will be incorrect. // To avoid this, we normalize the document. Note that if we disable the other // normalizations supported by DOM, this is generally not a heavy operation. // In particular, the Axiom implementation is not required to expand the object // model (including OMSourcedElements) because the Axiom builder is required to // perform namespace repairing, so that no modifications to unexpanded parts of // the message are required. DOMConfiguration domConfig = document.getDomConfig(); domConfig.setParameter("split-cdata-sections", Boolean.FALSE); domConfig.setParameter("well-formed", Boolean.FALSE); domConfig.setParameter("namespaces", Boolean.TRUE); document.normalizeDocument(); return document; } if (useDoom) { env.build(); // Workaround to prevent a bug in AXIOM where // there can be an incomplete OMElement as the first child body OMElement firstElement = env.getBody().getFirstElement(); if (firstElement != null) { firstElement.build(); } //Get processed headers SOAPHeader soapHeader = env.getHeader(); ArrayList processedHeaderQNames = new ArrayList(); if(soapHeader != null) { Iterator headerBlocs = soapHeader.getChildElements(); while (headerBlocs.hasNext()) { SOAPHeaderBlock element = (SOAPHeaderBlock) headerBlocs.next(); if(element.isProcessed()) { processedHeaderQNames.add(element.getQName()); } } } SOAPModelBuilder stAXSOAPModelBuilder = OMXMLBuilderFactory.createStAXSOAPModelBuilder( OMAbstractFactory.getMetaFactory(OMAbstractFactory.FEATURE_DOM), env.getXMLStreamReader()); SOAPEnvelope envelope = (stAXSOAPModelBuilder) .getSOAPEnvelope(); envelope.getParent().build(); //Set the processed flag of the processed headers SOAPHeader header = envelope.getHeader(); for (Iterator iter = processedHeaderQNames.iterator(); iter .hasNext();) { QName name = (QName) iter.next(); 
Iterator<SOAPHeaderBlock> omKids = header.getHeaderBlocksWithName(name); if(omKids.hasNext()) { omKids.next().setProcessed(); } } Element envElem = (Element) envelope; return envElem.getOwnerDocument(); } else { ByteArrayOutputStream baos = new ByteArrayOutputStream(); env.build(); env.serialize(baos); ByteArrayInputStream bais = new ByteArrayInputStream(baos .toByteArray()); DocumentBuilderFactory factory = DocumentBuilderFactory .newInstance(); factory.setNamespaceAware(true); return factory.newDocumentBuilder().parse(bais); } } catch (Exception e) { throw new WSSecurityException( "Error in converting SOAP Envelope to Document", e); } } /** * Builds a SOAPEnvelope from DOM Document. * @param doc - The dom document that contains a SOAP message * @param useDoom * @return * @throws WSSecurityException */ public static SOAPEnvelope getSOAPEnvelopeFromDOMDocument(Document doc, boolean useDoom) throws WSSecurityException { Element documentElement = doc.getDocumentElement(); if (documentElement instanceof SOAPEnvelope) { SOAPEnvelope env = (SOAPEnvelope)documentElement; // If the DOM tree already implements the Axiom API and the corresponding // Axiom implementation is also used as default implementation, then just return // the SOAPEnvelope directly. Note that this will never be the case for DOOM, // but may be the case for a non standard Axiom implementation. 
if (env.getOMFactory().getMetaFactory() == OMAbstractFactory.getMetaFactory()) { return env; } } if(useDoom) { try { //Get processed headers SOAPEnvelope env = (SOAPEnvelope)doc.getDocumentElement(); ArrayList processedHeaderQNames = new ArrayList(); SOAPHeader soapHeader = env.getHeader(); if(soapHeader != null) { Iterator headerBlocs = soapHeader.getChildElements(); while (headerBlocs.hasNext()) { OMElement element = (OMElement)headerBlocs.next(); SOAPHeaderBlock header = null; if (element instanceof SOAPHeaderBlock) { header = (SOAPHeaderBlock) element; // If a header block is not an instance of SOAPHeaderBlock, it means that // it is a header we have added in rampart eg. EncryptedHeader and should // be converted to SOAPHeaderBlock for processing } else { header = soapHeader.addHeaderBlock(element.getLocalName(), element.getNamespace()); Iterator attrIter = element.getAllAttributes(); while (attrIter.hasNext()) { OMAttribute attr = (OMAttribute)attrIter.next(); header.addAttribute(attr.getLocalName(), attr.getAttributeValue(), attr.getNamespace()); } Iterator nsIter = element.getAllDeclaredNamespaces(); while (nsIter.hasNext()) { OMNamespace ns = (OMNamespace) nsIter.next(); header.declareNamespace(ns); } // retrieve all child nodes (including any text nodes) // and re-attach to header block Iterator children = element.getChildren(); while (children.hasNext()) { OMNode child = (OMNode)children.next(); children.remove(); header.addChild(child); } headerBlocs.remove(); soapHeader.build(); header.setProcessed(); } if(header.isProcessed()) { processedHeaderQNames.add(element.getQName()); } } } XMLStreamReader reader = ((OMElement) doc.getDocumentElement()) .getXMLStreamReader(); SOAPModelBuilder stAXSOAPModelBuilder = OMXMLBuilderFactory.createStAXSOAPModelBuilder( reader); SOAPEnvelope envelope = stAXSOAPModelBuilder.getSOAPEnvelope(); //Set the processed flag of the processed headers SOAPHeader header = envelope.getHeader(); for (Iterator iter = 
processedHeaderQNames.iterator(); iter .hasNext();) { QName name = (QName) iter.next(); Iterator<SOAPHeaderBlock> omKids = header.getHeaderBlocksWithName(name); if(omKids.hasNext()) { omKids.next().setProcessed(); } } envelope.build(); return envelope; } catch (FactoryConfigurationError e) { throw new WSSecurityException(e.getMessage()); } } else { try { ByteArrayOutputStream os = new ByteArrayOutputStream(); XMLUtils.outputDOM(doc.getDocumentElement(), os, true); ByteArrayInputStream bais = new ByteArrayInputStream(os.toByteArray()); SOAPModelBuilder stAXSOAPModelBuilder = OMXMLBuilderFactory.createSOAPModelBuilder(bais, null); return stAXSOAPModelBuilder.getSOAPEnvelope(); } catch (Exception e) { throw new WSSecurityException(e.getMessage()); } } } /** * Provides the appropriate key to pickup config params from the message context. * This is acutally used when the outflow handler (WSDoAllSender) * is repeated n number of times. * @param originalKey The default key * @param inHandler Whether the handler is the inflow handler or not * @param repetition The current repetition number * @return Returns the key to be used internally in the security module to pick * up the config params. */ public static String getKey(String originalKey, boolean inHandler, int repetition) { if(repetition > 0 && !inHandler && !originalKey.equals(WSSHandlerConstants.OUTFLOW_SECURITY)&& !originalKey.equals(WSSHandlerConstants.SENDER_REPEAT_COUNT)) { return originalKey + repetition; } return originalKey; } /** * This will build a DOOM Element that is of the same <code>Document</code> * @param factory * @param element * @return */ public static OMElement toDOOM(OMFactory factory, OMElement element){ OMXMLParserWrapper builder = OMXMLBuilderFactory.createStAXOMBuilder(factory, element.getXMLStreamReader()); OMElement elem = builder.getDocumentElement(); elem.build(); return elem; } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain; import com.google.common.base.Preconditions; import lombok.Getter; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.CommonStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.SQLParserTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.dal.AlterResourceGroupStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.dal.BinlogStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.dal.CacheIndexStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.dal.CheckTableStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.dal.ChecksumTableStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.dal.CloneStatementTestCase; import 
org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.dal.CreateResourceGroupStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.dal.DelimiterStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.dal.DropResourceGroupStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.dal.ExplainStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.dal.FlushStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.dal.HelpStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.dal.InstallComponentStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.dal.InstallPluginStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.dal.KillStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.dal.LoadIndexInfoStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.dal.OptimizeTableStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.dal.RepairTableStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.dal.ResetPersistStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.dal.ResetStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.dal.SetParameterStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.dal.SetResourceGroupStatementTestCase; import 
org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.dal.ShowBinlogEventsStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.dal.ShowCharacterSetStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.dal.ShowCollationStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.dal.ShowColumnsStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.dal.ShowCreateTableStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.dal.ShowCreateTriggerStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.dal.ShowCreateUserStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.dal.ShowDatabasesStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.dal.ShowEventsStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.dal.ShowFunctionStatusStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.dal.ShowIndexStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.dal.ShowOpenTablesStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.dal.ShowProcedureCodeStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.dal.ShowProcedureStatusStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.dal.ShowRelaylogEventsStatementTestCase; import 
org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.dal.ShowReplicaStatusStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.dal.ShowReplicasStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.dal.ShowSlaveHostsStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.dal.ShowSlaveStatusStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.dal.ShowStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.dal.ShowStatusStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.dal.ShowTableStatusStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.dal.ShowTablesStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.dal.ShowTriggersStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.dal.ShowVariablesStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.dal.ShutdownStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.dal.UninstallComponentStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.dal.UninstallPluginStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.dal.UseStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.dcl.AlterLoginStatementTestCase; import 
org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.dcl.AlterRoleStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.dcl.AlterUserStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.dcl.CreateLoginStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.dcl.CreateRoleStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.dcl.CreateUserStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.dcl.DenyUserStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.dcl.DropLoginStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.dcl.DropRoleStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.dcl.DropUserStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.dcl.GrantStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.dcl.RenameUserStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.dcl.RevokeStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.dcl.SetDefaultRoleStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.dcl.SetPasswordStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.dcl.SetRoleStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.dcl.SetUserStatementTestCase; import 
org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.ddl.AlterAggregateStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.ddl.AlterCollationStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.ddl.AlterConversionStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.ddl.AlterDatabaseStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.ddl.AlterDefaultPrivilegesTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.ddl.AlterDimensionStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.ddl.AlterDomainStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.ddl.AlterExtensionStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.ddl.AlterForeignDataWrapperTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.ddl.AlterForeignTableTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.ddl.AlterFunctionStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.ddl.AlterGroupStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.ddl.AlterIndexStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.ddl.AlterLanguageStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.ddl.AlterMaterializedViewStatementTestCase; import 
org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.ddl.AlterProcedureStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.ddl.AlterSchemaStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.ddl.AlterSequenceStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.ddl.AlterServerStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.ddl.AlterServiceStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.ddl.AlterSessionStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.ddl.AlterSynonymStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.ddl.AlterSystemStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.ddl.AlterTableStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.ddl.AlterTextSearchStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.ddl.AnalyzeStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.ddl.AssociateStatisticsStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.ddl.AuditStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.ddl.CommentStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.ddl.CreateConversionStatementTestCase; import 
org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.ddl.CreateDatabaseLinkStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.ddl.CreateDatabaseStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.ddl.CreateDimensionStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.ddl.CreateDomainStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.ddl.CreateExtensionStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.ddl.CreateFunctionStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.ddl.CreateIndexStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.ddl.CreateLanguageStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.ddl.CreateProcedureStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.ddl.CreateRuleStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.ddl.CreateSchemaStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.ddl.CreateSequenceStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.ddl.CreateServerStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.ddl.CreateServiceStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.ddl.CreateTableStatementTestCase; import 
org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.ddl.CreateTextSearchStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.ddl.CreateTriggerStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.ddl.CreateTypeStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.ddl.CreateViewStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.ddl.DeclareStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.ddl.DisassociateStatisticsStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.ddl.DiscardStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.ddl.DropConversionStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.ddl.DropDatabaseStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.ddl.DropDimensionStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.ddl.DropDomainStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.ddl.DropExtensionStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.ddl.DropFunctionStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.ddl.DropIndexStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.ddl.DropLanguageStatementTestCase; import 
org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.ddl.DropProcedureStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.ddl.DropSchemaStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.ddl.DropSequenceStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.ddl.DropServerStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.ddl.DropServiceStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.ddl.DropTableStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.ddl.DropTriggerStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.ddl.DropViewStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.ddl.FlashbackDatabaseStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.ddl.FlashbackTableStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.ddl.NoAuditStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.ddl.PreparedStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.ddl.PurgeStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.ddl.RenameStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.ddl.RenameTableStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.ddl.TruncateStatementTestCase; import 
org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.ral.AddShardingHintDatabaseValueStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.ral.AddShardingHintTableValueStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.ral.AlterSQLParserRuleStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.ral.AlterTrafficRuleStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.ral.ApplyDistSQLStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.ral.ClearHintStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.ral.ClearReadwriteSplittingHintStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.ral.ClearShardingHintStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.ral.CreateTrafficRuleStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.ral.DiscardDistSQLStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.ral.DropTrafficRuleStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.ral.ExportSchemaConfigurationStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.ral.LabelInstanceStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.ral.ParseStatementTestCase; import 
org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.ral.PrepareDistSQLStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.ral.PreviewStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.ral.RefreshTableMetadataStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.ral.SetReadwriteSplittingHintStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.ral.SetShardingHintDatabaseValueStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.ral.SetVariableStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.ral.ShowAuthorityRuleStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.ral.ShowInstanceStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.ral.ShowReadwriteSplittingHintStatusStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.ral.ShowReadwriteSplittingReadResourcesStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.ral.ShowSQLParserRuleStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.ral.ShowScalingListStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.ral.ShowShardingHintStatusStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.ral.ShowTableMetadataStatementTestCase; import 
org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.ral.ShowTrafficRulesStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.ral.ShowTransactionRuleStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.ral.ShowVariableStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.ral.UnlabelInstanceStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.ral.scaling.ApplyScalingStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.ral.scaling.CheckScalingStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.ral.scaling.RestoreScalingSourceWritingStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.ral.scaling.ShowScalingCheckAlgorithmsStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.ral.scaling.StopScalingSourceWritingStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.rdl.alter.AlterDatabaseDiscoveryConstructionRuleStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.rdl.alter.AlterDatabaseDiscoveryDefinitionRuleStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.rdl.alter.AlterDatabaseDiscoveryHeartbeatStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.rdl.alter.AlterDatabaseDiscoveryTypeStatementTestCase; import 
org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.rdl.alter.AlterDefaultShardingStrategyStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.rdl.alter.AlterDefaultSingleTableRuleStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.rdl.alter.AlterEncryptRuleStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.rdl.alter.AlterReadwriteSplittingRuleStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.rdl.alter.AlterResourceStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.rdl.alter.AlterShadowAlgorithmStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.rdl.alter.AlterShadowRuleStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.rdl.alter.AlterShardingAlgorithmStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.rdl.alter.AlterShardingAutoTableRuleStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.rdl.alter.AlterShardingBindingTableRulesStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.rdl.alter.AlterShardingBroadcastTableRulesStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.rdl.alter.AlterShardingKeyGeneratorStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.rdl.alter.AlterShardingTableRuleStatementTestCase; import 
org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.rdl.alter.DisableShardingScalingRuleStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.rdl.alter.EnableShardingScalingRuleStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.rdl.create.AddResourceStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.rdl.create.CreateDatabaseDiscoveryConstructionRuleStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.rdl.create.CreateDatabaseDiscoveryDefinitionRuleStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.rdl.create.CreateDatabaseDiscoveryHeartbeatStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.rdl.create.CreateDatabaseDiscoveryTypeStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.rdl.create.CreateDefaultShadowAlgorithmStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.rdl.create.CreateDefaultShardingStrategyStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.rdl.create.CreateDefaultSingleTableRuleStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.rdl.create.CreateEncryptRuleStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.rdl.create.CreateReadwriteSplittingRuleStatementTestCase; import 
org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.rdl.create.CreateShadowAlgorithmStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.rdl.create.CreateShadowRuleStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.rdl.create.CreateShardingAlgorithmStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.rdl.create.CreateShardingAutoTableRuleStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.rdl.create.CreateShardingBindingTableRulesStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.rdl.create.CreateShardingBroadcastTableRulesStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.rdl.create.CreateShardingKeyGeneratorStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.rdl.create.CreateShardingScalingRuleStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.rdl.create.CreateShardingTableRuleStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.rdl.drop.DropDataBaseDiscoveryHeartbeatStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.rdl.drop.DropDataBaseDiscoveryRuleStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.rdl.drop.DropDataBaseDiscoveryTypeStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.rdl.drop.DropDefaultShardingStrategyStatementTestCase; 
import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.rdl.drop.DropDefaultSingleTableRuleStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.rdl.drop.DropEncryptRuleStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.rdl.drop.DropReadwriteSplittingRuleStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.rdl.drop.DropResourceStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.rdl.drop.DropShadowAlgorithmStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.rdl.drop.DropShadowRuleStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.rdl.drop.DropShardingBindingTableRulesStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.rdl.drop.DropShardingBroadcastTableRulesStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.rdl.drop.DropShardingKeyGeneratorStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.rdl.drop.DropShardingScalingRuleStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.rdl.drop.DropShardingTableRuleStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.rql.CountSchemaRulesStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.rql.ShowDataBaseDiscoveryRulesStatementTestCase; import 
org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.rql.ShowDefaultShardingStrategyStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.rql.ShowEncryptRulesStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.rql.ShowReadwriteSplittingRulesStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.rql.ShowRulesUsedResourceStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.rql.ShowShadowAlgorithmsStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.rql.ShowShadowRulesStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.rql.ShowShadowTableRulesStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.rql.ShowShardingAlgorithmsStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.rql.ShowShardingBindingTableRulesStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.rql.ShowShardingBroadcastTableRulesStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.rql.ShowShardingKeyGeneratorsStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.rql.ShowShardingScalingRulesStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.rql.ShowShardingTableNodesStatementTestCase; import 
org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.rql.ShowShardingTableRulesStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.rql.ShowShardingTableRulesUsedKeyGeneratorStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.rql.ShowSingleTableRulesStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.rql.ShowSingleTableStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.rql.ShowUnusedShardingAlgorithmsStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.rql.ShowUnusedShardingKeyGeneratorsStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.dml.CallStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.dml.CopyStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.dml.DeleteStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.dml.InsertStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.dml.MergeStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.dml.SelectStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.dml.UpdateStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.tcl.BeginTransactionStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.tcl.CommitStatementTestCase; import 
org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.tcl.LockStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.tcl.RollbackStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.tcl.SavepointStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.tcl.SetAutoCommitStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.tcl.SetConstraintsStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.tcl.SetTransactionStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.tcl.UnlockStatementTestCase; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.tcl.XATestCase; import javax.xml.bind.annotation.XmlElement; import javax.xml.bind.annotation.XmlRootElement; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.Map; /** * SQL parser test cases. 
*/ @XmlRootElement(name = "sql-parser-test-cases") @Getter public final class SQLParserTestCases { @XmlElement(name = "select") private final List<SelectStatementTestCase> selectTestCases = new LinkedList<>(); @XmlElement(name = "update") private final List<UpdateStatementTestCase> updateTestCases = new LinkedList<>(); @XmlElement(name = "delete") private final List<DeleteStatementTestCase> deleteTestCases = new LinkedList<>(); @XmlElement(name = "insert") private final List<InsertStatementTestCase> insertTestCases = new LinkedList<>(); @XmlElement(name = "create-table") private final List<CreateTableStatementTestCase> createTableTestCases = new LinkedList<>(); @XmlElement(name = "alter-table") private final List<AlterTableStatementTestCase> alterTableTestCases = new LinkedList<>(); @XmlElement(name = "rename-table") private final List<RenameTableStatementTestCase> renameTableStatementTestCases = new LinkedList<>(); @XmlElement(name = "drop-table") private final List<DropTableStatementTestCase> dropTableTestCases = new LinkedList<>(); @XmlElement(name = "truncate") private final List<TruncateStatementTestCase> truncateTestCases = new LinkedList<>(); @XmlElement(name = "create-index") private final List<CreateIndexStatementTestCase> createIndexTestCases = new LinkedList<>(); @XmlElement(name = "alter-index") private final List<AlterIndexStatementTestCase> alterIndexTestCases = new LinkedList<>(); @XmlElement(name = "drop-index") private final List<DropIndexStatementTestCase> dropIndexTestCases = new LinkedList<>(); @XmlElement(name = "set-constraints") private final List<SetConstraintsStatementTestCase> setConstraintsTestCases = new LinkedList<>(); @XmlElement(name = "set-transaction") private final List<SetTransactionStatementTestCase> setTransactionTestCases = new LinkedList<>(); @XmlElement(name = "begin-transaction") private final List<BeginTransactionStatementTestCase> beginTransactionTestCases = new LinkedList<>(); @XmlElement(name = "set-auto-commit") private 
final List<SetAutoCommitStatementTestCase> setAutoCommitTestCases = new LinkedList<>(); @XmlElement(name = "commit") private final List<CommitStatementTestCase> commitTestCases = new LinkedList<>(); @XmlElement(name = "rollback") private final List<RollbackStatementTestCase> rollbackTestCases = new LinkedList<>(); @XmlElement(name = "savepoint") private final List<SavepointStatementTestCase> savepointTestCases = new LinkedList<>(); @XmlElement(name = "grant") private final List<GrantStatementTestCase> grantTestCases = new LinkedList<>(); @XmlElement(name = "revoke") private final List<RevokeStatementTestCase> revokeTestCases = new LinkedList<>(); @XmlElement(name = "create-user") private final List<CreateUserStatementTestCase> createUserTestCases = new LinkedList<>(); @XmlElement(name = "alter-user") private final List<AlterUserStatementTestCase> alterUserTestCases = new LinkedList<>(); @XmlElement(name = "drop-user") private final List<DropUserStatementTestCase> dropUserTestCases = new LinkedList<>(); @XmlElement(name = "rename-user") private final List<RenameUserStatementTestCase> renameUserTestCases = new LinkedList<>(); @XmlElement(name = "deny-user") private final List<DenyUserStatementTestCase> denyUserTestCases = new LinkedList<>(); @XmlElement(name = "create-login") private final List<CreateLoginStatementTestCase> createLoginTestCases = new LinkedList<>(); @XmlElement(name = "alter-login") private final List<AlterLoginStatementTestCase> alterLoginTestCases = new LinkedList<>(); @XmlElement(name = "drop-login") private final List<DropLoginStatementTestCase> dropLoginTestCases = new LinkedList<>(); @XmlElement(name = "create-role") private final List<CreateRoleStatementTestCase> createRoleTestCases = new LinkedList<>(); @XmlElement(name = "alter-role") private final List<AlterRoleStatementTestCase> alterRoleTestCases = new LinkedList<>(); @XmlElement(name = "drop-role") private final List<DropRoleStatementTestCase> dropRoleTestCases = new LinkedList<>(); 
@XmlElement(name = "set-default-role") private final List<SetDefaultRoleStatementTestCase> setDefaultRoleTestCases = new LinkedList<>(); @XmlElement(name = "set-role") private final List<SetRoleStatementTestCase> setRoleTestCases = new LinkedList<>(); @XmlElement(name = "set-password") private final List<SetPasswordStatementTestCase> setPasswordTestCases = new LinkedList<>(); @XmlElement(name = "use") private final List<UseStatementTestCase> useTestCases = new LinkedList<>(); @XmlElement(name = "describe") private final List<ExplainStatementTestCase> describeTestCases = new LinkedList<>(); @XmlElement(name = "show-databases") private final List<ShowDatabasesStatementTestCase> showDatabasesTestCases = new LinkedList<>(); @XmlElement(name = "show-tables") private final List<ShowTablesStatementTestCase> showTablesTestCases = new LinkedList<>(); @XmlElement(name = "show-function-status") private final List<ShowFunctionStatusStatementTestCase> showFunctionStatusStatementTestCases = new LinkedList<>(); @XmlElement(name = "show-procedure-status") private final List<ShowProcedureStatusStatementTestCase> showProcedureStatusStatementTestCases = new LinkedList<>(); @XmlElement(name = "show-procedure-code") private final List<ShowProcedureCodeStatementTestCase> showProcedureCodeStatementTestCases = new LinkedList<>(); @XmlElement(name = "show-relaylog-events") private final List<ShowRelaylogEventsStatementTestCase> showRelaylogEventsStatementTestCases = new LinkedList<>(); @XmlElement(name = "show-slave-hosts") private final List<ShowSlaveHostsStatementTestCase> showSlaveHostsStatementTestCases = new LinkedList<>(); @XmlElement(name = "show-columns") private final List<ShowColumnsStatementTestCase> showColumnsTestCases = new LinkedList<>(); @XmlElement(name = "show-create-table") private final List<ShowCreateTableStatementTestCase> showCreateTableTestCases = new LinkedList<>(); @XmlElement(name = "show-create-trigger") private final List<ShowCreateTriggerStatementTestCase> 
showCreateTriggerTestCases = new LinkedList<>(); @XmlElement(name = "alter-resource-group") private final List<AlterResourceGroupStatementTestCase> alterResourceGroupStatementTestCases = new LinkedList<>(); @XmlElement(name = "create-resource-group") private final List<CreateResourceGroupStatementTestCase> createResourceGroupStatementTestCases = new LinkedList<>(); @XmlElement(name = "drop-resource-group") private final List<DropResourceGroupStatementTestCase> dropResourceGroupStatementTestCases = new LinkedList<>(); @XmlElement(name = "binlog") private final List<BinlogStatementTestCase> binlogStatementTestCases = new LinkedList<>(); @XmlElement(name = "show-create-user") private final List<ShowCreateUserStatementTestCase> showCreateUserTestCases = new LinkedList<>(); @XmlElement(name = "show-table-status") private final List<ShowTableStatusStatementTestCase> showTableStatusTestCases = new LinkedList<>(); @XmlElement(name = "show-index") private final List<ShowIndexStatementTestCase> showIndexTestCases = new LinkedList<>(); @XmlElement(name = "show") private final List<ShowStatementTestCase> showTestCases = new LinkedList<>(); @XmlElement(name = "set-parameter") private final List<SetParameterStatementTestCase> setVariableTestCases = new LinkedList<>(); @XmlElement(name = "common") private final List<CommonStatementTestCase> commonTestCases = new LinkedList<>(); @XmlElement(name = "alter-function") private final List<AlterFunctionStatementTestCase> alterFunctionTestCases = new LinkedList<>(); @XmlElement(name = "alter-database") private final List<AlterDatabaseStatementTestCase> alterDatabaseTestCase = new LinkedList<>(); @XmlElement(name = "alter-dimension") private final List<AlterDimensionStatementTestCase> alterDimensionTestCase = new LinkedList<>(); @XmlElement(name = "alter-procedure") private final List<AlterProcedureStatementTestCase> alterProcedureTestCase = new LinkedList<>(); @XmlElement(name = "alter-server") private final 
List<AlterServerStatementTestCase> alterServerTestCase = new LinkedList<>(); @XmlElement(name = "alter-session") private final List<AlterSessionStatementTestCase> alterSessionTestCase = new LinkedList<>(); @XmlElement(name = "alter-synonym") private final List<AlterSynonymStatementTestCase> alterSynonymTestCase = new LinkedList<>(); @XmlElement(name = "alter-system") private final List<AlterSystemStatementTestCase> alterSystemTestCase = new LinkedList<>(); @XmlElement(name = "create-database") private final List<CreateDatabaseStatementTestCase> createDatabaseTestCase = new LinkedList<>(); @XmlElement(name = "create-database-link") private final List<CreateDatabaseLinkStatementTestCase> createDatabaseLinkTestCase = new LinkedList<>(); @XmlElement(name = "create-dimension") private final List<CreateDimensionStatementTestCase> createDimensionTestCase = new LinkedList<>(); @XmlElement(name = "create-function") private final List<CreateFunctionStatementTestCase> createFunctionTestCase = new LinkedList<>(); @XmlElement(name = "create-procedure") private final List<CreateProcedureStatementTestCase> createProcedureTestCase = new LinkedList<>(); @XmlElement(name = "create-server") private final List<CreateServerStatementTestCase> createServerTestCase = new LinkedList<>(); @XmlElement(name = "create-trigger") private final List<CreateTriggerStatementTestCase> createTriggerTestCase = new LinkedList<>(); @XmlElement(name = "create-view") private final List<CreateViewStatementTestCase> createViewTestCase = new LinkedList<>(); @XmlElement(name = "create-domain") private final List<CreateDomainStatementTestCase> createDomainStatementTestCases = new LinkedList<>(); @XmlElement(name = "create-rule") private final List<CreateRuleStatementTestCase> createRuleStatementTestCases = new LinkedList<>(); @XmlElement(name = "create-type") private final List<CreateTypeStatementTestCase> createTypeStatementTestCases = new LinkedList<>(); @XmlElement(name = "create-extension") private final 
List<CreateExtensionStatementTestCase> createExtensionStatementTestCase = new LinkedList<>(); @XmlElement(name = "alter-extension") private final List<AlterExtensionStatementTestCase> alterExtensionStatementTestCase = new LinkedList<>(); @XmlElement(name = "drop-extension") private final List<DropExtensionStatementTestCase> dropExtensionStatementTestCase = new LinkedList<>(); @XmlElement(name = "declare") private final List<DeclareStatementTestCase> declareStatementTestCase = new LinkedList<>(); @XmlElement(name = "discard") private final List<DiscardStatementTestCase> discardStatementTestCase = new LinkedList<>(); @XmlElement(name = "drop-database") private final List<DropDatabaseStatementTestCase> dropDatabaseTestCase = new LinkedList<>(); @XmlElement(name = "drop-dimension") private final List<DropDimensionStatementTestCase> dropDimensionTestCase = new LinkedList<>(); @XmlElement(name = "drop-function") private final List<DropFunctionStatementTestCase> dropFunctionTestCase = new LinkedList<>(); @XmlElement(name = "drop-procedure") private final List<DropProcedureStatementTestCase> dropProcedureTestCase = new LinkedList<>(); @XmlElement(name = "drop-server") private final List<DropServerStatementTestCase> dropServerTestCase = new LinkedList<>(); @XmlElement(name = "drop-trigger") private final List<DropTriggerStatementTestCase> dropTriggerTestCase = new LinkedList<>(); @XmlElement(name = "drop-domain") private final List<DropDomainStatementTestCase> dropDomainStatementTestCases = new LinkedList<>(); @XmlElement(name = "show-triggers") private final List<ShowTriggersStatementTestCase> showTriggerTestCase = new LinkedList<>(); @XmlElement(name = "drop-view") private final List<DropViewStatementTestCase> dropViewTestCase = new LinkedList<>(); @XmlElement(name = "call") private final List<CallStatementTestCase> callProcedureTestCase = new LinkedList<>(); @XmlElement(name = "copy") private final List<CopyStatementTestCase> copyStatementTestCase = new LinkedList<>(); 
@XmlElement(name = "xa") private final List<XATestCase> xaTestCase = new LinkedList<>(); @XmlElement(name = "merge") private final List<MergeStatementTestCase> mergeTestCase = new LinkedList<>(); @XmlElement(name = "create-sequence") private final List<CreateSequenceStatementTestCase> createSequenceTestCase = new LinkedList<>(); @XmlElement(name = "alter-sequence") private final List<AlterSequenceStatementTestCase> alterSequenceTestCase = new LinkedList<>(); @XmlElement(name = "drop-sequence") private final List<DropSequenceStatementTestCase> dropSequenceTestCase = new LinkedList<>(); @XmlElement(name = "analyze") private final List<AnalyzeStatementTestCase> analyzeTestCase = new LinkedList<>(); @XmlElement(name = "associate-statistics") private final List<AssociateStatisticsStatementTestCase> associateStatisticsTestCase = new LinkedList<>(); @XmlElement(name = "disassociate-statistics") private final List<DisassociateStatisticsStatementTestCase> disassociateStatisticsTestCase = new LinkedList<>(); @XmlElement(name = "audit") private final List<AuditStatementTestCase> auditTestCase = new LinkedList<>(); @XmlElement(name = "no-audit") private final List<NoAuditStatementTestCase> noAuditTestCase = new LinkedList<>(); @XmlElement(name = "comment") private final List<CommentStatementTestCase> commentTestCase = new LinkedList<>(); @XmlElement(name = "flashback-database") private final List<FlashbackDatabaseStatementTestCase> flashbackDatabaseTestCase = new LinkedList<>(); @XmlElement(name = "flashback-table") private final List<FlashbackTableStatementTestCase> flashbackTableTestCase = new LinkedList<>(); @XmlElement(name = "purge") private final List<PurgeStatementTestCase> purgeTestCase = new LinkedList<>(); @XmlElement(name = "rename") private final List<RenameStatementTestCase> renameTestCase = new LinkedList<>(); @XmlElement(name = "add-resource") private final List<AddResourceStatementTestCase> addResourceTestCase = new LinkedList<>(); @XmlElement(name = 
"alter-resource") private final List<AlterResourceStatementTestCase> alterResourceTestCase = new LinkedList<>(); @XmlElement(name = "alter-database-discovery-definition-rule") private final List<AlterDatabaseDiscoveryDefinitionRuleStatementTestCase> alterDatabaseDiscoveryDefinitionRuleTestCases = new LinkedList<>(); @XmlElement(name = "alter-database-discovery-construction-rule") private final List<AlterDatabaseDiscoveryConstructionRuleStatementTestCase> alterDataBaseDiscoveryConstructionRuleTestCase = new LinkedList<>(); @XmlElement(name = "alter-encrypt-rule") private final List<AlterEncryptRuleStatementTestCase> alterEncryptRuleTestCase = new LinkedList<>(); @XmlElement(name = "alter-readwrite-splitting-rule") private final List<AlterReadwriteSplittingRuleStatementTestCase> alterReadwriteSplittingRuleTestCase = new LinkedList<>(); @XmlElement(name = "alter-sharding-binding-table-rules") private final List<AlterShardingBindingTableRulesStatementTestCase> alterShardingBindingTableRulesTestCase = new LinkedList<>(); @XmlElement(name = "alter-sharding-broadcast-table-rules") private final List<AlterShardingBroadcastTableRulesStatementTestCase> alterShardingBroadcastTableRulesTestCase = new LinkedList<>(); @XmlElement(name = "alter-sharding-auto-table-rule") private final List<AlterShardingAutoTableRuleStatementTestCase> alterShardingTableRuleTestCase = new LinkedList<>(); @XmlElement(name = "alter-sharding-table-rule") private final List<AlterShardingTableRuleStatementTestCase> alterShardingTableRuleTestCases = new LinkedList<>(); @XmlElement(name = "create-database-discovery-definition-rule") private final List<CreateDatabaseDiscoveryDefinitionRuleStatementTestCase> createDatabaseDiscoveryDefinitionRuleTestCases = new LinkedList<>(); @XmlElement(name = "create-database-discovery-construction-rule") private final List<CreateDatabaseDiscoveryConstructionRuleStatementTestCase> createDataBaseDiscoveryConstructionRuleTestCase = new LinkedList<>(); @XmlElement(name = 
"create-database-discovery-type") private final List<CreateDatabaseDiscoveryTypeStatementTestCase> createDatabaseDiscoveryTypeTestCases = new LinkedList<>(); @XmlElement(name = "create-database-discovery-heartbeat") private final List<CreateDatabaseDiscoveryHeartbeatStatementTestCase> createDatabaseDiscoveryHeartbeatTestCases = new LinkedList<>(); @XmlElement(name = "alter-database-discovery-heartbeat") private final List<AlterDatabaseDiscoveryHeartbeatStatementTestCase> alterDatabaseDiscoveryHeartbeatTestCases = new LinkedList<>(); @XmlElement(name = "alter-database-discovery-type") private final List<AlterDatabaseDiscoveryTypeStatementTestCase> alterDatabaseDiscoveryTypeTestCases = new LinkedList<>(); @XmlElement(name = "create-encrypt-rule") private final List<CreateEncryptRuleStatementTestCase> createEncryptRuleTestCase = new LinkedList<>(); @XmlElement(name = "create-readwrite-splitting-rule") private final List<CreateReadwriteSplittingRuleStatementTestCase> createReadwriteSplittingRuleTestCase = new LinkedList<>(); @XmlElement(name = "create-sharding-binding-table-rule") private final List<CreateShardingBindingTableRulesStatementTestCase> createShardingBindingTableRulesTestCase = new LinkedList<>(); @XmlElement(name = "create-sharding-broadcast-table-rule") private final List<CreateShardingBroadcastTableRulesStatementTestCase> createShardingBroadcastTableRulesTestCase = new LinkedList<>(); @XmlElement(name = "create-sharding-auto-table-rule") private final List<CreateShardingAutoTableRuleStatementTestCase> createShardingTableRuleTestCase = new LinkedList<>(); @XmlElement(name = "create-sharding-table-rule") private final List<CreateShardingTableRuleStatementTestCase> createShardingTableRuleTestCases = new LinkedList<>(); @XmlElement(name = "drop-database-discovery-rule") private final List<DropDataBaseDiscoveryRuleStatementTestCase> dropDataBaseDiscoveryRuleTestCase = new LinkedList<>(); @XmlElement(name = "drop-database-discovery-type") private final 
List<DropDataBaseDiscoveryTypeStatementTestCase> dropDataBaseDiscoveryTypeTestCases = new LinkedList<>();
    
    // JAXB-mapped element lists: each field collects the test cases parsed from
    // the XML element named in its @XmlElement annotation.
    @XmlElement(name = "drop-database-discovery-heartbeat")
    private final List<DropDataBaseDiscoveryHeartbeatStatementTestCase> dropDataBaseDiscoveryHeartbeatTestCases = new LinkedList<>();
    
    @XmlElement(name = "drop-encrypt-rule")
    private final List<DropEncryptRuleStatementTestCase> dropEncryptRuleTestCase = new LinkedList<>();
    
    @XmlElement(name = "drop-readwrite-splitting-rule")
    private final List<DropReadwriteSplittingRuleStatementTestCase> dropReadwriteSplittingRuleTestCase = new LinkedList<>();
    
    @XmlElement(name = "drop-resource")
    private final List<DropResourceStatementTestCase> dropResourceTestCase = new LinkedList<>();
    
    @XmlElement(name = "drop-sharding-binding-table-rules")
    private final List<DropShardingBindingTableRulesStatementTestCase> dropShardingBindingTableRulesTestCase = new LinkedList<>();
    
    @XmlElement(name = "drop-sharding-broadcast-table-rules")
    private final List<DropShardingBroadcastTableRulesStatementTestCase> dropShardingBroadcastTableRulesTestCase = new LinkedList<>();
    
    @XmlElement(name = "drop-sharding-table-rule")
    private final List<DropShardingTableRuleStatementTestCase> dropShardingTableRuleTestCase = new LinkedList<>();
    
    @XmlElement(name = "show-db-discovery-rules")
    private final List<ShowDataBaseDiscoveryRulesStatementTestCase> showDataBaseDiscoveryRulesTestCase = new LinkedList<>();
    
    @XmlElement(name = "show-encrypt-rules")
    private final List<ShowEncryptRulesStatementTestCase> showEncryptRulesTestCase = new LinkedList<>();
    
    @XmlElement(name = "show-readwrite-splitting-rules")
    private final List<ShowReadwriteSplittingRulesStatementTestCase> showReadwriteSplittingRulesTestCase = new LinkedList<>();
    
    @XmlElement(name = "show-sharding-binding-table-rules")
    private final List<ShowShardingBindingTableRulesStatementTestCase> showShardingBindingTableRulesTestCase = new LinkedList<>();
    
    @XmlElement(name = "show-sharding-broadcast-table-rules")
    private final List<ShowShardingBroadcastTableRulesStatementTestCase> showShardingBroadcastTableRulesTestCase = new LinkedList<>();
    
    @XmlElement(name = "show-sharding-algorithms")
    private final List<ShowShardingAlgorithmsStatementTestCase> showShardingAlgorithmsTestCase = new LinkedList<>();
    
    @XmlElement(name = "show-sharding-table-rules")
    private final List<ShowShardingTableRulesStatementTestCase> showShardingTableRulesTestCase = new LinkedList<>();
    
    // NOTE: intentionally the same test-case type as "show-sharding-table-rules";
    // only the bound element name differs.
    @XmlElement(name = "show-sharding-table-rule")
    private final List<ShowShardingTableRulesStatementTestCase> showShardingTableRuleTestCase = new LinkedList<>();
    
    @XmlElement(name = "show-scaling-list")
    private final List<ShowScalingListStatementTestCase> showScalingListStatementTestCase = new LinkedList<>();
    
    @XmlElement(name = "check-scaling")
    private final List<CheckScalingStatementTestCase> checkScalingStatementTestCase = new LinkedList<>();
    
    @XmlElement(name = "show-scaling-check-algorithms")
    private final List<ShowScalingCheckAlgorithmsStatementTestCase> showScalingCheckAlgorithmsStatementTestCase = new LinkedList<>();
    
    @XmlElement(name = "stop-scaling-source-writing")
    private final List<StopScalingSourceWritingStatementTestCase> stopScalingSourceWritingStatementTestCase = new LinkedList<>();
    
    @XmlElement(name = "restore-scaling-source-writing")
    private final List<RestoreScalingSourceWritingStatementTestCase> restoreScalingSourceWritingStatementTestCase = new LinkedList<>();
    
    @XmlElement(name = "apply-scaling")
    private final List<ApplyScalingStatementTestCase> applyScalingStatementTestCases = new LinkedList<>();
    
    @XmlElement(name = "create-sharding-scaling-rule")
    private final List<CreateShardingScalingRuleStatementTestCase> createShardingScalingRuleStatementTestCases = new LinkedList<>();
    
    @XmlElement(name = "drop-sharding-scaling-rule")
    private final List<DropShardingScalingRuleStatementTestCase> dropShardingScalingRuleStatementTestCases = new LinkedList<>();
    
    @XmlElement(name = "enable-sharding-scaling-rule")
    private final List<EnableShardingScalingRuleStatementTestCase> enableShardingScalingRuleStatementTestCases = new LinkedList<>();
    
    @XmlElement(name = "disable-sharding-scaling-rule")
    private final List<DisableShardingScalingRuleStatementTestCase> disableShardingScalingRuleStatementTestCases = new LinkedList<>();
    
    @XmlElement(name = "show-sharding-scaling-rules")
    private final List<ShowShardingScalingRulesStatementTestCase> showShardingScalingRulesStatementTestCases = new LinkedList<>();
    
    @XmlElement(name = "preview-sql")
    private final List<PreviewStatementTestCase> previewStatementTestCase = new LinkedList<>();
    
    @XmlElement(name = "parse-sql")
    private final List<ParseStatementTestCase> parseStatementAsserts = new LinkedList<>();
    
    @XmlElement(name = "show-variable")
    private final List<ShowVariableStatementTestCase> showVariableStatementTestCase = new LinkedList<>();
    
    @XmlElement(name = "set-variable")
    private final List<SetVariableStatementTestCase> setVariableStatementTestCase = new LinkedList<>();
    
    @XmlElement(name = "set-readwrite-splitting-hint-source")
    private final List<SetReadwriteSplittingHintStatementTestCase> setReadwriteSplittingHintStatementTestCase = new LinkedList<>();
    
    @XmlElement(name = "set-sharding-hint-database-value")
    private final List<SetShardingHintDatabaseValueStatementTestCase> setShardingHintDatabaseValueStatementTestCase = new LinkedList<>();
    
    @XmlElement(name = "add-sharding-hint-database-value")
    private final List<AddShardingHintDatabaseValueStatementTestCase> addShardingHintDatabaseValueStatementTestCase = new LinkedList<>();
    
    @XmlElement(name = "add-sharding-hint-table-value")
    private final List<AddShardingHintTableValueStatementTestCase> addShardingHintTableValueStatementTestCase = new LinkedList<>();
    
    @XmlElement(name = "show-readwrite-splitting-hint-source")
    private final List<ShowReadwriteSplittingHintStatusStatementTestCase> showReadwriteSplittingHintStatusStatementTestCase = new LinkedList<>();
    
    @XmlElement(name = "show-sharding-hint-status")
    private final List<ShowShardingHintStatusStatementTestCase> showShardingHintStatusStatementTestCase = new LinkedList<>();
    
    @XmlElement(name = "clear-readwrite-splitting-hint-source")
    private final List<ClearReadwriteSplittingHintStatementTestCase> clearReadwriteSplittingHintStatementTestCase = new LinkedList<>();
    
    @XmlElement(name = "clear-sharding-hint")
    private final List<ClearShardingHintStatementTestCase> clearShardingHintStatementTestCase = new LinkedList<>();
    
    @XmlElement(name = "clear-hint")
    private final List<ClearHintStatementTestCase> clearHintStatementTestCase = new LinkedList<>();
    
    @XmlElement(name = "create-shadow-rule")
    private final List<CreateShadowRuleStatementTestCase> createShadowRuleTestCase = new LinkedList<>();
    
    @XmlElement(name = "drop-shadow-rule")
    private final List<DropShadowRuleStatementTestCase> dropShadowRuleTestCase = new LinkedList<>();
    
    @XmlElement(name = "alter-shadow-rule")
    private final List<AlterShadowRuleStatementTestCase> alterShadowRuleTestCase = new LinkedList<>();
    
    @XmlElement(name = "alter-shadow-algorithm")
    private final List<AlterShadowAlgorithmStatementTestCase> alterShadowAlgorithmTestCase = new LinkedList<>();
    
    @XmlElement(name = "create-shadow-algorithm")
    private final List<CreateShadowAlgorithmStatementTestCase> createShadowAlgorithmTestCase = new LinkedList<>();
    
    @XmlElement(name = "show-shadow-rules")
    private final List<ShowShadowRulesStatementTestCase> showShadowRulesStatementTestCase = new LinkedList<>();
    
    @XmlElement(name = "show-shadow-algorithms")
    private final List<ShowShadowAlgorithmsStatementTestCase> showShadowAlgorithmsStatementTestCase = new LinkedList<>();
    
    @XmlElement(name = "show-shadow-table-rules")
    private final List<ShowShadowTableRulesStatementTestCase> showShadowTableRulesStatementTestCase = new LinkedList<>();
    
    @XmlElement(name = "drop-shadow-algorithm")
    private final List<DropShadowAlgorithmStatementTestCase> dropShadowAlgorithmStatementTestCase = new LinkedList<>();
    
    @XmlElement(name = "create-service")
    private final List<CreateServiceStatementTestCase> createServiceTestCase = new LinkedList<>();
    
    @XmlElement(name = "alter-service")
    private final List<AlterServiceStatementTestCase> alterServiceTestCase = new LinkedList<>();
    
    @XmlElement(name = "drop-service")
    private final List<DropServiceStatementTestCase> dropServiceTestCase = new LinkedList<>();
    
    @XmlElement(name = "create-schema")
    private final List<CreateSchemaStatementTestCase> createSchemaTestCase = new LinkedList<>();
    
    @XmlElement(name = "alter-schema")
    private final List<AlterSchemaStatementTestCase> alterSchemaTestCase = new LinkedList<>();
    
    @XmlElement(name = "drop-schema")
    private final List<DropSchemaStatementTestCase> dropSchemaTestCase = new LinkedList<>();
    
    @XmlElement(name = "install-component")
    private final List<InstallComponentStatementTestCase> installComponentTestCase = new LinkedList<>();
    
    @XmlElement(name = "flush")
    private final List<FlushStatementTestCase> flushStatementTestCase = new LinkedList<>();
    
    @XmlElement(name = "install-plugin")
    private final List<InstallPluginStatementTestCase> installPluginStatementTestCase = new LinkedList<>();
    
    @XmlElement(name = "show-instance")
    private final List<ShowInstanceStatementTestCase> showInstanceStatementTestCases = new LinkedList<>();
    
    @XmlElement(name = "clone")
    private final List<CloneStatementTestCase> cloneStatementTestCases = new LinkedList<>();
    
    @XmlElement(name = "show-readwrite-splitting-read-resources")
    private final List<ShowReadwriteSplittingReadResourcesStatementTestCase> showReadwriteSplittingReadResourcesStatementTestCases = new LinkedList<>();
    
    @XmlElement(name = "uninstall-component")
    private final List<UninstallComponentStatementTestCase> uninstallComponentStatementTestCases = new LinkedList<>();
    
    @XmlElement(name = "uninstall-plugin")
    private final List<UninstallPluginStatementTestCase> uninstallPluginStatementTestCases = new LinkedList<>();
    
    @XmlElement(name = "show-single-table")
    private final List<ShowSingleTableStatementTestCase> showSingleTableStatementTestCases = new LinkedList<>();
    
    @XmlElement(name = "show-single-table-rules")
    private final List<ShowSingleTableRulesStatementTestCase> showSingleTableRulesStatementTestCases = new LinkedList<>();
    
    @XmlElement(name = "show-sharding-table-nodes")
    private final List<ShowShardingTableNodesStatementTestCase> showShardingTableNodesStatementTestCases = new LinkedList<>();
    
    @XmlElement(name = "set-resource-group")
    private final List<SetResourceGroupStatementTestCase> setResourceGroupStatementTestCases = new LinkedList<>();
    
    @XmlElement(name = "optimize-table")
    private final List<OptimizeTableStatementTestCase> optimizeTableStatementTestCases = new LinkedList<>();
    
    @XmlElement(name = "repair-table")
    private final List<RepairTableStatementTestCase> repairTableStatementTestCases = new LinkedList<>();
    
    @XmlElement(name = "create-sharding-algorithm")
    private final List<CreateShardingAlgorithmStatementTestCase> createShardingAlgorithmStatementTestCases = new LinkedList<>();
    
    @XmlElement(name = "create-sharding-key-generator")
    private final List<CreateShardingKeyGeneratorStatementTestCase> createShardingKeyGeneratorStatementTestCases = new LinkedList<>();
    
    @XmlElement(name = "create-default-sharding-strategy")
    private final List<CreateDefaultShardingStrategyStatementTestCase> createDefaultShardingStrategyStatementTestCases = new LinkedList<>();
    
    @XmlElement(name = "alter-default-sharding-strategy")
    private final List<AlterDefaultShardingStrategyStatementTestCase> alterDefaultShardingStrategyStatementTestCases = new LinkedList<>();
    
    @XmlElement(name = "create-default-shadow-algorithm")
    private final List<CreateDefaultShadowAlgorithmStatementTestCase> createDefaultShadowAlgorithmStatementTestCases = new LinkedList<>();
    
    @XmlElement(name = "show-replicas")
    private final List<ShowReplicasStatementTestCase> showReplicasStatementTestCases = new LinkedList<>();
    
    @XmlElement(name = "show-replica-status")
    private final List<ShowReplicaStatusStatementTestCase> showReplicaStatusStatementTestCases = new LinkedList<>();
    
    @XmlElement(name = "show-slave-status")
    private final List<ShowSlaveStatusStatementTestCase> showSlaveStatusStatementTestCases = new LinkedList<>();
    
    @XmlElement(name = "alter-sharding-algorithm")
    private final List<AlterShardingAlgorithmStatementTestCase> alterShardingAlgorithmStatementTestCases = new LinkedList<>();
    
    @XmlElement(name = "alter-sharding-key-generator")
    private final List<AlterShardingKeyGeneratorStatementTestCase> alterShardingKeyGeneratorStatementTestCases = new LinkedList<>();
    
    @XmlElement(name = "drop-sharding-key-generator")
    private final List<DropShardingKeyGeneratorStatementTestCase> dropShardingKeyGeneratorStatementTestCases = new LinkedList<>();
    
    @XmlElement(name = "drop-default-sharding-strategy")
    private final List<DropDefaultShardingStrategyStatementTestCase> dropDefaultShardingStrategyStatementTestCases = new LinkedList<>();
    
    @XmlElement(name = "reset")
    private final List<ResetStatementTestCase> resetStatementTestCases = new LinkedList<>();
    
    @XmlElement(name = "reset-persist")
    private final List<ResetPersistStatementTestCase> resetPersistStatementTestCases = new LinkedList<>();
    
    @XmlElement(name = "cache-index")
    private final List<CacheIndexStatementTestCase> cacheIndexStatementTestCases = new LinkedList<>();
    
    @XmlElement(name = "load-index")
    private final List<LoadIndexInfoStatementTestCase> loadIndexInfoStatementTestCases = new LinkedList<>();
    
    @XmlElement(name = "kill")
    private final List<KillStatementTestCase> killStatementTestCases = new LinkedList<>();
    
    @XmlElement(name = "shutdown")
    private final List<ShutdownStatementTestCase> shutdownStatementTestCases = new LinkedList<>();
    
    @XmlElement(name = "create-default-single-table")
    private final List<CreateDefaultSingleTableRuleStatementTestCase> createDefaultSingleTableRuleStatementTestCases = new LinkedList<>();
    
    @XmlElement(name = "alter-default-single-table")
    private final List<AlterDefaultSingleTableRuleStatementTestCase> alterDefaultSingleTableRuleStatementTestCases = new LinkedList<>();
    
    @XmlElement(name = "drop-default-single-table")
    private final List<DropDefaultSingleTableRuleStatementTestCase> dropDefaultSingleTableRuleStatementTestCases = new LinkedList<>();
    
    @XmlElement(name = "show-open-tables")
    private final List<ShowOpenTablesStatementTestCase> showOpenTablesStatementTestCases = new LinkedList<>();
    
    @XmlElement(name = "check-table")
    private final List<CheckTableStatementTestCase> checkTableTestCases = new LinkedList<>();
    
    @XmlElement(name = "checksum-table")
    private final List<ChecksumTableStatementTestCase> checksumTableTestCases = new LinkedList<>();
    
    @XmlElement(name = "show-status")
    private final List<ShowStatusStatementTestCase> showStatusStatementTestCases = new LinkedList<>();
    
    @XmlElement(name = "refresh-table-metadata")
    private final List<RefreshTableMetadataStatementTestCase> refreshTableMetadataStatementTestCases = new LinkedList<>();
    
    @XmlElement(name = "show-events")
    private final List<ShowEventsStatementTestCase> showEventsStatementTestCases = new LinkedList<>();
    
    @XmlElement(name = "show-character-set")
    private final List<ShowCharacterSetStatementTestCase> showCharacterSetStatementTestCases = new LinkedList<>();
    
    @XmlElement(name = "show-collation")
    private final List<ShowCollationStatementTestCase> showCollationStatementTestCases = new LinkedList<>();
    
    @XmlElement(name = "show-variables")
    private final List<ShowVariablesStatementTestCase> showVariablesStatementTestCases = new LinkedList<>();
    
    @XmlElement(name = "show-sharding-key-generators")
    private final List<ShowShardingKeyGeneratorsStatementTestCase> showShardingKeyGeneratorsStatementTestCases = new LinkedList<>();
    
    @XmlElement(name = "show-default-sharding-strategy")
    private final List<ShowDefaultShardingStrategyStatementTestCase> showDefaultShardingStrategyStatementTestCases = new LinkedList<>();
    
    @XmlElement(name = "delimiter")
    private final List<DelimiterStatementTestCase> delimiterStatementTestCases = new LinkedList<>();
    
    @XmlElement(name = "show-sql-parser-rule")
    private final List<ShowSQLParserRuleStatementTestCase> showSQLParserRuleStatementTestCases = new LinkedList<>();
    
    @XmlElement(name = "show-authority-rule")
    private final List<ShowAuthorityRuleStatementTestCase> showAuthorityRuleStatementTestCases = new LinkedList<>();
    
    @XmlElement(name = "show-transaction-rule")
    private final List<ShowTransactionRuleStatementTestCase> showTransactionRuleStatementTestCases = new LinkedList<>();
    
    @XmlElement(name = "show-traffic-rules")
    private final List<ShowTrafficRulesStatementTestCase> showTrafficRulesStatementTestCases = new LinkedList<>();
    
    @XmlElement(name = "create-traffic-rule")
    private final List<CreateTrafficRuleStatementTestCase> createTrafficRulesStatementTestCases = new LinkedList<>();
    
    @XmlElement(name = "alter-traffic-rule")
    private final List<AlterTrafficRuleStatementTestCase> alterTrafficRulesStatementTestCases = new LinkedList<>();
    
    @XmlElement(name = "alter-sql-parser-rule")
    private final List<AlterSQLParserRuleStatementTestCase> alterSQLParserRuleStatementTestCases = new LinkedList<>();
    
    @XmlElement(name = "drop-traffic-rule")
    private final List<DropTrafficRuleStatementTestCase> dropTrafficRuleStatementTestCases = new LinkedList<>();
    
    @XmlElement(name = "label-instance")
    private final List<LabelInstanceStatementTestCase> labelStatementTestCases = new LinkedList<>();
    
    @XmlElement(name = "unlabel-instance")
    private final List<UnlabelInstanceStatementTestCase> unlabelStatementTestCases = new LinkedList<>();
    
    @XmlElement(name = "prepare-distsql")
    private final List<PrepareDistSQLStatementTestCase> prepareDistSQLStatementTestCases = new LinkedList<>();
    
    @XmlElement(name = "apply-distsql")
    private final List<ApplyDistSQLStatementTestCase> applyDistSQLStatementTestCases = new LinkedList<>();
    
    @XmlElement(name = "discard-distsql")
    private final List<DiscardDistSQLStatementTestCase> discardDistSQLStatementTestCases = new LinkedList<>();
    
    @XmlElement(name = "show-table-metadata")
    private final List<ShowTableMetadataStatementTestCase> showTableMetadataStatementTestCases = new LinkedList<>();
    
    @XmlElement(name = "create-conversion")
    private final List<CreateConversionStatementTestCase> createConversionStatementTestCase = new LinkedList<>();
    
    @XmlElement(name = "drop-conversion")
    private final List<DropConversionStatementTestCase> dropConversionStatementTestCase = new LinkedList<>();
    
    @XmlElement(name = "alter-domain")
    private final List<AlterDomainStatementTestCase> alterDomainStatementTestCase = new LinkedList<>();
    
    @XmlElement(name = "alter-aggregate")
    private final List<AlterAggregateStatementTestCase> alterAggregateStatementTestCase = new LinkedList<>();
    
    @XmlElement(name = "alter-conversion")
    private final List<AlterConversionStatementTestCase> alterConversionStatementTestCase = new LinkedList<>();
    
    @XmlElement(name = "alter-collation")
    private final List<AlterCollationStatementTestCase> alterCollationStatementTestCase = new LinkedList<>();
    
    @XmlElement(name = "alter-default-privileges")
    private final List<AlterDefaultPrivilegesTestCase> alterDefaultPrivilegesTestCase = new LinkedList<>();
    
    @XmlElement(name = "alter-foreign-data-wrapper")
    private final List<AlterForeignDataWrapperTestCase> alterForeignDataWrapperTestCase = new LinkedList<>();
    
    @XmlElement(name = "alter-foreign-table")
    private final List<AlterForeignTableTestCase> alterForeignTableTestCase = new LinkedList<>();
    
    @XmlElement(name = "alter-group")
    private final List<AlterGroupStatementTestCase> alterGroupStatementTestCase = new LinkedList<>();
    
    @XmlElement(name = "alter-materialized-view")
    private final List<AlterMaterializedViewStatementTestCase> alterMaterializedViewStatementTestCase = new LinkedList<>();
    
    @XmlElement(name = "create-text-search")
    private final List<CreateTextSearchStatementTestCase> createTextSearchStatementTestCases = new LinkedList<>();
    
    @XmlElement(name = "alter-text-search")
    private final List<AlterTextSearchStatementTestCase> alterTextSearchStatementTestCases = new LinkedList<>();
    
    @XmlElement(name = "create-language")
    private final List<CreateLanguageStatementTestCase> createLanguageStatementTestCases = new LinkedList<>();
    
    @XmlElement(name = "alter-language")
    private final List<AlterLanguageStatementTestCase> alterLanguageStatementTestCases = new LinkedList<>();
    
    @XmlElement(name = "drop-language")
    private final List<DropLanguageStatementTestCase> dropLanguageStatementTestCases = new LinkedList<>();
    
    @XmlElement(name = "help")
    private final List<HelpStatementTestCase> helpStatementTestCases = new LinkedList<>();
    
    @XmlElement(name = "show-unused-sharding-algorithms")
    private final List<ShowUnusedShardingAlgorithmsStatementTestCase> showUnusedShardingAlgorithmsStatementTestCases = new LinkedList<>();
    
    @XmlElement(name = "count-schema-rules")
    private final List<CountSchemaRulesStatementTestCase> countSchemaRulesStatementTestCases = new LinkedList<>();
    
    @XmlElement(name = "show-unused-sharding-key-generators")
    private final List<ShowUnusedShardingKeyGeneratorsStatementTestCase> showUnusedShardingKeyGeneratorsStatementTestCases = new LinkedList<>();
    
    @XmlElement(name = "show-binlog-events")
    private final List<ShowBinlogEventsStatementTestCase> showBinlogEventsStatementTestCases = new LinkedList<>();
    
    @XmlElement(name = "lock")
    private final List<LockStatementTestCase> lockStatementTestCases = new LinkedList<>();
    
    @XmlElement(name = "unlock")
    private final List<UnlockStatementTestCase> unlockStatementTestCases = new LinkedList<>();
    
    @XmlElement(name = "export-schema-config")
    private final List<ExportSchemaConfigurationStatementTestCase> exportSchemaConfigurationStatementTestCases = new LinkedList<>();
    
    @XmlElement(name = "show-rules-used-resource")
    private final List<ShowRulesUsedResourceStatementTestCase> showRulesUsedResourceStatementTestCases = new LinkedList<>();
    
    @XmlElement(name = "show-sharding-table-rules-used-key-generator")
    // declaration continues on the following source line
    private final
List<ShowShardingTableRulesUsedKeyGeneratorStatementTestCase> showShardingTableRulesUsedKeyGeneratorStatementTestCases = new LinkedList<>();
    
    @XmlElement(name = "prepared")
    private final List<PreparedStatementTestCase> preparedStatementTestCases = new LinkedList<>();
    
    @XmlElement(name = "set-user")
    private final List<SetUserStatementTestCase> setUserStatementTestCases = new LinkedList<>();
    
    /**
     * Get all SQL parser test cases.
     *
     * @return all SQL parser test cases, keyed by SQL case ID
     */
    // CHECKSTYLE:OFF
    public Map<String, SQLParserTestCase> getAllSQLParserTestCases() {
        // Every category list is folded into a single map; putAll() fails fast
        // when the same SQL case ID occurs in more than one list.
        Map<String, SQLParserTestCase> allTestCases = new HashMap<>();
        putAll(selectTestCases, allTestCases);
        putAll(updateTestCases, allTestCases);
        putAll(deleteTestCases, allTestCases);
        putAll(insertTestCases, allTestCases);
        putAll(createTableTestCases, allTestCases);
        putAll(alterTableTestCases, allTestCases);
        putAll(dropTableTestCases, allTestCases);
        putAll(truncateTestCases, allTestCases);
        putAll(createIndexTestCases, allTestCases);
        putAll(alterIndexTestCases, allTestCases);
        putAll(dropIndexTestCases, allTestCases);
        putAll(setConstraintsTestCases, allTestCases);
        putAll(setTransactionTestCases, allTestCases);
        putAll(beginTransactionTestCases, allTestCases);
        putAll(setAutoCommitTestCases, allTestCases);
        putAll(commitTestCases, allTestCases);
        putAll(rollbackTestCases, allTestCases);
        putAll(savepointTestCases, allTestCases);
        putAll(grantTestCases, allTestCases);
        putAll(revokeTestCases, allTestCases);
        putAll(createUserTestCases, allTestCases);
        putAll(alterUserTestCases, allTestCases);
        putAll(dropUserTestCases, allTestCases);
        putAll(renameUserTestCases, allTestCases);
        putAll(denyUserTestCases, allTestCases);
        putAll(createLoginTestCases, allTestCases);
        putAll(alterLoginTestCases, allTestCases);
        putAll(dropLoginTestCases, allTestCases);
        putAll(createRoleTestCases, allTestCases);
        putAll(alterRoleTestCases, allTestCases);
        putAll(dropRoleTestCases, allTestCases);
        putAll(setDefaultRoleTestCases, allTestCases);
        putAll(setRoleTestCases, allTestCases);
        putAll(setPasswordTestCases, allTestCases);
        putAll(useTestCases, allTestCases);
        putAll(describeTestCases, allTestCases);
        putAll(showDatabasesTestCases, allTestCases);
        putAll(showTablesTestCases, allTestCases);
        putAll(showFunctionStatusStatementTestCases, allTestCases);
        putAll(showProcedureStatusStatementTestCases, allTestCases);
        putAll(showRelaylogEventsStatementTestCases, allTestCases);
        putAll(showSlaveHostsStatementTestCases, allTestCases);
        putAll(showProcedureCodeStatementTestCases, allTestCases);
        putAll(showColumnsTestCases, allTestCases);
        putAll(showCreateTableTestCases, allTestCases);
        putAll(showCreateTriggerTestCases, allTestCases);
        putAll(showCreateUserTestCases, allTestCases);
        putAll(showTableStatusTestCases, allTestCases);
        putAll(showIndexTestCases, allTestCases);
        putAll(showTestCases, allTestCases);
        putAll(setVariableTestCases, allTestCases);
        putAll(commonTestCases, allTestCases);
        putAll(alterFunctionTestCases, allTestCases);
        putAll(alterServerTestCase, allTestCases);
        putAll(alterSessionTestCase, allTestCases);
        putAll(alterSynonymTestCase, allTestCases);
        putAll(alterSystemTestCase, allTestCases);
        putAll(alterProcedureTestCase, allTestCases);
        putAll(alterDatabaseTestCase, allTestCases);
        putAll(alterDimensionTestCase, allTestCases);
        putAll(createViewTestCase, allTestCases);
        putAll(createTriggerTestCase, allTestCases);
        putAll(createServerTestCase, allTestCases);
        putAll(createProcedureTestCase, allTestCases);
        putAll(createFunctionTestCase, allTestCases);
        putAll(createDatabaseTestCase, allTestCases);
        putAll(createDatabaseLinkTestCase, allTestCases);
        putAll(createDimensionTestCase, allTestCases);
        putAll(dropDimensionTestCase, allTestCases);
        putAll(dropViewTestCase, allTestCases);
        putAll(dropTriggerTestCase, allTestCases);
        putAll(showTriggerTestCase, allTestCases);
        putAll(dropServerTestCase, allTestCases);
        putAll(dropProcedureTestCase, allTestCases);
        putAll(dropFunctionTestCase, allTestCases);
        putAll(dropDatabaseTestCase, allTestCases);
        putAll(callProcedureTestCase, allTestCases);
        putAll(copyStatementTestCase, allTestCases);
        putAll(xaTestCase, allTestCases);
        putAll(mergeTestCase, allTestCases);
        putAll(createSequenceTestCase, allTestCases);
        putAll(alterSequenceTestCase, allTestCases);
        putAll(dropSequenceTestCase, allTestCases);
        putAll(analyzeTestCase, allTestCases);
        putAll(associateStatisticsTestCase, allTestCases);
        putAll(disassociateStatisticsTestCase, allTestCases);
        putAll(auditTestCase, allTestCases);
        putAll(noAuditTestCase, allTestCases);
        putAll(commentTestCase, allTestCases);
        putAll(flashbackDatabaseTestCase, allTestCases);
        putAll(flashbackTableTestCase, allTestCases);
        putAll(purgeTestCase, allTestCases);
        putAll(renameTestCase, allTestCases);
        putAll(addResourceTestCase, allTestCases);
        putAll(alterResourceTestCase, allTestCases);
        putAll(alterDatabaseDiscoveryDefinitionRuleTestCases, allTestCases);
        putAll(alterDataBaseDiscoveryConstructionRuleTestCase, allTestCases);
        putAll(alterEncryptRuleTestCase, allTestCases);
        putAll(alterReadwriteSplittingRuleTestCase, allTestCases);
        putAll(alterShardingBindingTableRulesTestCase, allTestCases);
        putAll(alterShardingBroadcastTableRulesTestCase, allTestCases);
        putAll(alterShardingTableRuleTestCase, allTestCases);
        putAll(createDatabaseDiscoveryDefinitionRuleTestCases, allTestCases);
        putAll(createDataBaseDiscoveryConstructionRuleTestCase, allTestCases);
        putAll(createDatabaseDiscoveryTypeTestCases, allTestCases);
        putAll(createDatabaseDiscoveryHeartbeatTestCases, allTestCases);
        putAll(alterDatabaseDiscoveryHeartbeatTestCases, allTestCases);
        putAll(alterDatabaseDiscoveryTypeTestCases, allTestCases);
        putAll(createEncryptRuleTestCase, allTestCases);
        putAll(createReadwriteSplittingRuleTestCase, allTestCases);
        putAll(createShardingBindingTableRulesTestCase, allTestCases);
        putAll(createShardingBroadcastTableRulesTestCase, allTestCases);
        putAll(createShardingTableRuleTestCase, allTestCases);
        putAll(dropDataBaseDiscoveryRuleTestCase, allTestCases);
        putAll(dropDataBaseDiscoveryTypeTestCases, allTestCases);
        putAll(dropDataBaseDiscoveryHeartbeatTestCases, allTestCases);
        putAll(dropResourceTestCase, allTestCases);
        putAll(dropEncryptRuleTestCase, allTestCases);
        putAll(dropReadwriteSplittingRuleTestCase, allTestCases);
        putAll(dropShardingBindingTableRulesTestCase, allTestCases);
        putAll(dropShardingBroadcastTableRulesTestCase, allTestCases);
        putAll(dropShardingTableRuleTestCase, allTestCases);
        putAll(showDataBaseDiscoveryRulesTestCase, allTestCases);
        putAll(showEncryptRulesTestCase, allTestCases);
        putAll(showReadwriteSplittingRulesTestCase, allTestCases);
        putAll(showShardingBindingTableRulesTestCase, allTestCases);
        putAll(showShardingBroadcastTableRulesTestCase, allTestCases);
        putAll(showShardingAlgorithmsTestCase, allTestCases);
        putAll(showShardingTableRulesTestCase, allTestCases);
        putAll(showShardingTableRuleTestCase, allTestCases);
        putAll(showScalingListStatementTestCase, allTestCases);
        putAll(checkScalingStatementTestCase, allTestCases);
        putAll(showScalingCheckAlgorithmsStatementTestCase, allTestCases);
        putAll(stopScalingSourceWritingStatementTestCase, allTestCases);
        putAll(restoreScalingSourceWritingStatementTestCase, allTestCases);
        putAll(applyScalingStatementTestCases, allTestCases);
        putAll(createShardingScalingRuleStatementTestCases, allTestCases);
        putAll(dropShardingScalingRuleStatementTestCases, allTestCases);
        putAll(enableShardingScalingRuleStatementTestCases, allTestCases);
        putAll(disableShardingScalingRuleStatementTestCases, allTestCases);
        putAll(showShardingScalingRulesStatementTestCases, allTestCases);
        putAll(showVariableStatementTestCase, allTestCases);
        putAll(setVariableStatementTestCase, allTestCases);
        putAll(previewStatementTestCase, allTestCases);
        putAll(parseStatementAsserts, allTestCases);
        putAll(setReadwriteSplittingHintStatementTestCase, allTestCases);
        putAll(setShardingHintDatabaseValueStatementTestCase, allTestCases);
        putAll(addShardingHintDatabaseValueStatementTestCase, allTestCases);
        putAll(addShardingHintTableValueStatementTestCase, allTestCases);
        putAll(showReadwriteSplittingHintStatusStatementTestCase, allTestCases);
        putAll(showShardingHintStatusStatementTestCase, allTestCases);
        putAll(clearReadwriteSplittingHintStatementTestCase, allTestCases);
        putAll(clearShardingHintStatementTestCase, allTestCases);
        putAll(clearHintStatementTestCase, allTestCases);
        putAll(createShadowRuleTestCase, allTestCases);
        putAll(dropShadowRuleTestCase, allTestCases);
        putAll(alterShadowRuleTestCase, allTestCases);
        putAll(alterShadowAlgorithmTestCase, allTestCases);
        putAll(showShadowRulesStatementTestCase, allTestCases);
        putAll(showShadowTableRulesStatementTestCase, allTestCases);
        putAll(showShadowAlgorithmsStatementTestCase, allTestCases);
        putAll(dropShadowAlgorithmStatementTestCase, allTestCases);
        putAll(createServiceTestCase, allTestCases);
        putAll(alterServiceTestCase, allTestCases);
        putAll(dropServiceTestCase, allTestCases);
        putAll(createSchemaTestCase, allTestCases);
        putAll(alterSchemaTestCase, allTestCases);
        putAll(dropSchemaTestCase, allTestCases);
        putAll(installComponentTestCase, allTestCases);
        putAll(flushStatementTestCase, allTestCases);
        putAll(installPluginStatementTestCase, allTestCases);
        putAll(showInstanceStatementTestCases, allTestCases);
        putAll(cloneStatementTestCases, allTestCases);
        putAll(showReadwriteSplittingReadResourcesStatementTestCases, allTestCases);
        putAll(uninstallComponentStatementTestCases, allTestCases);
        putAll(alterResourceGroupStatementTestCases, allTestCases);
        putAll(createResourceGroupStatementTestCases, allTestCases);
        putAll(dropResourceGroupStatementTestCases, allTestCases);
        putAll(binlogStatementTestCases, allTestCases);
        putAll(uninstallPluginStatementTestCases, allTestCases);
        putAll(showSingleTableStatementTestCases, allTestCases);
        putAll(showSingleTableRulesStatementTestCases, allTestCases);
        putAll(showShardingTableNodesStatementTestCases, allTestCases);
        putAll(setResourceGroupStatementTestCases, allTestCases);
        putAll(optimizeTableStatementTestCases, allTestCases);
        putAll(repairTableStatementTestCases, allTestCases);
        putAll(createShardingAlgorithmStatementTestCases, allTestCases);
        putAll(createShardingKeyGeneratorStatementTestCases, allTestCases);
        putAll(createDefaultShardingStrategyStatementTestCases, allTestCases);
        putAll(alterDefaultShardingStrategyStatementTestCases, allTestCases);
        putAll(createShardingTableRuleTestCases, allTestCases);
        putAll(alterShardingTableRuleTestCases, allTestCases);
        putAll(resetStatementTestCases, allTestCases);
        putAll(resetPersistStatementTestCases, allTestCases);
        putAll(showReplicasStatementTestCases, allTestCases);
        putAll(showReplicaStatusStatementTestCases, allTestCases);
        putAll(showSlaveStatusStatementTestCases, allTestCases);
        putAll(alterShardingAlgorithmStatementTestCases, allTestCases);
        putAll(alterShardingKeyGeneratorStatementTestCases, allTestCases);
        putAll(killStatementTestCases, allTestCases);
        putAll(createDefaultShadowAlgorithmStatementTestCases, allTestCases);
        putAll(cacheIndexStatementTestCases, allTestCases);
        putAll(loadIndexInfoStatementTestCases, allTestCases);
        putAll(createShadowAlgorithmTestCase, allTestCases);
        putAll(createDefaultSingleTableRuleStatementTestCases, allTestCases);
        putAll(alterDefaultSingleTableRuleStatementTestCases, allTestCases);
        putAll(dropDefaultSingleTableRuleStatementTestCases, allTestCases);
        putAll(shutdownStatementTestCases, allTestCases);
        putAll(showOpenTablesStatementTestCases, allTestCases);
        putAll(showStatusStatementTestCases, allTestCases);
        putAll(checkTableTestCases, allTestCases);
        putAll(checksumTableTestCases, allTestCases);
        putAll(refreshTableMetadataStatementTestCases, allTestCases);
        putAll(showEventsStatementTestCases, allTestCases);
        putAll(showCharacterSetStatementTestCases, allTestCases);
        putAll(showCollationStatementTestCases, allTestCases);
        putAll(showVariablesStatementTestCases, allTestCases);
        putAll(showShardingKeyGeneratorsStatementTestCases, allTestCases);
        putAll(showDefaultShardingStrategyStatementTestCases, allTestCases);
        putAll(dropShardingKeyGeneratorStatementTestCases, allTestCases);
        putAll(dropDefaultShardingStrategyStatementTestCases, allTestCases);
        putAll(delimiterStatementTestCases, allTestCases);
        putAll(dropDomainStatementTestCases, allTestCases);
        putAll(showSQLParserRuleStatementTestCases, allTestCases);
        putAll(createDomainStatementTestCases, allTestCases);
        putAll(createRuleStatementTestCases, allTestCases);
        putAll(showAuthorityRuleStatementTestCases, allTestCases);
        putAll(showTransactionRuleStatementTestCases, allTestCases);
        putAll(showTrafficRulesStatementTestCases, allTestCases);
        putAll(createTrafficRulesStatementTestCases, allTestCases);
        putAll(alterTrafficRulesStatementTestCases, allTestCases);
        putAll(alterSQLParserRuleStatementTestCases, allTestCases);
        putAll(createTypeStatementTestCases, allTestCases);
        putAll(createConversionStatementTestCase, allTestCases);
        putAll(dropConversionStatementTestCase, allTestCases);
        putAll(alterDomainStatementTestCase, allTestCases);
        putAll(alterAggregateStatementTestCase, allTestCases);
        putAll(alterConversionStatementTestCase, allTestCases);
        putAll(alterCollationStatementTestCase, allTestCases);
        putAll(alterDefaultPrivilegesTestCase, allTestCases);
        putAll(alterForeignDataWrapperTestCase, allTestCases);
        putAll(alterForeignTableTestCase, allTestCases);
        putAll(alterGroupStatementTestCase, allTestCases);
        putAll(alterMaterializedViewStatementTestCase, allTestCases);
        putAll(createTextSearchStatementTestCases, allTestCases);
        putAll(alterTextSearchStatementTestCases, allTestCases);
        putAll(createLanguageStatementTestCases, allTestCases);
        putAll(alterLanguageStatementTestCases, allTestCases);
        putAll(dropLanguageStatementTestCases, allTestCases);
        putAll(showTableMetadataStatementTestCases, allTestCases);
        putAll(dropTrafficRuleStatementTestCases, allTestCases);
        putAll(labelStatementTestCases, allTestCases);
        putAll(unlabelStatementTestCases, allTestCases);
        putAll(prepareDistSQLStatementTestCases, allTestCases);
        putAll(applyDistSQLStatementTestCases, allTestCases);
        putAll(discardDistSQLStatementTestCases, allTestCases);
        putAll(helpStatementTestCases, allTestCases);
        putAll(showUnusedShardingAlgorithmsStatementTestCases, allTestCases);
        putAll(showUnusedShardingKeyGeneratorsStatementTestCases, allTestCases);
        putAll(renameTableStatementTestCases, allTestCases);
        putAll(showBinlogEventsStatementTestCases, allTestCases);
        putAll(createExtensionStatementTestCase, allTestCases);
        putAll(countSchemaRulesStatementTestCases, allTestCases);
        putAll(alterExtensionStatementTestCase, allTestCases);
        putAll(dropExtensionStatementTestCase, allTestCases);
        putAll(declareStatementTestCase, allTestCases);
        putAll(discardStatementTestCase, allTestCases);
        putAll(lockStatementTestCases, allTestCases);
        putAll(unlockStatementTestCases, allTestCases);
        putAll(exportSchemaConfigurationStatementTestCases, allTestCases);
        putAll(showRulesUsedResourceStatementTestCases, allTestCases);
        putAll(preparedStatementTestCases, allTestCases);
        putAll(showShardingTableRulesUsedKeyGeneratorStatementTestCases, allTestCases);
        putAll(setUserStatementTestCases, allTestCases);
        return allTestCases;
    }
    // CHECKSTYLE:ON
    
    /**
     * Merge the given test cases into the target map, failing fast when any
     * SQL case ID is already present in the target.
     */
    private void putAll(final List<? extends SQLParserTestCase> sqlParserTestCases, final Map<String, SQLParserTestCase> target) {
        Map<String, SQLParserTestCase> indexedById = getSQLParserTestCases(sqlParserTestCases);
        Collection<String> duplicatedIds = new HashSet<>(indexedById.keySet());
        duplicatedIds.retainAll(target.keySet());
        Preconditions.checkState(duplicatedIds.isEmpty(), "Find duplicated SQL Case IDs: %s", duplicatedIds);
        target.putAll(indexedById);
    }
    
    /**
     * Index the given test cases by SQL case ID, failing fast on a duplicated
     * ID within the same list.
     */
    private Map<String, SQLParserTestCase> getSQLParserTestCases(final List<? extends SQLParserTestCase> sqlParserTestCases) {
        // Presized with load factor 1 so exactly size() entries fit without a rehash.
        Map<String, SQLParserTestCase> indexedById = new HashMap<>(sqlParserTestCases.size(), 1);
        for (SQLParserTestCase testCase : sqlParserTestCases) {
            Preconditions.checkState(!indexedById.containsKey(testCase.getSqlCaseId()), "Find duplicated SQL Case ID: %s", testCase.getSqlCaseId());
            indexedById.put(testCase.getSqlCaseId(), testCase);
        }
        return indexedById;
    }
}
/******************************************************************************* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. ******************************************************************************/ package org.apache.sling.scripting.sightly.java.compiler.impl; import java.util.Map; import java.util.Set; import org.apache.sling.scripting.sightly.compiler.RuntimeFunction; import org.apache.sling.scripting.sightly.compiler.expression.ExpressionNode; import org.apache.sling.scripting.sightly.compiler.expression.SideEffectVisitor; import org.apache.sling.scripting.sightly.compiler.expression.nodes.ArrayLiteral; import org.apache.sling.scripting.sightly.compiler.expression.nodes.BinaryOperation; import org.apache.sling.scripting.sightly.compiler.expression.nodes.BooleanConstant; import org.apache.sling.scripting.sightly.compiler.expression.nodes.Identifier; import org.apache.sling.scripting.sightly.compiler.expression.nodes.MapLiteral; import org.apache.sling.scripting.sightly.compiler.expression.nodes.NullLiteral; import org.apache.sling.scripting.sightly.compiler.expression.nodes.NumericConstant; import org.apache.sling.scripting.sightly.compiler.expression.nodes.PropertyAccess; import 
org.apache.sling.scripting.sightly.compiler.expression.nodes.RuntimeCall;
import org.apache.sling.scripting.sightly.compiler.expression.nodes.StringConstant;
import org.apache.sling.scripting.sightly.compiler.expression.nodes.TernaryOperator;
import org.apache.sling.scripting.sightly.compiler.expression.nodes.UnaryOperation;
import org.apache.sling.scripting.sightly.compiler.expression.nodes.UnaryOperator;
import org.apache.sling.scripting.sightly.java.compiler.JavaEscapeUtils;
import org.apache.sling.scripting.sightly.java.compiler.impl.operator.BinaryOpGen;
import org.apache.sling.scripting.sightly.java.compiler.impl.operator.Operators;
import org.apache.sling.scripting.sightly.java.compiler.impl.operator.UnaryOpGen;

/**
 * Builds expressions within a sling source file.
 *
 * <p>Expression-tree visitor that emits Java source for each expression node by
 * delegating to a {@link JavaSource} builder. The order of the {@code source}
 * calls inside each {@code visit} method determines the order of the generated
 * code, so statement order here is significant.
 */
public final class ExpressionTranslator extends SideEffectVisitor {

    // Sink that accumulates the generated Java source text.
    private final JavaSource source;
    // Maps HTL identifiers to the safe Java variable names assigned for them.
    private final VariableAnalyzer analyzer;
    // Inferred type information for expression nodes (used to pick optimized code paths).
    private final TypeInfo typeInfo;
    // Fully-qualified class names known to be imported in the generated class;
    // used to shorten use-API class references (see visit(RuntimeCall)).
    private final Set<String> imports;

    private ExpressionTranslator(JavaSource source, VariableAnalyzer analyzer, TypeInfo typeInfo, Set<String> imports) {
        this.source = source;
        this.analyzer = analyzer;
        this.typeInfo = typeInfo;
        this.imports = imports;
    }

    /**
     * Generates Java source for the given expression tree into {@code source}.
     * Static entry point; the translator itself is single-use and private.
     */
    public static void buildExpression(ExpressionNode node, JavaSource source, VariableAnalyzer analyzer,
                                       TypeInfo typeInfo, Set<String> imports) {
        ExpressionTranslator builder = new ExpressionTranslator(source, analyzer, typeInfo, imports);
        builder.traverse(node);
    }

    public void traverse(ExpressionNode node) {
        visit(node);
    }

    // Double-dispatch helper: lets each node call back into the typed visit overload.
    private void visit(ExpressionNode node) {
        node.accept(this);
    }

    @Override
    public void visit(PropertyAccess propertyAccess) {
        if (typeInfo.typeOf(propertyAccess.getTarget()) == Type.MAP) {
            //Special optimization for maps
            visit(propertyAccess.getTarget());
            source.startCall(SourceGenConstants.MAP_GET, true);
            visit(propertyAccess.getProperty());
            source.endCall();
        } else {
            // Generic path: resolve the property through the runtime object model.
            source.objectModel().startCall(SourceGenConstants.ROM_RESOLVE_PROPERTY, true);
            visit(propertyAccess.getTarget());
            source.separateArgument();
            visit(propertyAccess.getProperty());
            source.endCall();
        }
    }

    @Override
    public void visit(Identifier identifier) {
        // Emit the safe Java name assigned for this HTL identifier.
        String safeName = analyzer.assignedName(identifier.getName());
        source.append(safeName);
    }

    @Override
    public void visit(StringConstant text) {
        source.stringLiteral(text.getText());
    }

    @Override
    public void visit(BinaryOperation binaryOperation) {
        // The operator-specific generator drives recursion into both operands.
        BinaryOpGen opGen = Operators.generatorFor(binaryOperation.getOperator());
        source.startExpression();
        opGen.generate(source, this,
                typeInfo.getTyped(binaryOperation.getLeftOperand()),
                typeInfo.getTyped(binaryOperation.getRightOperand()));
        source.endExpression();
    }

    @Override
    public void visit(BooleanConstant booleanConstant) {
        source.append(Boolean.toString(booleanConstant.getValue()));
    }

    @Override
    public void visit(NumericConstant numericConstant) {
        source.append(numericConstant.getValue().toString()); //todo: check correctness
    }

    @Override
    public void visit(UnaryOperation unaryOperation) {
        UnaryOperator operator = unaryOperation.getOperator();
        ExpressionNode operand = unaryOperation.getTarget();
        UnaryOpGen unaryOpGen = Operators.generatorFor(operator);
        source.startExpression();
        unaryOpGen.generate(source, this, typeInfo.getTyped(operand));
        source.endExpression();
    }

    @Override
    public void visit(TernaryOperator ternaryOperator) {
        GenHelper.generateTernary(source, this,
                typeInfo.getTyped(ternaryOperator.getCondition()),
                typeInfo.getTyped(ternaryOperator.getThenBranch()),
                typeInfo.getTyped(ternaryOperator.getElseBranch()));
    }

    @Override
    public void visit(RuntimeCall runtimeCall) {
        String runtimeCallName = runtimeCall.getFunctionName();
        source.startMethodCall(SourceGenConstants.RENDER_CONTEXT_INSTANCE, SourceGenConstants.RUNTIME_CALL_METHOD)
                .stringLiteral(runtimeCallName);
        int index = 0;
        for (ExpressionNode arg : runtimeCall.getArguments()) {
            source.separateArgument();
            // Special case: for a "use" call, if the first argument is a string constant
            // naming an imported class, emit a class reference (simple name) instead of
            // the string literal, so the use-provider gets the class directly.
            if (RuntimeFunction.USE.equals(runtimeCallName) && index == 0) {
                if (arg instanceof StringConstant) {
                    StringConstant constant = (StringConstant) arg;
                    String className = constant.getText();
                    if (imports.contains(className)) {
                        source.className(className.substring(className.lastIndexOf('.') + 1));
                    } else {
                        visit(arg);
                    }
                } else {
                    visit(arg);
                }
            } else {
                visit(arg);
            }
            // NOTE(review): index is never incremented, so the index == 0 branch applies
            // to every argument of a USE call, not only the first — confirm intent.
        }
        source.endCall();
    }

    @Override
    public void visit(MapLiteral mapLiteral) {
        // Start an empty map, then chain an add call per entry.
        source.startCall(SourceGenConstants.START_MAP_METHOD).endCall();
        for (Map.Entry<String, ExpressionNode> entry : mapLiteral.getMap().entrySet()) {
            source.startCall(SourceGenConstants.MAP_TYPE_ADD, true)
                    .stringLiteral(entry.getKey())
                    .separateArgument();
            visit(entry.getValue());
            source.endCall();
        }
    }

    @Override
    public void visit(ArrayLiteral arrayLiteral) {
        source.startExpression().startArray();
        boolean needsComma = false;
        for (ExpressionNode node : arrayLiteral.getItems()) {
            if (needsComma) {
                source.separateArgument();
            }
            visit(node);
            needsComma = true;
        }
        source.endArray().endExpression();
    }

    @Override
    public void visit(NullLiteral nullLiteral) {
        source.nullLiteral();
    }

    public VariableAnalyzer getAnalyzer() {
        return analyzer;
    }
}
/*
Copyright 2014-2016 Intel Corporation

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package apple.passkit.protocol;

import apple.foundation.NSArray;
import apple.passkit.PKContact;
import apple.passkit.PKPayment;
import apple.passkit.PKPaymentAuthorizationResult;
import apple.passkit.PKPaymentAuthorizationViewController;
import apple.passkit.PKPaymentMethod;
import apple.passkit.PKPaymentRequestCouponCodeUpdate;
import apple.passkit.PKPaymentRequestMerchantSessionUpdate;
import apple.passkit.PKPaymentRequestPaymentMethodUpdate;
import apple.passkit.PKPaymentRequestShippingContactUpdate;
import apple.passkit.PKPaymentRequestShippingMethodUpdate;
import apple.passkit.PKPaymentSummaryItem;
import apple.passkit.PKShippingMethod;
import org.moe.natj.general.ann.Generated;
import org.moe.natj.general.ann.Library;
import org.moe.natj.general.ann.NInt;
import org.moe.natj.general.ann.Runtime;
import org.moe.natj.general.ptr.ConstVoidPtr;
import org.moe.natj.objc.ObjCRuntime;
import org.moe.natj.objc.ann.IsOptional;
import org.moe.natj.objc.ann.ObjCBlock;
import org.moe.natj.objc.ann.ObjCProtocolName;
import org.moe.natj.objc.ann.Selector;

// NOTE(review): machine-generated MOE binding for the Objective-C protocol
// PKPaymentAuthorizationViewControllerDelegate. Do not hand-edit signatures;
// selectors and block shapes must mirror the native protocol.
@Generated
@Library("PassKit")
@Runtime(ObjCRuntime.class)
@ObjCProtocolName("PKPaymentAuthorizationViewControllerDelegate")
public interface PKPaymentAuthorizationViewControllerDelegate {
    /**
     * Deprecated delegate methods
     * These methods are deprecated. Please migrate away from them to their replacements.
     */
    // Optional ObjC methods map to Java default methods that throw unless overridden.
    @IsOptional
    @Generated
    @Selector("paymentAuthorizationViewController:didAuthorizePayment:completion:")
    default void paymentAuthorizationViewControllerDidAuthorizePaymentCompletion(
            PKPaymentAuthorizationViewController controller, PKPayment payment,
            @ObjCBlock(name = "call_paymentAuthorizationViewControllerDidAuthorizePaymentCompletion") Block_paymentAuthorizationViewControllerDidAuthorizePaymentCompletion completion) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Generated
    @IsOptional
    @Selector("paymentAuthorizationViewController:didSelectPaymentMethod:completion:")
    default void paymentAuthorizationViewControllerDidSelectPaymentMethodCompletion(
            PKPaymentAuthorizationViewController controller, PKPaymentMethod paymentMethod,
            @ObjCBlock(name = "call_paymentAuthorizationViewControllerDidSelectPaymentMethodCompletion") Block_paymentAuthorizationViewControllerDidSelectPaymentMethodCompletion completion) {
        throw new java.lang.UnsupportedOperationException();
    }

    // Deprecated variant that passes the shipping address as an opaque pointer.
    @Generated
    @IsOptional
    @Deprecated
    @Selector("paymentAuthorizationViewController:didSelectShippingAddress:completion:")
    default void paymentAuthorizationViewControllerDidSelectShippingAddressCompletion(
            PKPaymentAuthorizationViewController controller, ConstVoidPtr address,
            @ObjCBlock(name = "call_paymentAuthorizationViewControllerDidSelectShippingAddressCompletion") Block_paymentAuthorizationViewControllerDidSelectShippingAddressCompletion completion) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Generated
    @IsOptional
    @Selector("paymentAuthorizationViewController:didSelectShippingContact:completion:")
    default void paymentAuthorizationViewControllerDidSelectShippingContactCompletion(
            PKPaymentAuthorizationViewController controller, PKContact contact,
            @ObjCBlock(name = "call_paymentAuthorizationViewControllerDidSelectShippingContactCompletion") Block_paymentAuthorizationViewControllerDidSelectShippingContactCompletion completion) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Generated
    @IsOptional
    @Selector("paymentAuthorizationViewController:didSelectShippingMethod:completion:")
    default void paymentAuthorizationViewControllerDidSelectShippingMethodCompletion(
            PKPaymentAuthorizationViewController controller, PKShippingMethod shippingMethod,
            @ObjCBlock(name = "call_paymentAuthorizationViewControllerDidSelectShippingMethodCompletion") Block_paymentAuthorizationViewControllerDidSelectShippingMethodCompletion completion) {
        throw new java.lang.UnsupportedOperationException();
    }

    /**
     * Sent to the delegate when payment authorization is finished.  This may occur when
     * the user cancels the request, or after the PKPaymentAuthorizationStatus parameter of the
     * paymentAuthorizationViewController:didAuthorizePayment:completion: has been shown to the user.
     * <p>
     * The delegate is responsible for dismissing the view controller in this method.
     */
    // The only required (non-optional, non-default) method of the protocol.
    @Generated
    @Selector("paymentAuthorizationViewControllerDidFinish:")
    void paymentAuthorizationViewControllerDidFinish(PKPaymentAuthorizationViewController controller);

    /**
     * Sent to the delegate before the payment is authorized, but after the user has authenticated using
     * passcode or Touch ID. Optional.
     */
    @Generated
    @IsOptional
    @Selector("paymentAuthorizationViewControllerWillAuthorizePayment:")
    default void paymentAuthorizationViewControllerWillAuthorizePayment(
            PKPaymentAuthorizationViewController controller) {
        throw new java.lang.UnsupportedOperationException();
    }

    // The Block_* nested interfaces below are generated bindings for the ObjC
    // completion blocks passed to the corresponding delegate methods.
    @Runtime(ObjCRuntime.class)
    @Generated
    public interface Block_paymentAuthorizationViewControllerDidAuthorizePaymentCompletion {
        @Generated
        void call_paymentAuthorizationViewControllerDidAuthorizePaymentCompletion(@NInt long status);
    }

    @Runtime(ObjCRuntime.class)
    @Generated
    public interface Block_paymentAuthorizationViewControllerDidSelectPaymentMethodCompletion {
        @Generated
        void call_paymentAuthorizationViewControllerDidSelectPaymentMethodCompletion(
                NSArray<? extends PKPaymentSummaryItem> summaryItems);
    }

    @Runtime(ObjCRuntime.class)
    @Generated
    public interface Block_paymentAuthorizationViewControllerDidSelectShippingAddressCompletion {
        @Generated
        void call_paymentAuthorizationViewControllerDidSelectShippingAddressCompletion(@NInt long status,
                NSArray<? extends PKShippingMethod> shippingMethods,
                NSArray<? extends PKPaymentSummaryItem> summaryItems);
    }

    @Runtime(ObjCRuntime.class)
    @Generated
    public interface Block_paymentAuthorizationViewControllerDidSelectShippingContactCompletion {
        @Generated
        void call_paymentAuthorizationViewControllerDidSelectShippingContactCompletion(@NInt long status,
                NSArray<? extends PKShippingMethod> shippingMethods,
                NSArray<? extends PKPaymentSummaryItem> summaryItems);
    }

    @Runtime(ObjCRuntime.class)
    @Generated
    public interface Block_paymentAuthorizationViewControllerDidSelectShippingMethodCompletion {
        @Generated
        void call_paymentAuthorizationViewControllerDidSelectShippingMethodCompletion(@NInt long status,
                NSArray<? extends PKPaymentSummaryItem> summaryItems);
    }

    /**
     * Sent to the delegate after the user has acted on the payment request.  The application
     * should inspect the payment to determine whether the payment request was authorized.
     * <p>
     * If the application requested a shipping address then the full addresses is now part of the payment.
     * <p>
     * The delegate must call completion with an appropriate authorization status, as may be determined
     * by submitting the payment credential to a processing gateway for payment authorization.
     */
    @Generated
    @IsOptional
    @Selector("paymentAuthorizationViewController:didAuthorizePayment:handler:")
    default void paymentAuthorizationViewControllerDidAuthorizePaymentHandler(
            PKPaymentAuthorizationViewController controller, PKPayment payment,
            @ObjCBlock(name = "call_paymentAuthorizationViewControllerDidAuthorizePaymentHandler") Block_paymentAuthorizationViewControllerDidAuthorizePaymentHandler completion) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Runtime(ObjCRuntime.class)
    @Generated
    public interface Block_paymentAuthorizationViewControllerDidAuthorizePaymentHandler {
        @Generated
        void call_paymentAuthorizationViewControllerDidAuthorizePaymentHandler(PKPaymentAuthorizationResult result);
    }

    /**
     * Sent when the user has selected a new payment card.  Use this delegate callback if you need to
     * update the summary items in response to the card type changing (for example, applying credit card surcharges)
     * <p>
     * The delegate will receive no further callbacks except paymentAuthorizationViewControllerDidFinish:
     * until it has invoked the completion block.
     */
    @Generated
    @IsOptional
    @Selector("paymentAuthorizationViewController:didSelectPaymentMethod:handler:")
    default void paymentAuthorizationViewControllerDidSelectPaymentMethodHandler(
            PKPaymentAuthorizationViewController controller, PKPaymentMethod paymentMethod,
            @ObjCBlock(name = "call_paymentAuthorizationViewControllerDidSelectPaymentMethodHandler") Block_paymentAuthorizationViewControllerDidSelectPaymentMethodHandler completion) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Runtime(ObjCRuntime.class)
    @Generated
    public interface Block_paymentAuthorizationViewControllerDidSelectPaymentMethodHandler {
        @Generated
        void call_paymentAuthorizationViewControllerDidSelectPaymentMethodHandler(
                PKPaymentRequestPaymentMethodUpdate update);
    }

    /**
     * Sent when the user has selected a new shipping address.  The delegate should inspect the
     * address and must invoke the completion block with an updated array of PKPaymentSummaryItem objects.
     * <p>
     * The delegate will receive no further callbacks except paymentAuthorizationViewControllerDidFinish:
     * until it has invoked the completion block.
     */
    @Generated
    @IsOptional
    @Selector("paymentAuthorizationViewController:didSelectShippingContact:handler:")
    default void paymentAuthorizationViewControllerDidSelectShippingContactHandler(
            PKPaymentAuthorizationViewController controller, PKContact contact,
            @ObjCBlock(name = "call_paymentAuthorizationViewControllerDidSelectShippingContactHandler") Block_paymentAuthorizationViewControllerDidSelectShippingContactHandler completion) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Runtime(ObjCRuntime.class)
    @Generated
    public interface Block_paymentAuthorizationViewControllerDidSelectShippingContactHandler {
        @Generated
        void call_paymentAuthorizationViewControllerDidSelectShippingContactHandler(
                PKPaymentRequestShippingContactUpdate update);
    }

    /**
     * Sent when the user has selected a new shipping method.  The delegate should determine
     * shipping costs based on the shipping method and either the shipping address supplied in the original
     * PKPaymentRequest or the address fragment provided by the last call to paymentAuthorizationViewController:
     * didSelectShippingAddress:completion:.
     * <p>
     * The delegate must invoke the completion block with an updated array of PKPaymentSummaryItem objects.
     * <p>
     * The delegate will receive no further callbacks except paymentAuthorizationViewControllerDidFinish:
     * until it has invoked the completion block.
     */
    @Generated
    @IsOptional
    @Selector("paymentAuthorizationViewController:didSelectShippingMethod:handler:")
    default void paymentAuthorizationViewControllerDidSelectShippingMethodHandler(
            PKPaymentAuthorizationViewController controller, PKShippingMethod shippingMethod,
            @ObjCBlock(name = "call_paymentAuthorizationViewControllerDidSelectShippingMethodHandler") Block_paymentAuthorizationViewControllerDidSelectShippingMethodHandler completion) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Runtime(ObjCRuntime.class)
    @Generated
    public interface Block_paymentAuthorizationViewControllerDidSelectShippingMethodHandler {
        @Generated
        void call_paymentAuthorizationViewControllerDidSelectShippingMethodHandler(
                PKPaymentRequestShippingMethodUpdate update);
    }

    // Merchant session update callback — presumably for Apple Pay merchant session
    // renewal; see Apple's PassKit documentation for the exact contract.
    @Generated
    @IsOptional
    @Selector("paymentAuthorizationViewController:didRequestMerchantSessionUpdate:")
    default void paymentAuthorizationViewControllerDidRequestMerchantSessionUpdate(
            PKPaymentAuthorizationViewController controller,
            @ObjCBlock(name = "call_paymentAuthorizationViewControllerDidRequestMerchantSessionUpdate") Block_paymentAuthorizationViewControllerDidRequestMerchantSessionUpdate handler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Runtime(ObjCRuntime.class)
    @Generated
    public interface Block_paymentAuthorizationViewControllerDidRequestMerchantSessionUpdate {
        @Generated
        void call_paymentAuthorizationViewControllerDidRequestMerchantSessionUpdate(
                PKPaymentRequestMerchantSessionUpdate update);
    }

    // Coupon-code change callback; the handler receives the entered coupon code.
    @Generated
    @IsOptional
    @Selector("paymentAuthorizationViewController:didChangeCouponCode:handler:")
    default void paymentAuthorizationViewControllerDidChangeCouponCodeHandler(
            PKPaymentAuthorizationViewController controller, String couponCode,
            @ObjCBlock(name = "call_paymentAuthorizationViewControllerDidChangeCouponCodeHandler") Block_paymentAuthorizationViewControllerDidChangeCouponCodeHandler completion) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Runtime(ObjCRuntime.class)
    @Generated
    public interface Block_paymentAuthorizationViewControllerDidChangeCouponCodeHandler {
        @Generated
        void call_paymentAuthorizationViewControllerDidChangeCouponCodeHandler(PKPaymentRequestCouponCodeUpdate update);
    }
}
package fi.csc.chipster.auth.resource;

import java.io.IOException;
import java.time.Duration;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.hibernate.Session;

import fi.csc.chipster.auth.jaas.JaasAuthenticationProvider;
import fi.csc.chipster.auth.model.Role;
import fi.csc.chipster.auth.model.User;
import fi.csc.chipster.auth.model.UserId;
import fi.csc.chipster.auth.model.UserToken;
import fi.csc.chipster.rest.Config;
import fi.csc.chipster.rest.exception.NotAuthorizedException;
import fi.csc.chipster.rest.exception.TooManyRequestsException;
import fi.csc.chipster.rest.hibernate.HibernateUtil;
import fi.csc.chipster.rest.hibernate.HibernateUtil.HibernateRunnable;
import fi.csc.chipster.rest.token.BasicAuthParser;
import fi.csc.chipster.rest.token.TokenRequestFilter;
import fi.csc.chipster.sessionworker.RequestThrottle;
import jakarta.annotation.Priority;
import jakarta.ws.rs.ForbiddenException;
import jakarta.ws.rs.Priorities;
import jakarta.ws.rs.container.ContainerRequestContext;
import jakarta.ws.rs.container.ContainerRequestFilter;
import jakarta.ws.rs.ext.Provider;

/**
 * JAX-RS request filter that authenticates incoming requests with either a
 * token (Basic auth with the reserved token username) or a username/password
 * (service, monitoring or JAAS accounts), and installs an
 * {@link AuthSecurityContext} carrying the caller's roles.
 *
 * @author klemela
 */
@Provider
@Priority(Priorities.AUTHENTICATION) // execute this filter before others
public class AuthenticationRequestFilter implements ContainerRequestFilter {

    private static final Logger logger = LogManager.getLogger();

    // Configuration keys for the password brute-force throttles.
    // NOTE(review): "REQEUST" is a typo kept for compatibility with existing config keys? The
    // string values themselves are spelled correctly.
    private static final String CONF_PASSWORD_THROTTLE_PERIOD = "auth-password-throttle-period";
    private static final String CONF_PASSWORD_THROTTLE_REQEUST_COUNT = "auth-password-throttle-request-count";
    private static final String CONF_SERVER_PASSWORD_THROTTLE_PERIOD = "auth-server-password-throttle-period";
    private static final String CONF_SERVER_PASSWORD_THROTTLE_REQEUST_COUNT = "auth-server-password-throttle-request-count";
    private static final String CONF_SERVER_PASSWORD_THROTTLE_LIST = "auth-server-password-throttle-list";

    private HibernateUtil hibernate;
    @SuppressWarnings("unused")
    private Config config;
    private UserTable userTable;
    // Internal service account name -> password, from configuration.
    private Map<String, String> serviceAccounts;
    private Set<String> adminAccounts;
    // Auth-prefix (e.g. "jaas") used to build and recognize JAAS user IDs.
    private final String jaasPrefix;
    private JaasAuthenticationProvider authenticationProvider;
    private HashMap<String, String> monitoringAccounts;
    private AuthTokens tokenTable;
    // Throttle for ordinary user password attempts.
    private RequestThrottle passwordThrottle;
    // Accounts that get the laxer server throttle (e.g. unit-test accounts).
    private HashSet<String> serverThrottleList;
    // Laxer throttle for server/service accounts (trusted to have strong passwords).
    private RequestThrottle serverPasswordThrottle;

    public AuthenticationRequestFilter(HibernateUtil hibernate, Config config, UserTable userTable,
            AuthTokens tokenTable, JaasAuthenticationProvider jaasAuthProvider) throws IOException {
        this.hibernate = hibernate;
        this.config = config;
        this.userTable = userTable;
        this.tokenTable = tokenTable;
        this.authenticationProvider = jaasAuthProvider;

        serviceAccounts = config.getServicePasswords();
        adminAccounts = config.getAdminAccounts();
        jaasPrefix = config.getString(Config.KEY_AUTH_JAAS_PREFIX);

        String monitoringPassword = config.getString(Config.KEY_MONITORING_PASSWORD);
        // Warn (but do not fail) if the monitoring password was left at its default.
        if (config.getDefault(Config.KEY_MONITORING_PASSWORD).equals(monitoringPassword)) {
            logger.warn("default password for username " + Role.MONITORING);
        }

        // NOTE(review): double-brace initialization creates an anonymous HashMap
        // subclass holding a reference to this filter — consider a plain put().
        monitoringAccounts = new HashMap<String, String>() {{
            put(Role.MONITORING, monitoringPassword);
        }};

        /* Each replica counts its own request counts
         *
         * This is probably fine for small number of replicas. With dozens of replicas
         * you would need a centralized storage for this.
         */
        int throttlePeriod = config.getInt(CONF_PASSWORD_THROTTLE_PERIOD);
        int throttleRequestCount = config.getInt(CONF_PASSWORD_THROTTLE_REQEUST_COUNT);
        int serverThrottlePeriod = config.getInt(CONF_SERVER_PASSWORD_THROTTLE_PERIOD);
        int serverThrottleRequestCount = config.getInt(CONF_SERVER_PASSWORD_THROTTLE_REQEUST_COUNT);
        // Space-separated list of usernames that use the server throttle.
        serverThrottleList = new HashSet<String>(Arrays.asList(config.getString(CONF_SERVER_PASSWORD_THROTTLE_LIST).split(" ")));

        this.passwordThrottle = new RequestThrottle(Duration.ofSeconds(throttlePeriod), throttleRequestCount);
        this.serverPasswordThrottle = new RequestThrottle(Duration.ofSeconds(serverThrottlePeriod), serverThrottleRequestCount);
    }

    /**
     * Authenticates the request and installs a security context.
     *
     * <p>OPTIONS requests and requests without an Authorization header pass through
     * (the latter with the UNAUTHENTICATED role, for endpoints like OidcResource).
     * Only Basic authorization is accepted; token vs. password authentication is
     * chosen by the reserved token username.
     */
    @Override
    public void filter(ContainerRequestContext requestContext) throws IOException {

        if ("OPTIONS".equals(requestContext.getMethod())) {
            // CORS preflight checks require unauthenticated OPTIONS
            return;
        }

        String authHeader = requestContext.getHeaderString("authorization");

        if (authHeader == null) {
            // OidcResource needs unauthenticated access
            requestContext.setSecurityContext(new AuthSecurityContext(new AuthPrincipal(Role.UNAUTHENTICATED),
                    requestContext.getSecurityContext()));
            return;
        }

        if (authHeader.startsWith("Basic ") || authHeader.startsWith("basic ")) {

            BasicAuthParser parser = new BasicAuthParser(authHeader);

            AuthPrincipal principal = null;

            if (TokenRequestFilter.TOKEN_USER.equals(parser.getUsername())) {
                // throws an exception if fails
                principal = tokenAuthentication(parser.getPassword());
            } else {
                // throws an exception if fails
                principal = passwordAuthentication(parser.getUsername(), parser.getPassword());
            }

            // login ok
            AuthSecurityContext sc = new AuthSecurityContext(principal, requestContext.getSecurityContext());
            requestContext.setSecurityContext(sc);

        } else {
            throw new NotAuthorizedException("unknown authorization header type");
        }
    }

    /**
     * Validates the given JWS token string and builds a principal from it.
     * Throws (from validateUserToken) if the token is invalid.
     */
    public AuthPrincipal tokenAuthentication(String jwsString) {

        // throws if fails
        UserToken token = tokenTable.validateUserToken(jwsString);

        return new AuthPrincipal(token.getUsername(), jwsString, token.getRoles());
    }

    /**
     * Authenticates a username/password pair against, in order: service accounts,
     * monitoring accounts, and finally the JAAS provider (creating/updating the
     * Chipster user on success). Throttles attempts per username first.
     *
     * @throws TooManyRequestsException when the username is being throttled
     * @throws ForbiddenException on wrong credentials or malformed username
     */
    private AuthPrincipal passwordAuthentication(String username, String password) {

        /*
         * Slow down brute force attacks
         *
         * It could be possible to exhaust memory by trying different usernames.
         * Maybe add another throttle based on IP address. Finally,
         * respond with CONF_PASSWORD_THROTTLE_PERIOD to everyone after the list size grows
         * really large.
         */
        Duration retryAfter = null;

        // throttle less server accounts and unit test accounts
        if (serverThrottleList.contains(username) || serviceAccounts.containsKey(username)) {
            // we can trust that servers have proper passwords
            retryAfter = serverPasswordThrottle.throttle(username);
        } else {
            retryAfter = passwordThrottle.throttle(username);
        }

        if (!retryAfter.isZero()) {
            // + 1 to round up
            long ceilSeconds = retryAfter.getSeconds() + 1;
            logger.warn("throttling password requests for username '" + username + "'");
            throw new TooManyRequestsException(ceilSeconds);
        }

        // check that there is no extra white space in the username, because if authenticationProvider accepts it,
        // it would create a new user in Chipster
        if (!username.trim().equals(username)) {
            throw new ForbiddenException("white space in username");
        }

        if (serviceAccounts.containsKey(username)) {
            if (serviceAccounts.get(username).equals(password)) {
                // authenticate with username/password ok
                return new AuthPrincipal(username, getRoles(username));
            }
            // don't let other providers to authenticate internal usernames
            throw new ForbiddenException("wrong password");
        }

        if (monitoringAccounts.containsKey(username)) {
            if (monitoringAccounts.get(username).equals(password)) {
                // authenticate with username/password ok
                return new AuthPrincipal(username, getRoles(username));
            }
            // don't let other providers to authenticate internal usernames
            throw new ForbiddenException("wrong password");
        }

        // allow both plain username "jdoe" or userId "jaas/jdoe"
        String jaasUsername;
        try {
            // throws if username is not a userId
            UserId userId = new UserId(username);
            if (userId.getAuth().equals(jaasPrefix)) {
                // jaas userId (e.g. "jaas/jdoe"), login without the prefix
                jaasUsername = userId.getUsername();
            } else {
                // userId, but not from jaas (e.g. "sso/jdoe"), no point to try for jaas
                jaasUsername = null;
            }
        } catch (IllegalArgumentException e) {
            // not a userId but only a username (e.g. "jdoe"), but that's fine
            jaasUsername = username;
        }

        if (jaasUsername != null && authenticationProvider.authenticate(jaasUsername, password.toCharArray())) {

            User user = addOrUpdateUser(jaasUsername);

            // authenticate with username/password ok
            return new AuthPrincipal(user.getUserId().toUserIdString(), getRoles(username));
        }

        throw new ForbiddenException("wrong username or password");
    }

    /**
     * Persists (or refreshes) the JAAS user in the user table inside a transaction
     * and returns the in-memory User object.
     */
    private User addOrUpdateUser(String username) {
        User user = new User(jaasPrefix, username, null, null, username);

        hibernate.runInTransaction(new HibernateRunnable<Void>() {
            @Override
            public Void run(Session hibernateSession) {
                userTable.addOrUpdate(user, hibernateSession);
                return null;
            }
        });
        return user;
    }

    /**
     * Computes the role set for a password-authenticated account: service accounts
     * get SERVER (except SINGLE_SHOT_COMP) plus their own name as a role; monitoring
     * accounts get MONITORING; everyone else gets CLIENT (plus ADMIN if configured).
     */
    public HashSet<String> getRoles(String username) {
        HashSet<String> roles = new HashSet<>();
        roles.add(Role.PASSWORD);

        // NOTE(review): keySet().contains(...) is equivalent to containsKey(...)
        // used elsewhere in this class — consider unifying.
        if (serviceAccounts.keySet().contains(username)) {
            // minimal access rights if SingleShotComp service account is used someday
            if (!Role.SINGLE_SHOT_COMP.equals(username)) {
                roles.add(Role.SERVER);
            }
            roles.add(username);
        } else if (monitoringAccounts.containsKey(username)) {
            roles.add(Role.MONITORING);
        } else {
            roles.add(Role.CLIENT);
            if (adminAccounts.contains(username)) {
                roles.add(Role.ADMIN);
            }
        }
        return roles;
    }
}
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.cluster.metadata; import org.elasticsearch.Version; import org.elasticsearch.action.admin.indices.close.CloseIndexClusterStateUpdateRequest; import org.elasticsearch.action.admin.indices.close.CloseIndexResponse; import org.elasticsearch.action.admin.indices.close.CloseIndexResponse.IndexResult; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.DataStreamTestHelper; import org.elasticsearch.cluster.RestoreInProgress; import org.elasticsearch.cluster.SnapshotsInProgress; import org.elasticsearch.cluster.block.ClusterBlock; import org.elasticsearch.cluster.block.ClusterBlocks; import org.elasticsearch.cluster.routing.IndexRoutingTable; import org.elasticsearch.cluster.routing.IndexShardRoutingTable; import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.UnassignedInfo; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import 
org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.repositories.IndexId; import org.elasticsearch.snapshots.Snapshot; import org.elasticsearch.snapshots.SnapshotId; import org.elasticsearch.snapshots.SnapshotInProgressException; import org.elasticsearch.snapshots.SnapshotInfoTests; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.VersionUtils; import org.hamcrest.CoreMatchers; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Set; import java.util.stream.Collectors; import static java.util.Collections.emptyMap; import static java.util.Collections.emptySet; import static java.util.Collections.singletonMap; import static java.util.Collections.unmodifiableMap; import static org.elasticsearch.cluster.metadata.IndexMetadata.SETTING_NUMBER_OF_REPLICAS; import static org.elasticsearch.cluster.metadata.IndexMetadata.SETTING_NUMBER_OF_SHARDS; import static org.elasticsearch.cluster.metadata.IndexMetadata.SETTING_VERSION_CREATED; import static org.elasticsearch.cluster.metadata.MetadataIndexStateService.INDEX_CLOSED_BLOCK; import static org.elasticsearch.cluster.metadata.MetadataIndexStateService.INDEX_CLOSED_BLOCK_ID; import static org.elasticsearch.cluster.routing.TestShardRouting.newShardRouting; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; public class 
MetadataIndexStateServiceTests extends ESTestCase {

    /**
     * Builds a cluster state with a random mix of plainly opened indices and
     * indices carrying a closing block, then verifies that closeRoutingTable()
     * closes exactly those blocked indices whose verification result has no
     * failures, and leaves every other index opened.
     */
    public void testCloseRoutingTable() {
        final Set<Index> nonBlockedIndices = new HashSet<>();
        final Map<Index, ClusterBlock> blockedIndices = new HashMap<>();
        final Map<Index, IndexResult> results = new HashMap<>();
        ClusterState state = ClusterState.builder(new ClusterName("testCloseRoutingTable")).build();
        for (int i = 0; i < randomIntBetween(1, 25); i++) {
            final String indexName = "index-" + i;
            if (randomBoolean()) {
                // plain opened index: must remain open and unblocked afterwards
                state = addOpenedIndex(indexName, randomIntBetween(1, 5), randomIntBetween(0, 5), state);
                nonBlockedIndices.add(state.metadata().index(indexName).getIndex());
            } else {
                // opened index with a closing block; verification result is randomly
                // a success or a failure so both outcomes are exercised below
                final ClusterBlock closingBlock = MetadataIndexStateService.createIndexClosingBlock();
                state = addBlockedIndex(indexName, randomIntBetween(1, 5), randomIntBetween(0, 5), state, closingBlock);
                final Index index = state.metadata().index(indexName).getIndex();
                blockedIndices.put(index, closingBlock);
                if (randomBoolean()) {
                    results.put(index, new CloseIndexResponse.IndexResult(index));
                } else {
                    results.put(index, new CloseIndexResponse.IndexResult(index, new Exception("test")));
                }
            }
        }
        final ClusterState updatedState = MetadataIndexStateService.closeRoutingTable(state, blockedIndices, results).v1();
        assertThat(updatedState.metadata().indices().size(), equalTo(nonBlockedIndices.size() + blockedIndices.size()));
        for (Index nonBlockedIndex : nonBlockedIndices) {
            assertIsOpened(nonBlockedIndex.getName(), updatedState);
            assertThat(updatedState.blocks().hasIndexBlockWithId(nonBlockedIndex.getName(), INDEX_CLOSED_BLOCK_ID), is(false));
        }
        for (Index blockedIndex : blockedIndices.keySet()) {
            if (results.get(blockedIndex).hasFailures() == false) {
                assertIsClosed(blockedIndex.getName(), updatedState);
            } else {
                // failed verification: index stays open but keeps its closing block
                assertIsOpened(blockedIndex.getName(), updatedState);
                assertThat(updatedState.blocks().hasIndexBlockWithId(blockedIndex.getName(), INDEX_CLOSED_BLOCK_ID), is(true));
            }
        }
    }

    /**
     * An index that is being restored from a snapshot must not be closed:
     * it stays open and keeps its closing block.
     */
    public void testCloseRoutingTableWithRestoredIndex() {
        ClusterState state = ClusterState.builder(new ClusterName("testCloseRoutingTableWithRestoredIndex")).build();

        String indexName = "restored-index";
        ClusterBlock block = MetadataIndexStateService.createIndexClosingBlock();
        state = addRestoredIndex(indexName, randomIntBetween(1, 5), randomIntBetween(0, 5), state);
        state = ClusterState.builder(state)
            .blocks(ClusterBlocks.builder().blocks(state.blocks()).addIndexBlock(indexName, block))
            .build();

        final Index index = state.metadata().index(indexName).getIndex();
        final ClusterState updatedState =
            MetadataIndexStateService.closeRoutingTable(state, singletonMap(index, block), singletonMap(index, new IndexResult(index)))
                .v1();
        assertIsOpened(index.getName(), updatedState);
        assertThat(updatedState.blocks().hasIndexBlockWithId(index.getName(), INDEX_CLOSED_BLOCK_ID), is(true));
    }

    /**
     * An index that is part of an in-progress snapshot must not be closed:
     * it stays open and keeps its closing block.
     */
    public void testCloseRoutingTableWithSnapshottedIndex() {
        ClusterState state = ClusterState.builder(new ClusterName("testCloseRoutingTableWithSnapshottedIndex")).build();

        String indexName = "snapshotted-index";
        ClusterBlock block = MetadataIndexStateService.createIndexClosingBlock();
        state = addSnapshotIndex(indexName, randomIntBetween(1, 5), randomIntBetween(0, 5), state);
        state = ClusterState.builder(state)
            .blocks(ClusterBlocks.builder().blocks(state.blocks()).addIndexBlock(indexName, block))
            .build();

        final Index index = state.metadata().index(indexName).getIndex();
        final ClusterState updatedState =
            MetadataIndexStateService.closeRoutingTable(state, singletonMap(index, block), singletonMap(index, new IndexResult(index)))
                .v1();
        assertIsOpened(index.getName(), updatedState);
        assertThat(updatedState.blocks().hasIndexBlockWithId(index.getName(), INDEX_CLOSED_BLOCK_ID), is(true));
    }

    /**
     * Exercises addIndexClosedBlocks() across its edge cases, one anonymous
     * scope per scenario: unknown index, empty index array, already-closed
     * index (no-op), mix of opened+closed, restored index (rejected),
     * snapshotted index (rejected), and several opened indices (all blocked).
     */
    public void testAddIndexClosedBlocks() {
        final ClusterState initialState = ClusterState.builder(new ClusterName("testAddIndexClosedBlocks")).build();
        {
            // unknown index -> IndexNotFoundException, nothing blocked
            final Map<Index, ClusterBlock> blockedIndices = new HashMap<>();
            Index[] indices = new Index[]{new Index("_name", "_uid")};
            expectThrows(IndexNotFoundException.class, () ->
                MetadataIndexStateService.addIndexClosedBlocks(indices, blockedIndices, initialState));
            assertTrue(blockedIndices.isEmpty());
        }
        {
            // empty index array -> same state instance returned, nothing blocked
            final Map<Index, ClusterBlock> blockedIndices = new HashMap<>();
            Index[] indices = Index.EMPTY_ARRAY;

            ClusterState updatedState = MetadataIndexStateService.addIndexClosedBlocks(indices, blockedIndices, initialState);
            assertSame(initialState, updatedState);
            assertTrue(blockedIndices.isEmpty());
        }
        {
            // already-closed index -> no-op, same state instance returned
            final Map<Index, ClusterBlock> blockedIndices = new HashMap<>();
            ClusterState state = addClosedIndex("closed", randomIntBetween(1, 3), randomIntBetween(0, 3), initialState);
            Index[] indices = new Index[]{state.metadata().index("closed").getIndex()};

            ClusterState updatedState = MetadataIndexStateService.addIndexClosedBlocks(indices, blockedIndices, state);
            assertSame(state, updatedState);
            assertTrue(blockedIndices.isEmpty());
        }
        {
            // mixed opened+closed -> only the opened index gets a closing block
            final Map<Index, ClusterBlock> blockedIndices = new HashMap<>();
            ClusterState state = addClosedIndex("closed", randomIntBetween(1, 3), randomIntBetween(0, 3), initialState);
            state = addOpenedIndex("opened", randomIntBetween(1, 3), randomIntBetween(0, 3), state);
            Index[] indices = new Index[]{state.metadata().index("opened").getIndex(), state.metadata().index("closed").getIndex()};

            ClusterState updatedState = MetadataIndexStateService.addIndexClosedBlocks(indices, blockedIndices, state);
            assertNotSame(state, updatedState);

            Index opened = updatedState.metadata().index("opened").getIndex();
            assertTrue(blockedIndices.containsKey(opened));
            assertHasBlock("opened", updatedState, blockedIndices.get(opened));

            Index closed = updatedState.metadata().index("closed").getIndex();
            assertFalse(blockedIndices.containsKey(closed));
        }
        {
            // index currently being restored -> rejected with IllegalArgumentException
            IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> {
                ClusterState state = addRestoredIndex("restored", randomIntBetween(1, 3), randomIntBetween(0, 3), initialState);
                if (randomBoolean()) {
                    state = addOpenedIndex("opened", randomIntBetween(1, 3), randomIntBetween(0, 3), state);
                }
                if (randomBoolean()) {
                    state = addOpenedIndex("closed", randomIntBetween(1, 3), randomIntBetween(0, 3), state);
                }
                Index[] indices = new Index[]{state.metadata().index("restored").getIndex()};
                MetadataIndexStateService.addIndexClosedBlocks(indices, unmodifiableMap(emptyMap()), state);
            });
            assertThat(exception.getMessage(), containsString("Cannot close indices that are being restored: [[restored]]"));
        }
        {
            // index currently being snapshotted -> rejected with SnapshotInProgressException
            SnapshotInProgressException exception = expectThrows(SnapshotInProgressException.class, () -> {
                ClusterState state = addSnapshotIndex("snapshotted", randomIntBetween(1, 3), randomIntBetween(0, 3), initialState);
                if (randomBoolean()) {
                    state = addOpenedIndex("opened", randomIntBetween(1, 3), randomIntBetween(0, 3), state);
                }
                if (randomBoolean()) {
                    state = addOpenedIndex("closed", randomIntBetween(1, 3), randomIntBetween(0, 3), state);
                }
                Index[] indices = new Index[]{state.metadata().index("snapshotted").getIndex()};
                MetadataIndexStateService.addIndexClosedBlocks(indices, unmodifiableMap(emptyMap()), state);
            });
            assertThat(exception.getMessage(), containsString("Cannot close indices that are being snapshotted: [[snapshotted]]"));
        }
        {
            // several opened indices -> every one gets a closing block
            final Map<Index, ClusterBlock> blockedIndices = new HashMap<>();
            ClusterState state = addOpenedIndex("index-1", randomIntBetween(1, 3), randomIntBetween(0, 3), initialState);
            state = addOpenedIndex("index-2", randomIntBetween(1, 3), randomIntBetween(0, 3), state);
            state = addOpenedIndex("index-3", randomIntBetween(1, 3), randomIntBetween(0, 3), state);
            Index index1 = state.metadata().index("index-1").getIndex();
            Index index2 = state.metadata().index("index-2").getIndex();
            Index index3 = state.metadata().index("index-3").getIndex();
            Index[] indices = new Index[]{index1, index2, index3};

            ClusterState updatedState = MetadataIndexStateService.addIndexClosedBlocks(indices, blockedIndices, state);
            assertNotSame(state, updatedState);
            for (Index index : indices) {
                assertTrue(blockedIndices.containsKey(index));
                assertHasBlock(index.getName(), updatedState, blockedIndices.get(index));
            }
        }
    }

    /**
     * Calling addIndexClosedBlocks() twice on the same index must reuse the
     * existing closing block rather than stacking a second one.
     */
    public void testAddIndexClosedBlocksReusesBlocks() {
        ClusterState state = ClusterState.builder(new ClusterName("testAddIndexClosedBlocksReuseBlocks")).build();
        state = addOpenedIndex("test", randomIntBetween(1, 3), randomIntBetween(0, 3), state);

        Index test = state.metadata().index("test").getIndex();
        Index[] indices = new Index[]{test};

        final Map<Index, ClusterBlock> blockedIndices = new HashMap<>();
        state = MetadataIndexStateService.addIndexClosedBlocks(indices, blockedIndices, state);

        assertTrue(blockedIndices.containsKey(test));
        assertHasBlock(test.getName(), state, blockedIndices.get(test));

        final Map<Index, ClusterBlock> blockedIndices2 = new HashMap<>();
        state = MetadataIndexStateService.addIndexClosedBlocks(indices, blockedIndices2, state);

        assertTrue(blockedIndices2.containsKey(test));
        assertHasBlock(test.getName(), state, blockedIndices2.get(test));
        // the second call must hand back the very same block instance
        assertEquals(blockedIndices.get(test), blockedIndices2.get(test));
    }

    /**
     * isIndexVerifiedBeforeClosed() must be true only for a closed index that
     * carries the VERIFIED_BEFORE_CLOSE setting (addClosedIndex sets it);
     * an open index, or a closed index without the setting, yields false.
     */
    public void testIsIndexVerifiedBeforeClosed() {
        final ClusterState initialState = ClusterState.builder(new ClusterName("testIsIndexMetadataClosed")).build();
        {
            String indexName = "open";
            ClusterState state = addOpenedIndex(indexName, randomIntBetween(1, 3), randomIntBetween(0, 3), initialState);
            assertFalse(MetadataIndexStateService.isIndexVerifiedBeforeClosed(state.getMetadata().index(indexName)));
        }
        {
            String indexName = "closed";
            ClusterState state = addClosedIndex(indexName, randomIntBetween(1, 3), randomIntBetween(0, 3), initialState);
            assertTrue(MetadataIndexStateService.isIndexVerifiedBeforeClosed(state.getMetadata().index(indexName)));
        }
        {
            // closed but without the verified-before-close setting -> false
            String indexName = "closed-no-setting";
            IndexMetadata indexMetadata = IndexMetadata.builder(indexName)
                .state(IndexMetadata.State.CLOSE)
                .creationDate(randomNonNegativeLong())
                .settings(Settings.builder()
                    .put(SETTING_VERSION_CREATED, Version.CURRENT)
                    .put(SETTING_NUMBER_OF_SHARDS, randomIntBetween(1, 3))
                    .put(SETTING_NUMBER_OF_REPLICAS, randomIntBetween(0, 3)))
                .build();
            assertFalse(MetadataIndexStateService.isIndexVerifiedBeforeClosed(indexMetadata));
        }
    }

    /**
     * If an index's closing block disappears between the block being added and
     * closeRoutingTable() running, closing that index must be reported as a
     * failure; indices that kept their block close normally.
     */
    public void testCloseFailedIfBlockDisappeared() {
        ClusterState state = ClusterState.builder(new ClusterName("failedIfBlockDisappeared")).build();
        Map<Index, ClusterBlock> blockedIndices = new HashMap<>();
        int numIndices = between(1, 10);
        Set<Index> disappearedIndices = new HashSet<>();
        Map<Index, IndexResult> verifyResults = new HashMap<>();
        for (int i = 0; i < numIndices; i++) {
            String indexName = "test-" + i;
            state = addOpenedIndex(indexName, randomIntBetween(1, 3), randomIntBetween(0, 3), state);
            Index index = state.metadata().index(indexName).getIndex();
            state = MetadataIndexStateService.addIndexClosedBlocks(new Index[]{index}, blockedIndices, state);
            if (randomBoolean()) {
                // simulate the closing block vanishing before the final step
                state = ClusterState.builder(state)
                    .blocks(ClusterBlocks.builder().blocks(state.blocks()).removeIndexBlocks(indexName).build())
                    .build();
                disappearedIndices.add(index);
            }
            verifyResults.put(index, new IndexResult(index));
        }
        Collection<IndexResult> closingResults =
            MetadataIndexStateService.closeRoutingTable(state, blockedIndices, unmodifiableMap(verifyResults)).v2();
        assertThat(closingResults, hasSize(numIndices));
        Set<Index> failedIndices = closingResults.stream().filter(IndexResult::hasFailures)
            .map(IndexResult::getIndex).collect(Collectors.toSet());
        assertThat(failedIndices, equalTo(disappearedIndices));
    }

    /**
     * Closing the current write index of a data stream must be rejected with
     * an IllegalArgumentException naming the offending write indices.
     */
    public void testCloseCurrentWriteIndexForDataStream() {
        int numDataStreams = randomIntBetween(1, 3);
        List<Tuple<String, Integer>> dataStreamsToCreate = new ArrayList<>();
        List<String> writeIndices = new ArrayList<>();
        for (int k = 0; k < numDataStreams; k++) {
            String dataStreamName = randomAlphaOfLength(6).toLowerCase(Locale.ROOT);
            int numBackingIndices = randomIntBetween(1, 5);
            dataStreamsToCreate.add(new Tuple<>(dataStreamName, numBackingIndices));
            // the highest-numbered backing index is the write index
            writeIndices.add(DataStream.getDefaultBackingIndexName(dataStreamName, numBackingIndices));
        }
        ClusterState cs = DataStreamTestHelper.getClusterStateWithDataStreams(dataStreamsToCreate, List.of());

        ClusterService clusterService = mock(ClusterService.class);
        when(clusterService.state()).thenReturn(cs);

        List<String> indicesToDelete = randomSubsetOf(randomIntBetween(1, numDataStreams), writeIndices);
        Index[] indicesToDeleteArray = new Index[indicesToDelete.size()];
        for (int k = 0; k < indicesToDelete.size(); k++) {
            Index indexToDelete = cs.metadata().index(indicesToDelete.get(k)).getIndex();
            indicesToDeleteArray[k] = indexToDelete;
        }
        // only the ClusterService collaborator matters for this path
        MetadataIndexStateService service = new MetadataIndexStateService(clusterService, null, null, null, null, null, null);
        CloseIndexClusterStateUpdateRequest request = new CloseIndexClusterStateUpdateRequest(0L).indices(indicesToDeleteArray);
        Exception e = expectThrows(IllegalArgumentException.class, () -> service.closeIndices(request, null));
        assertThat(e.getMessage(), CoreMatchers.containsString("cannot close the following data stream write indices ["
            + Strings.collectionToCommaDelimitedString(indicesToDelete) + "]"));
    }

    /** Adds an opened index (no block) with the given shard/replica counts. */
    public static ClusterState addOpenedIndex(final String index, final int numShards, final int numReplicas,
                                              final ClusterState state) {
        return addIndex(state, index, numShards, numReplicas, IndexMetadata.State.OPEN, null);
    }

    /** Adds a closed index carrying the permanent INDEX_CLOSED_BLOCK. */
    public static ClusterState addClosedIndex(final String index, final int numShards, final int numReplicas,
                                              final ClusterState state) {
        return addIndex(state, index, numShards, numReplicas, IndexMetadata.State.CLOSE, INDEX_CLOSED_BLOCK);
    }

    /** Adds an opened index that already carries the given (temporary) closing block. */
    private static ClusterState addBlockedIndex(final String index, final int numShards, final int numReplicas,
                                                final ClusterState state, final ClusterBlock closingBlock) {
        return addIndex(state, index, numShards, numReplicas, IndexMetadata.State.OPEN, closingBlock);
    }

    /** Adds an opened index and registers a RestoreInProgress entry covering all of its active shards. */
    private static ClusterState addRestoredIndex(final String index, final int numShards, final int numReplicas,
                                                 final ClusterState state) {
        ClusterState newState = addOpenedIndex(index, numShards, numReplicas, state);

        final ImmutableOpenMap.Builder<ShardId, RestoreInProgress.ShardRestoreStatus> shardsBuilder = ImmutableOpenMap.builder();
        for (ShardRouting shardRouting : newState.routingTable().index(index).randomAllActiveShardsIt()) {
            shardsBuilder.put(shardRouting.shardId(), new RestoreInProgress.ShardRestoreStatus(shardRouting.currentNodeId()));
        }

        final Snapshot snapshot = new Snapshot(randomAlphaOfLength(10), new SnapshotId(randomAlphaOfLength(5), randomAlphaOfLength(5)));
        final RestoreInProgress.Entry entry =
            new RestoreInProgress.Entry("_uuid", snapshot, RestoreInProgress.State.INIT,
                Collections.singletonList(index), shardsBuilder.build());
        return ClusterState.builder(newState)
            .putCustom(RestoreInProgress.TYPE, new RestoreInProgress.Builder().add(entry).build())
            .build();
    }

    /** Adds an opened index and registers a SnapshotsInProgress entry covering all of its active shards. */
    private static ClusterState addSnapshotIndex(final String index, final int numShards, final int numReplicas,
                                                 final ClusterState state) {
        ClusterState newState = addOpenedIndex(index, numShards, numReplicas, state);

        final ImmutableOpenMap.Builder<ShardId, SnapshotsInProgress.ShardSnapshotStatus> shardsBuilder = ImmutableOpenMap.builder();
        for (ShardRouting shardRouting : newState.routingTable().index(index).randomAllActiveShardsIt()) {
            shardsBuilder.put(shardRouting.shardId(), new SnapshotsInProgress.ShardSnapshotStatus(shardRouting.currentNodeId(), "1"));
        }

        final Snapshot snapshot = new Snapshot(randomAlphaOfLength(10), new SnapshotId(randomAlphaOfLength(5), randomAlphaOfLength(5)));
        final SnapshotsInProgress.Entry entry =
            new SnapshotsInProgress.Entry(snapshot, randomBoolean(), false, SnapshotsInProgress.State.INIT,
                Collections.singletonList(new IndexId(index, index)), Collections.emptyList(),
                randomNonNegativeLong(), randomLong(), shardsBuilder.build(), null, SnapshotInfoTests.randomUserMetadata(),
                VersionUtils.randomVersion(random()));
        return ClusterState.builder(newState).putCustom(SnapshotsInProgress.TYPE, SnapshotsInProgress.of(List.of(entry))).build();
    }

    /**
     * Core fixture builder: registers index metadata, a fully STARTED routing
     * table (primary + replicas per shard), and optionally an index block.
     * Closed indices additionally get the VERIFIED_BEFORE_CLOSE setting.
     */
    private static ClusterState addIndex(final ClusterState currentState,
                                         final String index,
                                         final int numShards,
                                         final int numReplicas,
                                         final IndexMetadata.State state,
                                         @Nullable final ClusterBlock block) {

        final Settings.Builder settings = Settings.builder()
            .put(SETTING_VERSION_CREATED, Version.CURRENT)
            .put(SETTING_NUMBER_OF_SHARDS, numShards)
            .put(SETTING_NUMBER_OF_REPLICAS, numReplicas);
        if (state == IndexMetadata.State.CLOSE) {
            settings.put(MetadataIndexStateService.VERIFIED_BEFORE_CLOSE_SETTING.getKey(), true);
        }
        final IndexMetadata indexMetadata = IndexMetadata.builder(index)
            .state(state)
            .creationDate(randomNonNegativeLong())
            .settings(settings)
            .build();

        final ClusterState.Builder clusterStateBuilder = ClusterState.builder(currentState);
        clusterStateBuilder.metadata(Metadata.builder(currentState.metadata()).put(indexMetadata, true));

        final IndexRoutingTable.Builder indexRoutingTable = IndexRoutingTable.builder(indexMetadata.getIndex());
        for (int j = 0; j < indexMetadata.getNumberOfShards(); j++) {
            ShardId shardId = new ShardId(indexMetadata.getIndex(), j);
            IndexShardRoutingTable.Builder indexShardRoutingBuilder = new IndexShardRoutingTable.Builder(shardId);
            indexShardRoutingBuilder.addShard(newShardRouting(shardId, randomAlphaOfLength(10), true, ShardRoutingState.STARTED));
            for (int k = 0; k < indexMetadata.getNumberOfReplicas(); k++) {
                indexShardRoutingBuilder.addShard(newShardRouting(shardId, randomAlphaOfLength(10), false, ShardRoutingState.STARTED));
            }
            indexRoutingTable.addIndexShard(indexShardRoutingBuilder.build());
        }
        clusterStateBuilder.routingTable(RoutingTable.builder(currentState.routingTable()).add(indexRoutingTable).build());

        if (block != null) {
            clusterStateBuilder.blocks(ClusterBlocks.builder().blocks(currentState.blocks()).addIndexBlock(index, block));
        }
        return clusterStateBuilder.build();
    }

    /** Asserts the index is OPEN: no verified-before-close setting, a routing table, and no closed block. */
    private static void assertIsOpened(final String indexName, final ClusterState clusterState) {
        final IndexMetadata indexMetadata = clusterState.metadata().indices().get(indexName);
        assertThat(indexMetadata.getState(), is(IndexMetadata.State.OPEN));
        assertThat(indexMetadata.getSettings().hasValue(MetadataIndexStateService.VERIFIED_BEFORE_CLOSE_SETTING.getKey()), is(false));
        assertThat(clusterState.routingTable().index(indexName), notNullValue());
        assertThat(clusterState.blocks().hasIndexBlock(indexName, MetadataIndexStateService.INDEX_CLOSED_BLOCK), is(false));
        // NOTE(review): duplicate of the routing-table assertion three lines up — looks redundant; confirm before removing.
        assertThat(clusterState.routingTable().index(indexName), notNullValue());
    }

    /**
     * Asserts the index is fully CLOSED: CLOSE state, verified-before-close
     * setting true, exactly one closed block, and every shard unassigned with
     * reason INDEX_CLOSED.
     */
    private static void assertIsClosed(final String indexName, final ClusterState clusterState) {
        final IndexMetadata indexMetadata = clusterState.metadata().indices().get(indexName);
        assertThat(indexMetadata.getState(), is(IndexMetadata.State.CLOSE));

        final Settings indexSettings = indexMetadata.getSettings();
        assertThat(indexSettings.hasValue(MetadataIndexStateService.VERIFIED_BEFORE_CLOSE_SETTING.getKey()), is(true));
        assertThat(indexSettings.getAsBoolean(MetadataIndexStateService.VERIFIED_BEFORE_CLOSE_SETTING.getKey(), false), is(true));
        assertThat(clusterState.blocks().hasIndexBlock(indexName, MetadataIndexStateService.INDEX_CLOSED_BLOCK), is(true));
        assertThat("Index " + indexName + " must have only 1 block with [id=" + MetadataIndexStateService.INDEX_CLOSED_BLOCK_ID + "]",
            clusterState.blocks().indices().getOrDefault(indexName, emptySet()).stream()
                .filter(clusterBlock -> clusterBlock.id() == MetadataIndexStateService.INDEX_CLOSED_BLOCK_ID).count(), equalTo(1L));

        final IndexRoutingTable indexRoutingTable = clusterState.routingTable().index(indexName);
        assertThat(indexRoutingTable, notNullValue());

        for (IndexShardRoutingTable shardRoutingTable : indexRoutingTable) {
            assertThat(shardRoutingTable.shards().stream().allMatch(ShardRouting::unassigned), is(true));
            assertThat(shardRoutingTable.shards().stream().map(ShardRouting::unassignedInfo).map(UnassignedInfo::getReason)
                .allMatch(info -> info == UnassignedInfo.Reason.INDEX_CLOSED), is(true));
        }
    }

    /** Asserts the index carries the given closing block, and only one block with the closed-block id. */
    private static void assertHasBlock(final String indexName, final ClusterState clusterState, final ClusterBlock closingBlock) {
        assertThat(clusterState.blocks().hasIndexBlock(indexName, closingBlock), is(true));
        assertThat("Index " + indexName + " must have only 1 block with [id=" + MetadataIndexStateService.INDEX_CLOSED_BLOCK_ID + "]",
            clusterState.blocks().indices().getOrDefault(indexName, emptySet()).stream()
                .filter(clusterBlock -> clusterBlock.id() == MetadataIndexStateService.INDEX_CLOSED_BLOCK_ID).count(), equalTo(1L));
    }
}
/* * Copyright 2000-2016 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.psi.codeStyle.extractor; import com.intellij.application.options.CodeStyle; import com.intellij.lang.Language; import com.intellij.lang.LanguageFormatting; import com.intellij.openapi.actionSystem.*; import com.intellij.openapi.editor.Editor; import com.intellij.openapi.extensions.Extensions; import com.intellij.openapi.progress.ProcessCanceledException; import com.intellij.openapi.progress.ProgressIndicator; import com.intellij.openapi.progress.ProgressManager; import com.intellij.openapi.progress.Task; import com.intellij.openapi.project.DumbAware; import com.intellij.openapi.project.Project; import com.intellij.openapi.ui.MessageType; import com.intellij.openapi.ui.popup.Balloon; import com.intellij.openapi.ui.popup.JBPopupFactory; import com.intellij.openapi.util.Disposer; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.openapi.wm.IdeFrame; import com.intellij.openapi.wm.WindowManager; import com.intellij.psi.PsiDocumentManager; import com.intellij.psi.PsiFile; import com.intellij.psi.PsiManager; import com.intellij.psi.codeStyle.*; import com.intellij.psi.codeStyle.extractor.differ.LangCodeStyleExtractor; import com.intellij.psi.codeStyle.extractor.processor.CodeStyleDeriveProcessor; import com.intellij.psi.codeStyle.extractor.processor.GenProcessor; import com.intellij.psi.codeStyle.extractor.ui.CodeStyleSettingsNameProvider; import 
com.intellij.psi.codeStyle.extractor.ui.ExtractedSettingsDialog;
import com.intellij.psi.codeStyle.extractor.values.Value;
import com.intellij.psi.codeStyle.extractor.values.ValuesExtractionResult;
import com.intellij.psi.impl.source.codeStyle.CodeStyleSchemesImpl;
import com.intellij.ui.BalloonLayout;
import com.intellij.ui.awt.RelativePoint;
import com.intellij.util.ui.PositionTracker;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NotNull;

import javax.swing.*;
import javax.swing.event.HyperlinkEvent;
import javax.swing.event.HyperlinkListener;
import java.awt.*;
import java.util.List;
import java.util.Map;

/**
 * Action that derives code style settings from the current file: it runs a
 * background extraction over a clone of the active settings and then offers
 * the result in a balloon (apply directly, or review details in a dialog).
 */
public class ExtractCodeStyleAction extends AnAction implements DumbAware {
    @Override
    public void actionPerformed(@NotNull AnActionEvent e) {
        DataContext dataContext = e.getDataContext();
        final Project project = CommonDataKeys.PROJECT.getData(dataContext);
        if (project == null) {
            return;
        }

        // Resolve the target PsiFile: from the editor if one is open,
        // otherwise from a single selected (non-directory) virtual file.
        Editor editor = CommonDataKeys.EDITOR.getData(dataContext);
        final VirtualFile[] files = CommonDataKeys.VIRTUAL_FILE_ARRAY.getData(dataContext);
        PsiFile file = null;
        if (editor == null && files != null && files.length == 1 && !files[0].isDirectory()) {
            file = PsiManager.getInstance(project).findFile(files[0]);
        } else if (editor != null) {
            file = PsiDocumentManager.getInstance(project).getPsiFile(editor.getDocument());
        }
        if (file == null) {
            return;
        }

        Language language = file.getLanguage();
        final LangCodeStyleExtractor extractor = LangCodeStyleExtractor.EXTENSION.forLanguage(language);
        if (extractor == null) {
            return; // no extractor registered for this language
        }

        final CodeStyleSettings settings = CodeStyle.getSettings(file);
        final CodeStyleDeriveProcessor genProcessor = new GenProcessor(extractor);
        final PsiFile finalFile = file;
        // Extraction can be slow, so it runs as a cancelable background task
        // against a clone of the settings; the original stays untouched.
        final Task.Backgroundable task = new Task.Backgroundable(project, "Code Style Extractor", true) {
            @Override
            public void run(@NotNull ProgressIndicator indicator) {
                try {
                    CodeStyleSettings cloneSettings = settings.clone();
                    Map<Value, Object> backup = genProcessor.backupValues(cloneSettings, language);
                    ValuesExtractionResult res = genProcessor.runWithProgress(project, cloneSettings, finalFile, indicator);

                    reportResult(genProcessor.getHTMLReport(), res, project, cloneSettings, finalFile, backup);
                } catch (ProcessCanceledException e) {
                    Utils.logError("Code extraction was canceled");
                } catch (Throwable t) {
                    Utils.logError("Unexpected exception: " + t);
                }
            }
        };
        ProgressManager.getInstance().run(task);
    }

    /**
     * Shows the extraction outcome in a sticky balloon with "Apply" and
     * "Details..." links. "Details..." opens a selection dialog first; on
     * apply, a new scheme named after the file is created from the cloned
     * settings and made current.
     *
     * @param htmlReport       optional HTML summary of what was extracted (may be empty)
     * @param calculatedValues extracted setting values
     * @param cloneSettings    settings clone the values were applied against
     * @param backup           original values, kept for conditional rollback
     */
    public void reportResult(@NotNull final String htmlReport,
                             @NotNull final ValuesExtractionResult calculatedValues,
                             @NotNull final Project project,
                             @NotNull final CodeStyleSettings cloneSettings,
                             @NotNull final PsiFile file,
                             @NotNull final Map<Value, Object> backup) {
        UIUtil.invokeLaterIfNeeded(() -> {
            final Balloon balloon = JBPopupFactory
                .getInstance()
                .createHtmlTextBalloonBuilder(
                    "<html>Formatting Options were extracted for " + file.getName()
                    + (!htmlReport.isEmpty() ? ("<br/>" + htmlReport) : "")
                    + "<br/><a href=\"apply\">Apply</a> <a href=\"details\">Details...</a></html>",
                    MessageType.INFO,
                    new HyperlinkListener() {
                        @Override
                        public void hyperlinkUpdate(HyperlinkEvent e) {
                            if (e.getEventType() == HyperlinkEvent.EventType.ACTIVATED) {
                                boolean apply = "apply".equals(e.getDescription());
                                ExtractedSettingsDialog myDialog = null;
                                if (!apply) {
                                    // "Details..." link: let the user review the extracted
                                    // values in a dialog before deciding to apply.
                                    final List<Value> values = calculatedValues.getValues();
                                    final LanguageCodeStyleSettingsProvider[] providers = Extensions.getExtensions(
                                        LanguageCodeStyleSettingsProvider.EP_NAME);
                                    Language language = file.getLanguage();
                                    CodeStyleSettingsNameProvider nameProvider = new CodeStyleSettingsNameProvider();
                                    for (final LanguageCodeStyleSettingsProvider provider : providers) {
                                        Language target = provider.getLanguage();
                                        if (target.equals(language)) {
                                            //this is our language
                                            nameProvider.addSettings(provider);
                                            myDialog = new ExtractedSettingsDialog(project, nameProvider, values);
                                            apply = myDialog.showAndGet();
                                            break;
                                        }
                                    }
                                }
                                if (apply /*&& myDialog != null*/) {
                                    //create new settings named after the file
                                    //final ExtractedSettingsDialog finalMyDialog = myDialog;
                                    //calculatedValues.applyConditioned(value -> finalMyDialog.valueIsSelectedInTree(value), backup);
                                    calculatedValues.applySelected();
                                    CodeStyleScheme derivedScheme = CodeStyleSchemes
                                        .getInstance()
                                        .createNewScheme("Derived from " + file.getName(), null);
                                    derivedScheme.getCodeStyleSettings().copyFrom(cloneSettings);
                                    CodeStyleSchemes.getInstance().addScheme(derivedScheme);
                                    CodeStyleSchemesImpl.getSchemeManager().setCurrent(derivedScheme);
                                    CodeStyleSettingsManager.getInstance(project).PREFERRED_PROJECT_CODE_STYLE = derivedScheme.getName();
                                }
                            }
                        }
                    }
                )
                // sticky balloon: no fadeout, survives clicks/keys outside,
                // closes via its button or by following a link
                .setFadeoutTime(0)
                .setShowCallout(false)
                .setAnimationCycle(0)
                .setHideOnClickOutside(false)
                .setHideOnKeyOutside(false)
                .setHideOnLinkClick(true)
                .setCloseButtonEnabled(true)
                .createBalloon();
            Disposer.register(project, balloon);

            Window window = WindowManager.getInstance().getFrame(project);
            if (window == null) {
                window = JOptionPane.getRootFrame();
            }
            if (window instanceof IdeFrame) {
                BalloonLayout layout = ((IdeFrame)window).getBalloonLayout();
                if (layout != null) {
                    // anchor the balloon near the bottom-right corner of the IDE frame
                    balloon.show(new PositionTracker<Balloon>(((IdeFrame)window).getComponent()) {
                        @Override
                        public RelativePoint recalculateLocation(Balloon object) {
                            Component c = getComponent();
                            int y = c.getHeight() - 45;
                            return new RelativePoint(c, new Point(c.getWidth() - 150, y));
                        }
                    }, Balloon.Position.above);
                }
            }
        });
    }

    /**
     * Enables the action only when a project, a target file with a backing
     * virtual file, and a formatting model for that file are all available.
     */
    @Override
    public void update(@NotNull AnActionEvent event) {
        Presentation presentation = event.getPresentation();
        DataContext dataContext = event.getDataContext();
        Project project = CommonDataKeys.PROJECT.getData(dataContext);
        if (project == null) {
            presentation.setEnabled(false);
            return;
        }

        Editor editor = CommonDataKeys.EDITOR.getData(dataContext);
        PsiFile file = null;
        if (editor != null) {
            file = PsiDocumentManager.getInstance(project).getPsiFile(editor.getDocument());
        } else {
            final VirtualFile[] files = CommonDataKeys.VIRTUAL_FILE_ARRAY.getData(dataContext);
            if (files != null && files.length == 1 && !files[0].isDirectory()) {
                file = PsiManager.getInstance(project).findFile(files[0]);
            }
        }

        if (file == null || file.getVirtualFile() == null) {
            presentation.setEnabled(false);
            return;
        }

        if (LanguageFormatting.INSTANCE.forContext(file) != null) {
            presentation.setEnabled(true);
        }
        // NOTE(review): when forContext(file) returns null the enabled state is
        // left unchanged instead of being explicitly disabled — confirm this
        // asymmetry is intentional.
    }
}
/* * Copyright 2013 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.example.android.wizardpager.wizard.ui; import com.example.android.wizardpager.R; import android.content.Context; import android.content.res.Resources; import android.content.res.TypedArray; import android.graphics.Canvas; import android.graphics.Paint; import android.graphics.RectF; import android.util.AttributeSet; import android.view.Gravity; import android.view.MotionEvent; import android.view.View; public class StepPagerStrip extends View { private static final int[] ATTRS = new int[]{ android.R.attr.gravity }; private int mPageCount; private int mCurrentPage; private int mGravity = Gravity.LEFT | Gravity.TOP; private float mTabWidth; private float mTabHeight; private float mTabSpacing; private Paint mPrevTabPaint; private Paint mSelectedTabPaint; private Paint mSelectedLastTabPaint; private Paint mNextTabPaint; private RectF mTempRectF = new RectF(); //private Scroller mScroller; private OnPageSelectedListener mOnPageSelectedListener; public StepPagerStrip(Context context) { this(context, null, 0); } public StepPagerStrip(Context context, AttributeSet attrs) { this(context, attrs, 0); } public StepPagerStrip(Context context, AttributeSet attrs, int defStyle) { super(context, attrs, defStyle); final TypedArray a = context.obtainStyledAttributes(attrs, ATTRS); mGravity = a.getInteger(0, mGravity); a.recycle(); final Resources res = getResources(); mTabWidth = 
res.getDimensionPixelSize(R.dimen.step_pager_tab_width); mTabHeight = res.getDimensionPixelSize(R.dimen.step_pager_tab_height); mTabSpacing = res.getDimensionPixelSize(R.dimen.step_pager_tab_spacing); mPrevTabPaint = new Paint(); mPrevTabPaint.setColor(res.getColor(R.color.step_pager_previous_tab_color)); mSelectedTabPaint = new Paint(); mSelectedTabPaint.setColor(res.getColor(R.color.step_pager_selected_tab_color)); mSelectedLastTabPaint = new Paint(); mSelectedLastTabPaint.setColor(res.getColor(R.color.step_pager_selected_last_tab_color)); mNextTabPaint = new Paint(); mNextTabPaint.setColor(res.getColor(R.color.step_pager_next_tab_color)); } public void setOnPageSelectedListener(OnPageSelectedListener onPageSelectedListener) { mOnPageSelectedListener = onPageSelectedListener; } @Override protected void onDraw(Canvas canvas) { super.onDraw(canvas); if (mPageCount == 0) { return; } float totalWidth = mPageCount * (mTabWidth + mTabSpacing) - mTabSpacing; float totalLeft; boolean fillHorizontal = false; switch (mGravity & Gravity.HORIZONTAL_GRAVITY_MASK) { case Gravity.CENTER_HORIZONTAL: totalLeft = (getWidth() - totalWidth) / 2; break; case Gravity.RIGHT: totalLeft = getWidth() - getPaddingRight() - totalWidth; break; case Gravity.FILL_HORIZONTAL: totalLeft = getPaddingLeft(); fillHorizontal = true; break; default: totalLeft = getPaddingLeft(); } switch (mGravity & Gravity.VERTICAL_GRAVITY_MASK) { case Gravity.CENTER_VERTICAL: mTempRectF.top = (int) (getHeight() - mTabHeight) / 2; break; case Gravity.BOTTOM: mTempRectF.top = getHeight() - getPaddingBottom() - mTabHeight; break; default: mTempRectF.top = getPaddingTop(); } mTempRectF.bottom = mTempRectF.top + mTabHeight; float tabWidth = mTabWidth; if (fillHorizontal) { tabWidth = (getWidth() - getPaddingRight() - getPaddingLeft() - (mPageCount - 1) * mTabSpacing) / mPageCount; } for (int i = 0; i < mPageCount; i++) { mTempRectF.left = totalLeft + (i * (tabWidth + mTabSpacing)); mTempRectF.right = mTempRectF.left + 
tabWidth; canvas.drawRect(mTempRectF, i < mCurrentPage ? mPrevTabPaint : (i > mCurrentPage ? mNextTabPaint : (i == mPageCount - 1 ? mSelectedLastTabPaint : mSelectedTabPaint))); } } @Override protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { setMeasuredDimension( View.resolveSize( (int) (mPageCount * (mTabWidth + mTabSpacing) - mTabSpacing) + getPaddingLeft() + getPaddingRight(), widthMeasureSpec), View.resolveSize( (int) mTabHeight + getPaddingTop() + getPaddingBottom(), heightMeasureSpec)); } @Override protected void onSizeChanged(int w, int h, int oldw, int oldh) { scrollCurrentPageIntoView(); super.onSizeChanged(w, h, oldw, oldh); } @Override public boolean onTouchEvent(MotionEvent event) { if (mOnPageSelectedListener != null) { switch (event.getActionMasked()) { case MotionEvent.ACTION_DOWN: case MotionEvent.ACTION_MOVE: int position = hitTest(event.getX()); if (position >= 0) { mOnPageSelectedListener.onPageStripSelected(position); } return true; } } return super.onTouchEvent(event); } private int hitTest(float x) { if (mPageCount == 0) { return -1; } float totalWidth = mPageCount * (mTabWidth + mTabSpacing) - mTabSpacing; float totalLeft; boolean fillHorizontal = false; switch (mGravity & Gravity.HORIZONTAL_GRAVITY_MASK) { case Gravity.CENTER_HORIZONTAL: totalLeft = (getWidth() - totalWidth) / 2; break; case Gravity.RIGHT: totalLeft = getWidth() - getPaddingRight() - totalWidth; break; case Gravity.FILL_HORIZONTAL: totalLeft = getPaddingLeft(); fillHorizontal = true; break; default: totalLeft = getPaddingLeft(); } float tabWidth = mTabWidth; if (fillHorizontal) { tabWidth = (getWidth() - getPaddingRight() - getPaddingLeft() - (mPageCount - 1) * mTabSpacing) / mPageCount; } float totalRight = totalLeft + (mPageCount * (tabWidth + mTabSpacing)); if (x >= totalLeft && x <= totalRight && totalRight > totalLeft) { return (int) (((x - totalLeft) / (totalRight - totalLeft)) * mPageCount); } else { return -1; } } public void setCurrentPage(int 
currentPage) { mCurrentPage = currentPage; invalidate(); scrollCurrentPageIntoView(); // TODO: Set content description appropriately } private void scrollCurrentPageIntoView() { // TODO: only works with left gravity for now // // float widthToActive = getPaddingLeft() + (mCurrentPage + 1) * (mTabWidth + mTabSpacing) // - mTabSpacing; // int viewWidth = getWidth(); // // int startScrollX = getScrollX(); // int destScrollX = (widthToActive > viewWidth) ? (int) (widthToActive - viewWidth) : 0; // // if (mScroller == null) { // mScroller = new Scroller(getContext()); // } // // mScroller.abortAnimation(); // mScroller.startScroll(startScrollX, 0, destScrollX - startScrollX, 0); // postInvalidate(); } public void setPageCount(int count) { mPageCount = count; invalidate(); // TODO: Set content description appropriately } public static interface OnPageSelectedListener { void onPageStripSelected(int position); } // // @Override // public void computeScroll() { // super.computeScroll(); // if (mScroller.computeScrollOffset()) { // setScrollX(mScroller.getCurrX()); // } // } }
/**
 */
package geometry.provider;

import geometry.util.GeometryAdapterFactory;

import java.util.ArrayList;
import java.util.Collection;

import org.eclipse.emf.common.notify.Adapter;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.notify.Notifier;

import org.eclipse.emf.edit.provider.ChangeNotifier;
import org.eclipse.emf.edit.provider.ComposeableAdapterFactory;
import org.eclipse.emf.edit.provider.ComposedAdapterFactory;
import org.eclipse.emf.edit.provider.IChangeNotifier;
import org.eclipse.emf.edit.provider.IDisposable;
import org.eclipse.emf.edit.provider.IEditingDomainItemProvider;
import org.eclipse.emf.edit.provider.IItemLabelProvider;
import org.eclipse.emf.edit.provider.IItemPropertySource;
import org.eclipse.emf.edit.provider.INotifyChangedListener;
import org.eclipse.emf.edit.provider.IStructuredItemContentProvider;
import org.eclipse.emf.edit.provider.ITreeItemContentProvider;

// NOTE(review): This class is EMF-generated boilerplate (all members carry
// "@generated"). Do not hand-edit logic here — regenerate from the geometry
// model instead, or mark changed members "@generated NOT".

/**
 * This is the factory that is used to provide the interfaces needed to support Viewers.
 * The adapters generated by this factory convert EMF adapter notifications into calls to {@link #fireNotifyChanged fireNotifyChanged}.
 * The adapters also support Eclipse property sheets.
 * Note that most of the adapters are shared among multiple instances.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public class GeometryItemProviderAdapterFactory extends GeometryAdapterFactory implements ComposeableAdapterFactory, IChangeNotifier, IDisposable {
	/**
	 * This keeps track of the root adapter factory that delegates to this adapter factory.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected ComposedAdapterFactory parentAdapterFactory;

	/**
	 * This is used to implement {@link org.eclipse.emf.edit.provider.IChangeNotifier}.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected IChangeNotifier changeNotifier = new ChangeNotifier();

	/**
	 * This keeps track of all the supported types checked by {@link #isFactoryForType isFactoryForType}.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected Collection<Object> supportedTypes = new ArrayList<Object>();

	/**
	 * This constructs an instance.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public GeometryItemProviderAdapterFactory() {
		supportedTypes.add(IEditingDomainItemProvider.class);
		supportedTypes.add(IStructuredItemContentProvider.class);
		supportedTypes.add(ITreeItemContentProvider.class);
		supportedTypes.add(IItemLabelProvider.class);
		supportedTypes.add(IItemPropertySource.class);
	}

	/**
	 * This keeps track of the one adapter used for all {@link geometry.Geometry} instances.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected GeometryItemProvider geometryItemProvider;

	/**
	 * This creates an adapter for a {@link geometry.Geometry}.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public Adapter createGeometryAdapter() {
		// Item providers are stateless, so one shared instance is lazily
		// created and reused for every Geometry object.
		if (geometryItemProvider == null) {
			geometryItemProvider = new GeometryItemProvider(this);
		}

		return geometryItemProvider;
	}

	/**
	 * This keeps track of the one adapter used for all {@link geometry.GObject} instances.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected GObjectItemProvider gObjectItemProvider;

	/**
	 * This creates an adapter for a {@link geometry.GObject}.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public Adapter createGObjectAdapter() {
		if (gObjectItemProvider == null) {
			gObjectItemProvider = new GObjectItemProvider(this);
		}

		return gObjectItemProvider;
	}

	/**
	 * This keeps track of the one adapter used for all {@link geometry.Line} instances.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected LineItemProvider lineItemProvider;

	/**
	 * This creates an adapter for a {@link geometry.Line}.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public Adapter createLineAdapter() {
		if (lineItemProvider == null) {
			lineItemProvider = new LineItemProvider(this);
		}

		return lineItemProvider;
	}

	/**
	 * This keeps track of the one adapter used for all {@link geometry.Point} instances.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected PointItemProvider pointItemProvider;

	/**
	 * This creates an adapter for a {@link geometry.Point}.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public Adapter createPointAdapter() {
		if (pointItemProvider == null) {
			pointItemProvider = new PointItemProvider(this);
		}

		return pointItemProvider;
	}

	/**
	 * This keeps track of the one adapter used for all {@link geometry.BendPoint} instances.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected BendPointItemProvider bendPointItemProvider;

	/**
	 * This creates an adapter for a {@link geometry.BendPoint}.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public Adapter createBendPointAdapter() {
		if (bendPointItemProvider == null) {
			bendPointItemProvider = new BendPointItemProvider(this);
		}

		return bendPointItemProvider;
	}

	/**
	 * This keeps track of the one adapter used for all {@link geometry.Connector} instances.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected ConnectorItemProvider connectorItemProvider;

	/**
	 * This creates an adapter for a {@link geometry.Connector}.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public Adapter createConnectorAdapter() {
		if (connectorItemProvider == null) {
			connectorItemProvider = new ConnectorItemProvider(this);
		}

		return connectorItemProvider;
	}

	/**
	 * This keeps track of the one adapter used for all {@link geometry.InputPoint} instances.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected InputPointItemProvider inputPointItemProvider;

	/**
	 * This creates an adapter for a {@link geometry.InputPoint}.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public Adapter createInputPointAdapter() {
		if (inputPointItemProvider == null) {
			inputPointItemProvider = new InputPointItemProvider(this);
		}

		return inputPointItemProvider;
	}

	/**
	 * This returns the root adapter factory that contains this factory.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public ComposeableAdapterFactory getRootAdapterFactory() {
		// Walk up the chain of composed factories until the top is reached.
		return parentAdapterFactory == null ? this : parentAdapterFactory.getRootAdapterFactory();
	}

	/**
	 * This sets the composed adapter factory that contains this factory.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setParentAdapterFactory(ComposedAdapterFactory parentAdapterFactory) {
		this.parentAdapterFactory = parentAdapterFactory;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public boolean isFactoryForType(Object type) {
		return supportedTypes.contains(type) || super.isFactoryForType(type);
	}

	/**
	 * This implementation substitutes the factory itself as the key for the adapter.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public Adapter adapt(Notifier notifier, Object type) {
		// Passing "this" as the type keys all adapters from this factory
		// under a single key on the notifier.
		return super.adapt(notifier, this);
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public Object adapt(Object object, Object type) {
		if (isFactoryForType(type)) {
			Object adapter = super.adapt(object, type);
			// Only hand back the adapter if it actually implements the
			// requested interface (when the request was a Class).
			if (!(type instanceof Class<?>) || (((Class<?>)type).isInstance(adapter))) {
				return adapter;
			}
		}

		return null;
	}

	/**
	 * This adds a listener.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void addListener(INotifyChangedListener notifyChangedListener) {
		changeNotifier.addListener(notifyChangedListener);
	}

	/**
	 * This removes a listener.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void removeListener(INotifyChangedListener notifyChangedListener) {
		changeNotifier.removeListener(notifyChangedListener);
	}

	/**
	 * This delegates to {@link #changeNotifier} and to {@link #parentAdapterFactory}.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void fireNotifyChanged(Notification notification) {
		changeNotifier.fireNotifyChanged(notification);

		if (parentAdapterFactory != null) {
			parentAdapterFactory.fireNotifyChanged(notification);
		}
	}

	/**
	 * This disposes all of the item providers created by this factory.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void dispose() {
		// Each cached item provider is disposed at most once; null checks
		// cover providers that were never lazily created.
		if (geometryItemProvider != null) geometryItemProvider.dispose();
		if (gObjectItemProvider != null) gObjectItemProvider.dispose();
		if (lineItemProvider != null) lineItemProvider.dispose();
		if (pointItemProvider != null) pointItemProvider.dispose();
		if (bendPointItemProvider != null) bendPointItemProvider.dispose();
		if (connectorItemProvider != null) connectorItemProvider.dispose();
		if (inputPointItemProvider != null) inputPointItemProvider.dispose();
	}

}
// Copyright 2004 The Apache Software Foundation // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package gravity.util; import gravity.Component; import gravity.ExceptionWrapper; import gravity.UsageException; import java.lang.reflect.Constructor; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.util.HashMap; import java.util.Map; /** * Static utility methods for handling reflection. * * @author Howard Lewis Ship * @author Harish Krishnaswamy * @version $Id: ReflectUtils.java,v 1.8 2005-10-06 21:59:26 harishkswamy Exp $ */ public class ReflectUtils { private ExceptionWrapper _exceptionWrapper; public ReflectUtils(ExceptionWrapper exceptionWrapper) { _exceptionWrapper = exceptionWrapper; } /** * Map from primitive type to wrapper type. */ private static final Map PRIMITIVE_MAP = new HashMap(); static { PRIMITIVE_MAP.put(boolean.class, Boolean.class); PRIMITIVE_MAP.put(byte.class, Byte.class); PRIMITIVE_MAP.put(char.class, Character.class); PRIMITIVE_MAP.put(short.class, Short.class); PRIMITIVE_MAP.put(int.class, Integer.class); PRIMITIVE_MAP.put(long.class, Long.class); PRIMITIVE_MAP.put(float.class, Float.class); PRIMITIVE_MAP.put(double.class, Double.class); } private String getTypeName(Class type) { return type == null ? 
null : type.getName(); } private String typesToString(Class[] types) { if (types == null || types.length == 0) return ""; StringBuffer buf = new StringBuffer(); for (int i = 0; i < types.length - 1; i++) buf.append(getTypeName(types[i])).append(", "); String typeName = getTypeName(types[types.length - 1]); buf.append(typeName); return buf.toString(); } private boolean isCompatible(Class paramType, Class valueType) { if (paramType.isAssignableFrom(valueType)) return true; // Reflection fudges the assignment of a wrapper class to a primitive // type ... we check for that the hard way. if (paramType.isPrimitive()) { Class wrapperClass = (Class) PRIMITIVE_MAP.get(paramType); return wrapperClass.isAssignableFrom(valueType); } return false; } private boolean isMatch(Class[] paramTypes, Class[] valueTypes) { if (paramTypes.length != valueTypes.length) return false; for (int i = 0; i < paramTypes.length; i++) { if (valueTypes[i] == null) { if (paramTypes[i].isPrimitive()) return false; continue; } if (!isCompatible(paramTypes[i], valueTypes[i])) return false; } return true; } private Constructor findConstructor(Class targetClass, Class[] argTypes) { Constructor[] constructors = targetClass.getConstructors(); for (int i = 0; i < constructors.length; i++) { if (isMatch(constructors[i].getParameterTypes(), argTypes)) return constructors[i]; } throw _exceptionWrapper.wrap(new UsageException(), Message.CANNOT_FIND_CONSTRUCTOR, targetClass.getName(), typesToString(argTypes)); } private Class[] getTypes(Object[] args) { if (args == null) args = new Object[0]; Class[] argTypes = new Class[args.length]; for (int i = 0; i < args.length; i++) argTypes[i] = args[i] == null ? null : args[i].getClass(); return argTypes; } /** * Searches for a constructor matching against the provided arguments. 
* * @param targetClass * the class to be instantiated * @param args * the parameters to pass to the constructor (may be null or empty) * @return the new instance * @throws UsageException * on any failure */ public Object invokeConstructor(Class targetClass, Object[] args) { Class[] argTypes = null; try { argTypes = getTypes(args); Constructor ctor = findConstructor(targetClass, argTypes); return ctor.newInstance(args); } catch (Exception e) { throw _exceptionWrapper.wrap(e, Message.CANNOT_INVOKE_CONSTRUCTOR, targetClass, typesToString(argTypes)); } } private Method findMethod(Class targetClass, String methodName, Class[] argTypes) { Method[] methods = targetClass.getMethods(); for (int i = 0; i < methods.length; i++) { if (!methods[i].getName().equals(methodName)) continue; if (isMatch(methods[i].getParameterTypes(), argTypes)) return methods[i]; } throw _exceptionWrapper.wrap(new UsageException(), Message.CANNOT_FIND_METHOD, methodName, typesToString(argTypes), targetClass); } /** * Invokes the provided method on the provided target object with the provided arguments. * * @return Returns the result of the method invocation. */ public Object invokeMethod(Object target, String methodName, Object[] args) { Class[] argTypes = null; argTypes = getTypes(args); Method method = findMethod(target.getClass(), methodName, argTypes); return invokeMethod(target, method, args, null); } /** * This method invokes the provided method relectively on the provided target object with the * provided arguments. The {@link Component}parameter is simply used in forming a message in * the event of an error and can be passed a null. * * @return Returns the result of the method invocation. 
* @throws WrapperException * {@link Message#CANNOT_INVOKE_METHOD} */ public Object invokeMethod(Object target, Method method, Object[] args, Component comp) { try { return method.invoke(target, args); } catch (Throwable t) { while (t instanceof InvocationTargetException) t = ((InvocationTargetException) t).getTargetException(); String compStr = comp == null ? "" : " on component: " + comp; throw _exceptionWrapper.wrap(t, Message.CANNOT_INVOKE_METHOD, method, compStr); } } }
/////////////////////////////////////////////////////////////////////
// @(#)RouteMap.java
//
// @author Bruno Quoitin (bqu@infonet.fundp.ac.be)
// @date 18/07/2002
// @lastdate 10/08/2002
/////////////////////////////////////////////////////////////////////

package infonet.javasim.bgp4.policy;

import java.io.*;
import java.lang.*;
import java.util.ArrayList;
import java.util.Stack;

// ===== infonet.javasim.bgp4.policy.RouteMap ==================== //
/**
 * The RouteMap class is a helper to build complex filtering rules
   using Rule, Clause, Predicate, AtomicPredicate, Action and
   AtomicAction. <br><br>

   <strong>RouteMap syntax:</strong><br>
   <p>
   rule ::= clause | ( clause "\n" )* rule<br>
   clause ::= ( predicate )? "|" ( action )?<br>
   predicate ::= atomic-predicate | ( atomic-predicate "," ) predicate<br>
   atomic-predicate ::= attribute predicate-matcher<br>
   action ::= atomic-action | (atomic-action "," ) action<br>
   atomic-action ::= attribute action-type (action-values)* | deny<br>
   </p>

   The following comments delimiters are allowed: C-style, C++-style
   and bash-style.<br><br>

   <strong>RouteMap example:</strong><br>
   <p>
   <i><b>|as_path prepend 3,as_path prepend 3</b></i>
   </p>
   This route-map statement will prepend 2 times AS number 3 to each
   route (predicate statement is empty).

 * @see Rule
 * @see Clause
 * @see Predicate
 * @see AtomicPredicate
 * @see Action
 * @see AtomicAction
 */
public class RouteMap extends Rule
{
    // The raw route-map script this rule set was parsed from.
    private String script;

    // Special tokens of the route-map mini-language.
    private final char ST_CLAUSE_DELIMITER= '|';   // separates predicate from action
    private final char ST_ATOM_DELIMITER= ',';     // separates atoms in a list
    private final char ST_QUOTE_DELIMITER= '"';    // quoted matcher/value strings

    // Syntactic analyzer states. The parser below is a hand-written state
    // machine; each SA_* constant names one state and has a matching
    // handle_* method. (These are constant variables, so they are legal
    // switch-case labels.)
    private final int SA_RULE= 0;
    private final int SA_PREDICATE= 100;
    private final int SA_ATOMIC_PREDICATE= 101;
    private final int SA_PREDICATE1= 102;
    private final int SA_ACTION= 200;
    private final int SA_ATOMIC_ACTION1= 201;
    private final int SA_ATOMIC_ACTION2= 202;
    private final int SA_ACTION1= 203;
    private final int SA_ACTION2= 204;

    // State machine variables: last token read, current state, tokenizer.
    private int token= 0;
    private int saState= SA_RULE;
    private StreamTokenizer st= null;

    // Current atomic predicates (accumulated until the clause is closed).
    private String atomicPredicateAttribute;
    private String atomicPredicateMatcher;
    private ArrayList atomicPredicates= null;

    // Current atomic actions (accumulated until the clause is closed).
    private String atomicActionAttribute;
    private String atomicActionAction;
    private ArrayList atomicActionValues= null;
    private ArrayList atomicActions= null;
    private boolean actionDeny= false;   // true when the action is "deny"

    // ----- RouteMap constructor ------------------------------- //
    /** Build a new RouteMap (Rule), parse the given script, build
     * filtering clauses and add these to the RouteMap (Rule).
     */
    public RouteMap(String script) throws SyntacticException
    {
	super(false,null);
	this.script= script;
	parseScript(script);
    }

    // ----- RouteMap.getScript ----------------------------------- //
    /**
     * Return the route-map script.
     * @uml.property name="script"
     */
    public String getScript()
    {
	return script;
    }

    // ----- RouteMap.parseScript ------------------------------- //
    /** This method parses the route-map script and build filtering
     * clauses that are added to this RouteMap (Rule).
     *
     * Tokenizes the script (with C/C++/bash comments and quoted strings
     * allowed), then drives the SA_* state machine one token at a time
     * until EOF. Each handler consumes the current token and sets the
     * next state; a token that no handler accepts raises
     * SyntacticException.
     */
    public void parseScript(String script) throws SyntacticException
    {
	// Set up lexical analyzer
	StringReader sr= new StringReader(script);
	st= new StreamTokenizer(sr);
	st.eolIsSignificant(true);         // report end-of-line (EOL closes a clause)
	st.lowerCaseMode(true);            // convert everything to lower case
	st.slashStarComments(true);        // allow C-style comments
	st.slashSlashComments(true);       // allow C++-style comments
	st.commentChar('#');               // allow bash-style comments
	st.quoteChar(ST_QUOTE_DELIMITER);  // quote delimiter
	st.wordChars('_', '_');            // underscore is allowed in a word

	// Set up syntactic and semantic analyzers
	saState= SA_RULE;
	token= 0;
	atomicPredicates= null;
	atomicActions= null;

	try {
	    do {
		token= st.nextToken();
		// Dispatch on the current parser state.
		switch (saState) {
		case SA_RULE: handle_RULE(); break;
		case SA_PREDICATE: handle_PREDICATE(); break;
		case SA_ATOMIC_PREDICATE: handle_ATOMIC_PREDICATE(); break;
		case SA_PREDICATE1: handle_PREDICATE1(); break;
		case SA_ACTION: handle_ACTION(); break;
		case SA_ATOMIC_ACTION1: handle_ATOMIC_ACTION1(); break;
		case SA_ATOMIC_ACTION2: handle_ATOMIC_ACTION2(); break;
		case SA_ACTION1: handle_ACTION1(); break;
		case SA_ACTION2: handle_ACTION2(); break;
		default:
		    throwSyntacticException();
		}
	    } while (token != StreamTokenizer.TT_EOF);
	} catch (IOException e) {
	    // NOTE(review): an I/O error aborts parsing silently (after a
	    // stack trace); with a StringReader source this should not occur.
	    e.printStackTrace();
	}
    }

    // ----- RouteMap.handle_RULE -------------------------------- //
    /** Private method to handle state RULE.
     *
     * Start of a clause: a leading '|' means "empty predicate"; any
     * other token begins the predicate part.
     */
    private void handle_RULE() throws SyntacticException
    {
	switch (token) {
	case StreamTokenizer.TT_EOL:
	    break;   // blank line between clauses
	case StreamTokenizer.TT_EOF:
	    break;   // end of script
	case ST_CLAUSE_DELIMITER:
	    saState= SA_ACTION;
	    break;
	default:
	    // Re-dispatch the same token as the first predicate token.
	    saState= SA_PREDICATE;
	    handle_PREDICATE();
	}
    }

    // ----- RouteMap.handle_PREDICATE --------------------------- //
    /** Private method to handle state PREDICATE.
     *
     * Expects the attribute name of an atomic predicate.
     */
    private void handle_PREDICATE() throws SyntacticException
    {
	switch (token) {
	case StreamTokenizer.TT_WORD:
	    atomicPredicateAttribute= st.sval;
	    saState= SA_ATOMIC_PREDICATE;
	    break;
	default:
	    throwSyntacticException();
	}
    }

    // ----- RouteMap.handle_ATOMIC_PREDICATE -------------------- //
    /** Private method to handle state ATOMIC_PREDICATE.
     *
     * Expects the matcher: either a number or a quoted string.
     */
    private void handle_ATOMIC_PREDICATE() throws SyntacticException
    {
	switch (token) {
	case StreamTokenizer.TT_NUMBER:
	    // Numbers are parsed as double; truncate to int for the matcher.
	    atomicPredicateMatcher= ""+((int) st.nval);
	    addAtomicPredicate();
	    saState= SA_PREDICATE1;
	    break;
	case ST_QUOTE_DELIMITER:
	    atomicPredicateMatcher= st.sval;
	    addAtomicPredicate();
	    saState= SA_PREDICATE1;
	    break;
	default:
	    throwSyntacticException();
	}
    }

    // ----- RouteMap.handle_PREDICATE1 -------------------------- //
    /** Private method to handle state PREDICATE1.
     *
     * After an atomic predicate: ',' continues the predicate list,
     * '|' switches to the action part.
     */
    private void handle_PREDICATE1() throws SyntacticException
    {
	switch (token) {
	case ST_CLAUSE_DELIMITER:
	    saState= SA_ACTION;
	    break;
	case ST_ATOM_DELIMITER:
	    saState= SA_PREDICATE;
	    break;
	default:
	    throwSyntacticException();
	}
    }

    // ----- RouteMap.handle_ACTION ------------------------------ //
    /** Private method to handle state ACTION.
     *
     * Expects an action attribute name, the special word "deny", or
     * end-of-line/file (empty action => clause is closed).
     */
    private void handle_ACTION() throws SyntacticException
    {
	switch (token) {
	case StreamTokenizer.TT_WORD:
	    atomicActionAttribute= st.sval;
	    if (atomicActionAttribute.equals("deny")) {
		actionDeny= true;
		saState= SA_ACTION2;
	    } else
		saState= SA_ATOMIC_ACTION1;
	    break;
	case StreamTokenizer.TT_EOF:
	case StreamTokenizer.TT_EOL:
	    addClause();
	    saState= SA_RULE;
	    break;
	default:
	    throwSyntacticException();
	}
    }

    // ----- RouteMap.handle_ATOMIC_ACTION1 ---------------------- //
    /** Private method to handle state ATOMIC_ACTION1.
     *
     * Expects the action type (a word) following the attribute name.
     */
    private void handle_ATOMIC_ACTION1() throws SyntacticException
    {
	switch (token) {
	case StreamTokenizer.TT_WORD:
	    atomicActionAction= st.sval;
	    saState= SA_ATOMIC_ACTION2;
	    break;
	default:
	    throwSyntacticException();
	}
    }

    // ----- RouteMap.handle_ATOMIC_ACTION2 ---------------------- //
    /** Private method to handle state ATOMIC_ACTION2.
     *
     * Collects zero or more action values; ',' starts the next atomic
     * action, EOL/EOF closes the action and the clause.
     */
    private void handle_ATOMIC_ACTION2() throws SyntacticException
    {
	switch (token) {
	case StreamTokenizer.TT_NUMBER:
	    addAtomicActionValue(""+((int) st.nval));
	    // Many values are accepted, parser state must not be
	    // changed.
	    break;
	case StreamTokenizer.TT_WORD:
	case ST_QUOTE_DELIMITER:
	    addAtomicActionValue(st.sval);
	    // Many values are accepted, parser state must not be
	    // changed.
	    break;
	case StreamTokenizer.TT_EOF:
	case StreamTokenizer.TT_EOL:
	    addAtomicAction();
	    addClause();
	    saState= SA_RULE;
	    break;
	case ST_ATOM_DELIMITER:
	    addAtomicAction();
	    saState= SA_ACTION1;
	    break;
	default:
	    throwSyntacticException();
	}
    }

    // ----- RouteMap.handle_ACTION1 ----------------------------- //
    /** Private method to handle state ACTION1.
     *
     * After ',': expects the attribute name of the next atomic action.
     * (Unlike SA_ACTION, "deny" is not special here.)
     */
    private void handle_ACTION1() throws SyntacticException
    {
	switch (token) {
	case StreamTokenizer.TT_WORD:
	    atomicActionAttribute= st.sval;
	    saState= SA_ATOMIC_ACTION1;
	    break;
	default:
	    throwSyntacticException();
	}
    }

    // ----- RouteMap.handle_ACTION2 ----------------------------- //
    /** Private method to handle state ACTION2.
     *
     * After "deny": only end-of-line/file is accepted (deny takes no
     * values and cannot be combined with other actions).
     */
    private void handle_ACTION2() throws SyntacticException
    {
	switch (token) {
	case StreamTokenizer.TT_EOF:
	case StreamTokenizer.TT_EOL:
	    addClause();
	    saState= SA_RULE;
	    break;
	default:
	    throwSyntacticException();
	}
    }

    // ----- RouteMap.throwSyntacticException -------------------- //
    /** Throw a SyntacticException with the current script line
     * number, the last token that has been read and the current state of
     * the parser.
     *
     * @see SyntacticException
     */
    private void throwSyntacticException() throws SyntacticException
    {
	String msg= ""+st.lineno()+": token=";
	switch (token) {
	case StreamTokenizer.TT_EOF: msg+= "EOF"; break;
	case StreamTokenizer.TT_EOL: msg+= "EOL"; break;
	case StreamTokenizer.TT_NUMBER: msg+= "NUMBER("+st.nval+")"; break;
	case StreamTokenizer.TT_WORD: msg+= "WORD("+st.sval+")"; break;
	default:
	    msg+= "UNKNOWN("+((char) token)+")";
	}
	msg+= ",state=";
	// NOTE(review): SA_ACTION2 has no case below and is reported as
	// "?" — consider adding it for clearer diagnostics.
	switch (saState) {
	case SA_RULE: msg+= "RULE"; break;
	case SA_PREDICATE: msg+= "PREDICATE"; break;
	case SA_ATOMIC_PREDICATE: msg+= "ATOMIC_PREDICATE"; break;
	case SA_ACTION: msg+= "ACTION"; break;
	case SA_ATOMIC_ACTION1: msg+= "ATOMIC_ACTION1"; break;
	case SA_ATOMIC_ACTION2: msg+= "ATOMIC_ACTION2"; break;
	case SA_ACTION1: msg+= "ACTION1"; break;
	default:
	    msg+= "?";
	}
	throw new SyntacticException(msg);
    }

    // ----- RouteMap.addAtomicPredicate ------------------------- //
    /** Private method used to add a new atomic predicate to the
     * current clause.
     */
    private void addAtomicPredicate()
    {
	if (atomicPredicates == null)
	    atomicPredicates= new ArrayList();
	atomicPredicates.add(new AtomicPredicate(atomicPredicateAttribute,
						 atomicPredicateMatcher));
	// Reset the scratch fields for the next atom.
	atomicPredicateAttribute= null;
	atomicPredicateMatcher= null;
    }

    // ----- RouteMap.addAtomicAction ---------------------------- //
    /** Private method used to add a new atomic action to the current
     * clause.
     *
     * A "deny" action produces no AtomicAction; the deny flag is
     * consumed by addClause() instead.
     */
    private void addAtomicAction()
    {
	if (!actionDeny) {
	    if (atomicActions == null)
		atomicActions= new ArrayList();
	    if (atomicActionValues == null)
		atomicActionValues= new ArrayList();
	    atomicActions.add(new AtomicAction(atomicActionAttribute,
					       atomicActionAction,
					       atomicActionValues));
	}
	// Reset the scratch fields for the next atom.
	atomicActionAttribute= null;
	atomicActionAction= null;
	atomicActionValues= null;
    }

    // ----- RouteMap.addAtomicActionValue ----------------------- //
    /** Private method used to add an atomic action value to the
     * current atomic action.
     */
    private void addAtomicActionValue(String value)
    {
	if (atomicActionValues == null)
	    atomicActionValues= new ArrayList();
	atomicActionValues.add(value);
    }

    // ----- RouteMap.addClause ---------------------------------- //
    /** Private method used to build a new clause with current
     * predicates and actions. This new clause is added to the
     * RouteMap (Rule).
     */
    private void addClause()
    {
	// Empty lists (not null) represent "match everything" / "no action".
	if (atomicPredicates == null)
	    atomicPredicates= new ArrayList();
	if (atomicActions == null)
	    atomicActions= new ArrayList();

	// Build a new clause and add the new clause to the rule
	if (actionDeny)
	    add_clause(new Clause(new Predicate(atomicPredicates),
				  new Action(false)));
	else
	    add_clause(new Clause(new Predicate(atomicPredicates),
				  new Action(true, atomicActions)));

	// Clear list of predicates and list of actions
	atomicPredicates= null;
	atomicActions= null;
	actionDeny= false;
    }

    // ----- RouteMap.SyntacticException ------------------------- //
    /** Exception thrown when a syntactic error has been detected. The
     * exception message contains the line number, the token that
     * caused the error and the state of the parser.
     */
    public class SyntacticException extends Exception
    {
	public SyntacticException(String msg)
	{
	    super(msg);
	    // NOTE(review): also echoed to stdout at construction time.
	    System.out.println("syntactic error: "+msg);
	}
    }

}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.nifi.snmp.processors; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import org.apache.nifi.annotation.behavior.InputRequirement; import org.apache.nifi.annotation.behavior.InputRequirement.Requirement; import org.apache.nifi.annotation.documentation.CapabilityDescription; import org.apache.nifi.annotation.documentation.Tags; import org.apache.nifi.components.PropertyDescriptor; import org.apache.nifi.flowfile.FlowFile; import org.apache.nifi.processor.ProcessContext; import org.apache.nifi.processor.ProcessSession; import org.apache.nifi.processor.Relationship; import org.apache.nifi.processor.exception.ProcessException; import org.snmp4j.PDU; import org.snmp4j.ScopedPDU; import org.snmp4j.event.ResponseEvent; import org.snmp4j.mp.SnmpConstants; import org.snmp4j.smi.AbstractVariable; import org.snmp4j.smi.AssignableFromInteger; import org.snmp4j.smi.AssignableFromLong; import org.snmp4j.smi.AssignableFromString; import org.snmp4j.smi.OID; import org.snmp4j.smi.OctetString; import org.snmp4j.smi.Variable; import 
org.snmp4j.smi.VariableBinding; /** * Performs a SNMP Set operation based on attributes of incoming FlowFile. * Upon each invocation of {@link #onTrigger(ProcessContext, ProcessSession)} * method, it will inspect attributes of FlowFile and look for attributes with * name formatted as "snmp$OID" to set the attribute value to this OID. */ @Tags({ "snmp", "set", "oid" }) @InputRequirement(Requirement.INPUT_REQUIRED) @CapabilityDescription("Based on incoming FlowFile attributes, the processor will execute SNMP Set requests." + " When founding attributes with name like snmp$<OID>, the processor will atempt to set the value of" + " attribute to the corresponding OID given in the attribute name") public class SetSNMP extends AbstractSNMPProcessor<SNMPSetter> { /** relationship for success */ public static final Relationship REL_SUCCESS = new Relationship.Builder() .name("success") .description("All FlowFiles that have been successfully used to perform SNMP Set are routed to this relationship") .build(); /** relationship for failure */ public static final Relationship REL_FAILURE = new Relationship.Builder() .name("failure") .description("All FlowFiles that failed during the SNMP Set care routed to this relationship") .build(); /** list of properties descriptors */ private final static List<PropertyDescriptor> propertyDescriptors; /** list of relationships */ private final static Set<Relationship> relationships; /* * Will ensure that the list of property descriptors is build only once. 
* Will also create a Set of relationships */ static { List<PropertyDescriptor> _propertyDescriptors = new ArrayList<>(); _propertyDescriptors.addAll(descriptors); propertyDescriptors = Collections.unmodifiableList(_propertyDescriptors); Set<Relationship> _relationships = new HashSet<>(); _relationships.add(REL_SUCCESS); _relationships.add(REL_FAILURE); relationships = Collections.unmodifiableSet(_relationships); } /** * @see org.apache.nifi.snmp.processors.AbstractSNMPProcessor#onTriggerSnmp(org.apache.nifi.processor.ProcessContext, org.apache.nifi.processor.ProcessSession) */ @Override protected void onTriggerSnmp(ProcessContext context, ProcessSession processSession) throws ProcessException { FlowFile flowFile = processSession.get(); if (flowFile != null) { // Create the PDU object PDU pdu = null; if(this.snmpTarget.getVersion() == SnmpConstants.version3) { pdu = new ScopedPDU(); } else { pdu = new PDU(); } if(this.addVariables(pdu, flowFile.getAttributes())) { pdu.setType(PDU.SET); try { ResponseEvent response = this.targetResource.set(pdu); if(response.getResponse() == null) { processSession.transfer(processSession.penalize(flowFile), REL_FAILURE); this.getLogger().error("Set request timed out or parameters are incorrect."); context.yield(); } else if(response.getResponse().getErrorStatus() == PDU.noError) { flowFile = SNMPUtils.updateFlowFileAttributesWithPduProperties(pdu, flowFile, processSession); processSession.transfer(flowFile, REL_SUCCESS); processSession.getProvenanceReporter().send(flowFile, this.snmpTarget.getAddress().toString()); } else { final String error = response.getResponse().getErrorStatusText(); flowFile = SNMPUtils.addAttribute(SNMPUtils.SNMP_PROP_PREFIX + "error", error, flowFile, processSession); processSession.transfer(processSession.penalize(flowFile), REL_FAILURE); this.getLogger().error("Failed while executing SNMP Set [{}] via " + this.targetResource + ". 
Error = {}", new Object[]{response.getRequest().getVariableBindings(), error}); } } catch (IOException e) { processSession.transfer(processSession.penalize(flowFile), REL_FAILURE); this.getLogger().error("Failed while executing SNMP Set via " + this.targetResource, e); context.yield(); } } else { processSession.transfer(processSession.penalize(flowFile), REL_FAILURE); this.getLogger().warn("No attributes found in the FlowFile to perform SNMP Set"); } } } /** * Method to construct {@link VariableBinding} based on {@link FlowFile} * attributes in order to update the {@link PDU} that is going to be sent to * the SNMP Agent. * @param pdu {@link PDU} to be sent * @param attributes {@link FlowFile} attributes * @return true if at least one {@link VariableBinding} has been created, false otherwise */ private boolean addVariables(PDU pdu, Map<String, String> attributes) { boolean result = false; for (Entry<String, String> attributeEntry : attributes.entrySet()) { if (attributeEntry.getKey().startsWith(SNMPUtils.SNMP_PROP_PREFIX)) { String[] splits = attributeEntry.getKey().split("\\" + SNMPUtils.SNMP_PROP_DELIMITER); String snmpPropName = splits[1]; String snmpPropValue = attributeEntry.getValue(); if(SNMPUtils.OID_PATTERN.matcher(snmpPropName).matches()) { Variable var = null; if (splits.length == 2) { // no SMI syntax defined var = new OctetString(snmpPropValue); } else { int smiSyntax = Integer.valueOf(splits[2]); var = this.stringToVariable(snmpPropValue, smiSyntax); } if(var != null) { VariableBinding varBind = new VariableBinding(new OID(snmpPropName), var); pdu.add(varBind); result = true; } } } } return result; } /** * Method to create the variable from the attribute value and the given SMI syntax value * @param value attribute value * @param smiSyntax attribute SMI Syntax * @return variable */ private Variable stringToVariable(String value, int smiSyntax) { Variable var = AbstractVariable.createFromSyntax(smiSyntax); try { if (var instanceof AssignableFromString) 
{ ((AssignableFromString) var).setValue(value); } else if (var instanceof AssignableFromInteger) { ((AssignableFromInteger) var).setValue(Integer.valueOf(value)); } else if (var instanceof AssignableFromLong) { ((AssignableFromLong) var).setValue(Long.valueOf(value)); } else { this.getLogger().error("Unsupported conversion of [" + value +"] to " + var.getSyntaxString()); var = null; } } catch (IllegalArgumentException e) { this.getLogger().error("Unsupported conversion of [" + value +"] to " + var.getSyntaxString(), e); var = null; } return var; } /** * @see org.apache.nifi.components.AbstractConfigurableComponent#getSupportedPropertyDescriptors() */ @Override protected List<PropertyDescriptor> getSupportedPropertyDescriptors() { return propertyDescriptors; } /** * @see org.apache.nifi.processor.AbstractSessionFactoryProcessor#getRelationships() */ @Override public Set<Relationship> getRelationships() { return relationships; } /** * @see org.apache.nifi.snmp.processors.AbstractSNMPProcessor#finishBuildingTargetResource(org.apache.nifi.processor.ProcessContext) */ @Override protected SNMPSetter finishBuildingTargetResource(ProcessContext context) { return new SNMPSetter(this.snmp, this.snmpTarget); } }
/* * Copyright 2013 Yoshihiro Miyama * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.kyakujin.android.autoeco.db.dao; import android.content.ContentValues; import android.content.Context; import android.database.Cursor; import android.database.sqlite.SQLiteException; import android.media.AudioManager; import android.net.Uri; import com.kyakujin.android.autoeco.Conf; import com.kyakujin.android.autoeco.Conf.EcoExecFrom; import com.kyakujin.android.autoeco.Conf.SleepTime; import com.kyakujin.android.autoeco.db.AutoEcoContract.EcoQuery; import com.kyakujin.android.autoeco.db.AutoEcoContract.EcoTbl; /** * Data Access Object for Eco Table. */ public class EcoDAO { Context mContext; public EcoDAO(Context context) { super(); mContext = context; } private EcoModel createDefaultEcoModel() { EcoModel model = new EcoModel(); model.setName(Conf.NONE); model.setWifiEnabled(true); model.setBluetoothEnabled(true); model.setSyncEnabled(true); model.setRotateEnabled(true); model.setBrightnessEnabled(true); model.setBrightnessValue(20); model.setBrightnessAuto(false); model.setSilentEnabled(true); model.setSilentMode(AudioManager.RINGER_MODE_NORMAL); model.setSleepEnabled(true); model.setSleepTime(1); return model; } private ContentValues createDefultContentValues() { EcoModel model = createDefaultEcoModel(); ContentValues val = new ContentValues(); val.put(EcoTbl.NAME, model.getName()); val.put(EcoTbl.WIFI_ENABLED, model.getWifiEnabled() == true ? 
1 : 0); val.put(EcoTbl.BLUETOOTH_ENABLED, model.getBluetoothEnabled() == true ? 1 : 0); val.put(EcoTbl.BRIGHTNESS_ENABLED, model.getBrightnessEnabled() == true ? 1 : 0); val.put(EcoTbl.BRIGHTNESS_VALUE, model.getBrightnessValue()); val.put(EcoTbl.BRIGHTNESS_ENABLED, model.getBrightnessAuto() == true ? 1 : 0); val.put(EcoTbl.ROTATE_ENABLED, model.getRotateEnabled() == true ? 1 : 0); val.put(EcoTbl.SILENT_ENABLED, model.getSilentEnabled() == true ? 1 : 0); val.put(EcoTbl.SILENT_MODE, model.getSilentMode()); val.put(EcoTbl.SLEEP_ENABLED, model.getSleepEnabled() == true ? 1 : 0); val.put(EcoTbl.SLEEP_TIME, model.getSleepTimeOrdinal()); val.put(EcoTbl.SYNC_ENABLED, model.getSyncEnabled() == true ? 1 : 0); return val; } public Uri insertDefaultEco() { return insertEco(createDefaultEcoModel()); } public Uri insertEco(EcoModel model) { ContentValues val = createDefultContentValues(); val.put(EcoTbl.NAME, model.getName()); val.put(EcoTbl.WIFI_ENABLED, model.getWifiEnabled() == true ? 1 : 0); val.put(EcoTbl.BLUETOOTH_ENABLED, model.getBluetoothEnabled() == true ? 1 : 0); val.put(EcoTbl.BRIGHTNESS_ENABLED, model.getBrightnessEnabled() == true ? 1 : 0); val.put(EcoTbl.BRIGHTNESS_VALUE, model.getBrightnessValue()); val.put(EcoTbl.BRIGHTNESS_AUTO, model.getBrightnessAuto() == true ? 1 : 0); val.put(EcoTbl.ROTATE_ENABLED, model.getRotateEnabled() == true ? 1 : 0); val.put(EcoTbl.SILENT_ENABLED, model.getSilentEnabled() == true ? 1 : 0); val.put(EcoTbl.SILENT_MODE, model.getSilentMode()); val.put(EcoTbl.SLEEP_ENABLED, model.getSleepEnabled() == true ? 1 : 0); val.put(EcoTbl.SLEEP_TIME, model.getSleepTimeOrdinal()); val.put(EcoTbl.SYNC_ENABLED, model.getSyncEnabled() == true ? 1 : 0); return mContext.getContentResolver().insert(EcoTbl.CONTENT_URI, val); } public void updateEco(EcoModel model) { ContentValues val = new ContentValues(); val.put(EcoTbl.NAME, model.getName()); val.put(EcoTbl.WIFI_ENABLED, model.getWifiEnabled() == true ? 
1 : 0); val.put(EcoTbl.BLUETOOTH_ENABLED, model.getBluetoothEnabled() == true ? 1 : 0); val.put(EcoTbl.BRIGHTNESS_ENABLED, model.getBrightnessEnabled() == true ? 1 : 0); val.put(EcoTbl.BRIGHTNESS_VALUE, model.getBrightnessValue()); val.put(EcoTbl.BRIGHTNESS_AUTO, model.getBrightnessAuto() == true ? 1 : 0); val.put(EcoTbl.ROTATE_ENABLED, model.getRotateEnabled() == true ? 1 : 0); val.put(EcoTbl.SILENT_ENABLED, model.getSilentEnabled() == true ? 1 : 0); val.put(EcoTbl.SILENT_MODE, model.getSilentMode()); val.put(EcoTbl.SLEEP_ENABLED, model.getSleepEnabled() == true ? 1 : 0); val.put(EcoTbl.SLEEP_TIME, model.getSleepTimeOrdinal()); val.put(EcoTbl.SYNC_ENABLED, model.getSyncEnabled() == true ? 1 : 0); mContext.getContentResolver().update(Uri.withAppendedPath(EcoTbl.CONTENT_URI, String.valueOf(model.getId())), val, null, null); return; } public void updateWifiEnabled(int ecoId, boolean value) { ContentValues val = new ContentValues(); val.put(EcoTbl.WIFI_ENABLED, value == true ? 1 : 0); mContext.getContentResolver().update(Uri.withAppendedPath(EcoTbl.CONTENT_URI, String.valueOf(ecoId)), val, null, null); return; } public void updateBluetoothEnabled(int ecoId, boolean value) { ContentValues val = new ContentValues(); val.put(EcoTbl.BLUETOOTH_ENABLED, value == true ? 1 : 0); mContext.getContentResolver().update(Uri.withAppendedPath(EcoTbl.CONTENT_URI, String.valueOf(ecoId)), val, null, null); return; } public void updateRotateEnabled(int ecoId, boolean value) { ContentValues val = new ContentValues(); val.put(EcoTbl.ROTATE_ENABLED, value == true ? 1 : 0); mContext.getContentResolver().update(Uri.withAppendedPath(EcoTbl.CONTENT_URI, String.valueOf(ecoId)), val, null, null); return; } public void updateSyncEnabled(int ecoId, boolean value) { ContentValues val = new ContentValues(); val.put(EcoTbl.SYNC_ENABLED, value == true ? 
1 : 0); mContext.getContentResolver().update(Uri.withAppendedPath(EcoTbl.CONTENT_URI, String.valueOf(ecoId)), val, null, null); return; } public void updateBrightnessValue(int ecoId, int value) { ContentValues val = new ContentValues(); val.put(EcoTbl.BRIGHTNESS_VALUE, value); mContext.getContentResolver().update(Uri.withAppendedPath(EcoTbl.CONTENT_URI, String.valueOf(ecoId)), val, null, null); return; } public void updateBrightnessAuto(int ecoId, boolean enabled) { ContentValues val = new ContentValues(); val.put(EcoTbl.BRIGHTNESS_AUTO, enabled); mContext.getContentResolver().update(Uri.withAppendedPath(EcoTbl.CONTENT_URI, String.valueOf(ecoId)), val, null, null); return; } public void updateSilentMode(int ecoId, int mode) { ContentValues val = new ContentValues(); val.put(EcoTbl.SILENT_MODE, mode); mContext.getContentResolver().update(Uri.withAppendedPath(EcoTbl.CONTENT_URI, String.valueOf(ecoId)), val, null, null); return; } public void updateSleepTime(int ecoId, int time) { ContentValues val = new ContentValues(); val.put(EcoTbl.SLEEP_TIME, time); mContext.getContentResolver().update(Uri.withAppendedPath(EcoTbl.CONTENT_URI, String.valueOf(ecoId)), val, null, null); return; } public EcoModel readToEcoModelByCursor(Cursor c) { EcoModel model = new EcoModel(); if (c != null && c.moveToFirst()) { model.setId(c.getInt(EcoQuery.Idx._ID.ordinal())); model.setName(c.getString(EcoQuery.Idx.NAME.ordinal())); model.setWifiEnabled(c.getInt(EcoQuery.Idx.WIFI_ENABLED.ordinal()) == 1 ? true : false); model.setBluetoothEnabled(c.getInt(EcoQuery.Idx.BLUETOOTH_ENABLED.ordinal()) == 1 ? true : false); model.setRotateEnabled(c.getInt(EcoQuery.Idx.ROTATE_ENABLED.ordinal()) == 1 ? true : false); model.setSyncEnabled(c.getInt(EcoQuery.Idx.SYNC_ENABLED.ordinal()) == 1 ? true : false); model.setBrightnessEnabled(c.getInt(EcoQuery.Idx.BRIGHTNESS_ENABLED.ordinal()) == 1 ? 
true : false); model.setBrightnessValue(c.getInt(EcoQuery.Idx.BRIGHTNESS_VALUE.ordinal())); model.setBrightnessAuto(c.getInt(EcoQuery.Idx.BRIGHTNESS_AUTO.ordinal()) == 1 ? true : false); model.setSilentEnabled(c.getInt(EcoQuery.Idx.SILENT_ENABLED.ordinal()) == 1 ? true : false); model.setSilentMode(c.getInt(EcoQuery.Idx.SILENT_MODE.ordinal())); model.setSleepEnabled(c.getInt(EcoQuery.Idx.SLEEP_ENABLED.ordinal()) == 1 ? true : false); model.setSleepTime(c.getInt(EcoQuery.Idx.SLEEP_TIME.ordinal())); } return model; } public EcoModel readToEcoModelById(int id) { EcoModel model = new EcoModel(); Cursor c = null; try { c = mContext.getContentResolver().query(Uri.withAppendedPath(EcoTbl.CONTENT_URI, String.valueOf(id)), EcoQuery.PROJECTION, null, null, null); if (c != null && c.moveToFirst()) { model.setId(c.getInt(EcoQuery.Idx._ID.ordinal())); model.setName(c.getString(EcoQuery.Idx.NAME.ordinal())); model.setWifiEnabled(c.getInt(EcoQuery.Idx.WIFI_ENABLED.ordinal()) == 1 ? true : false); model.setBluetoothEnabled(c.getInt(EcoQuery.Idx.BLUETOOTH_ENABLED.ordinal()) == 1 ? true : false); model.setRotateEnabled(c.getInt(EcoQuery.Idx.ROTATE_ENABLED.ordinal()) == 1 ? true : false); model.setSyncEnabled(c.getInt(EcoQuery.Idx.SYNC_ENABLED.ordinal()) == 1 ? true : false); model.setBrightnessEnabled(c.getInt(EcoQuery.Idx.BRIGHTNESS_ENABLED.ordinal()) == 1 ? true : false); model.setBrightnessValue(c.getInt(EcoQuery.Idx.BRIGHTNESS_VALUE.ordinal())); model.setBrightnessAuto(c.getInt(EcoQuery.Idx.BRIGHTNESS_AUTO.ordinal()) == 1 ? true : false); model.setSilentEnabled(c.getInt(EcoQuery.Idx.SILENT_ENABLED.ordinal()) == 1 ? true : false); model.setSilentMode(c.getInt(EcoQuery.Idx.SILENT_MODE.ordinal())); model.setSleepEnabled(c.getInt(EcoQuery.Idx.SLEEP_ENABLED.ordinal()) == 1 ? 
true : false); model.setSleepTime(c.getInt(EcoQuery.Idx.SLEEP_TIME.ordinal())); } } catch (SQLiteException e) { e.printStackTrace(); } finally { if (c != null) { c.close(); } } return model; } public boolean isEcoEnabledById(int id, EcoExecFrom from, String ecotype) { MappingDAO dao = new MappingDAO(mContext); int ecoId; switch (from) { case SCHED: ecoId = dao.searchEcoIdBySchedId(id); break; case BATTERY: ecoId = dao.searchEcoIdByBatteryId(id); break; case MANUAL: ecoId = dao.searchEcoIdByManualId(id); break; default: ecoId = 0; } Cursor c = null; try { c = mContext.getContentResolver().query(Uri.withAppendedPath(EcoTbl.CONTENT_URI, String.valueOf(ecoId)), EcoQuery.PROJECTION, ecotype + "='1'", null, null); if (c != null && c.moveToFirst()) { return true; } } catch (SQLiteException e) { e.printStackTrace(); } finally { if (c != null) { c.close(); } } return false; } public int searchSilentModeById(int id, EcoExecFrom from) { MappingDAO dao = new MappingDAO(mContext); int ecoId; switch (from) { case SCHED: ecoId = dao.searchEcoIdBySchedId(id); break; case BATTERY: ecoId = dao.searchEcoIdByBatteryId(id); break; case MANUAL: ecoId = dao.searchMappingIdByManualId(id); break; default: ecoId = 0; } EcoModel model = new EcoModel(); model = readToEcoModelById(ecoId); return model.getSilentMode(); } public SleepTime searchSleepTimeById(int id, EcoExecFrom from) { MappingDAO dao = new MappingDAO(mContext); int ecoId; switch (from) { case SCHED: ecoId = dao.searchEcoIdBySchedId(id); break; case BATTERY: ecoId = dao.searchEcoIdByBatteryId(id); break; case MANUAL: ecoId = dao.searchMappingIdByManualId(id); break; default: ecoId = 0; } EcoModel model = new EcoModel(); model = readToEcoModelById(ecoId); return Conf.mapSleepTime.get(model.getSleepTimeOrdinal()); } public int searchBrightnessById(int id, EcoExecFrom from) { MappingDAO dao = new MappingDAO(mContext); int ecoId; switch (from) { case SCHED: ecoId = dao.searchEcoIdBySchedId(id); break; case BATTERY: ecoId = 
dao.searchEcoIdByBatteryId(id); break; case MANUAL: ecoId = dao.searchMappingIdByManualId(id); break; default: ecoId = 0; } EcoModel model = new EcoModel(); model = readToEcoModelById(ecoId); return model.getBrightnessValue(); } public boolean searchAutoBrightnessById(int id, EcoExecFrom from) { MappingDAO dao = new MappingDAO(mContext); int ecoId; switch (from) { case SCHED: ecoId = dao.searchEcoIdBySchedId(id); break; case BATTERY: ecoId = dao.searchEcoIdByBatteryId(id); break; case MANUAL: ecoId = dao.searchMappingIdByManualId(id); break; default: ecoId = 0; } EcoModel model = new EcoModel(); model = readToEcoModelById(ecoId); return model.getBrightnessAuto(); } public String getTimeStamp(int id, EcoExecFrom from) { String time = ""; MappingDAO dao = new MappingDAO(mContext); int ecoId; switch (from) { case SCHED: ecoId = dao.searchEcoIdBySchedId(id); break; case BATTERY: ecoId = dao.searchEcoIdByBatteryId(id); break; case MANUAL: ecoId = dao.searchMappingIdByManualId(id); break; default: ecoId = 0; } Cursor c = null; try { c = mContext.getContentResolver().query(Uri.withAppendedPath(EcoTbl.CONTENT_URI, String.valueOf(ecoId)), EcoQuery.PROJECTION, null, null, null); if (c != null && c.moveToFirst()) { time = c.getString(EcoQuery.Idx.UPDATE_DATE.ordinal()); } } catch (SQLiteException e) { e.printStackTrace(); } finally { if (c != null) { c.close(); } } return time; } }
package com.refinedmods.refinedstorage.api.autocrafting;

import com.refinedmods.refinedstorage.api.util.Action;
import com.refinedmods.refinedstorage.api.util.StackListEntry;
import net.minecraft.core.BlockPos;
import net.minecraft.core.Direction;
import net.minecraft.network.chat.Component;
import net.minecraft.world.item.ItemStack;
import net.minecraft.world.level.block.entity.BlockEntity;
import net.minecraftforge.fluids.FluidStack;
import net.minecraftforge.fluids.capability.IFluidHandler;
import net.minecraftforge.items.IItemHandler;
import net.minecraftforge.items.IItemHandlerModifiable;
import org.apache.logging.log4j.LogManager;

import javax.annotation.Nullable;
import java.util.*;
import java.util.stream.Collectors;
import java.util.stream.IntStream;

/**
 * Represents a network node that contains crafting patterns.
 */
public interface ICraftingPatternContainer {
    /**
     * Returns the interval of when a crafting step with a pattern in this container can update.
     * Minimum value is 0 (each tick).
     * <p>
     * Note: rather than maxing out the update interval, implementors might want to balance around {@link #getMaximumSuccessfulCraftingUpdates()}.
     * This method merely speeds up the update rate, it might be more interesting to increase the output rate in {@link #getMaximumSuccessfulCraftingUpdates()}.
     *
     * @return the update interval
     */
    default int getUpdateInterval() {
        return 10;
    }

    /**
     * Returns the amount of successful crafting updates that this container can have per crafting step update.
     * If this limit is reached, crafting patterns from this container won't be able to update until the next
     * eligible crafting step update interval from {@link #getUpdateInterval()}.
     *
     * @return the maximum amount of successful crafting updates
     */
    default int getMaximumSuccessfulCraftingUpdates() {
        return 1;
    }

    /**
     * @return the inventory that this container is connected to, or null if no inventory is present
     */
    @Nullable
    IItemHandler getConnectedInventory();

    /**
     * @return the fluid inventory that this container is connected to, or null if no fluid inventory is present
     */
    @Nullable
    IFluidHandler getConnectedFluidInventory();

    /**
     * @return the block entity that this container is connected to, or null if no block entity is present
     */
    @Nullable
    BlockEntity getConnectedBlockEntity();

    /**
     * @return the block entity that this container is facing
     */
    BlockEntity getFacingBlockEntity();

    /**
     * @return the direction to the facing block entity
     */
    Direction getDirection();

    /**
     * @return the patterns stored in this container
     */
    List<ICraftingPattern> getPatterns();

    /**
     * @return the pattern inventory, or null if no pattern inventory is present
     */
    @Nullable
    IItemHandlerModifiable getPatternInventory();

    /**
     * The name of this container for categorizing in the Crafting Manager GUI.
     *
     * @return the name of this container
     */
    Component getName();

    /**
     * @return the position of this container
     */
    BlockPos getPosition();

    /**
     * Containers may be daisy-chained together. If this container points to
     * another one, gets the root container in the chain. If containers are
     * not daisy-chained, returns this container. If there was a container
     * loop, returns null.
     *
     * @return the root pattern container
     */
    @Nullable
    ICraftingPatternContainer getRootContainer();

    /**
     * @return the UUID of this container
     */
    UUID getUuid();

    /**
     * @return true if the connected inventory is locked for processing patterns, false otherwise
     */
    default boolean isLocked() {
        return false;
    }

    /**
     * Unlock the container so it may be used by processing pattern
     */
    void unlock();

    /**
     * Called when this container is used by a processing pattern to insert items or fluids in the connected inventory.
     */
    default void onUsedForProcessing() {
    }

    /**
     * @return whether the container is successfully connected to the inventory it wants to insert to
     */
    default boolean hasConnectedInventory() {
        return getConnectedInventory() != null;
    }

    /**
     * @return whether the container is successfully connected to the fluid inventory it wants to insert to
     */
    default boolean hasConnectedFluidInventory() {
        return getConnectedFluidInventory() != null;
    }

    /**
     * Called by Autocrafting when it uses this crafter in a processing recipe that has items as input
     *
     * Algorithm: stacks are taken one at a time from a work queue; each stack
     * is offered to the remaining candidate slots until a slot accepts at
     * least part of it. A slot that accepted anything is removed from the
     * candidate list (it is considered used for this insertion pass).
     * Insertion succeeds only if every stack was fully placed.
     *
     * NOTE(review): on a PERFORM that partially fails, already-inserted items
     * are not rolled back — the warning below says the remainder is voided.
     *
     * @param toInsert Collection of Itemstack stacklist entries to insert into the inventory
     * @param action   action to perform
     * @return whether insertion was successful
     */
    default boolean insertItemsIntoInventory(Collection<StackListEntry<ItemStack>> toInsert, Action action) {
        IItemHandler dest = getConnectedInventory();
        if (toInsert.isEmpty()) {
            return true;
        }
        if (dest == null) {
            return false;
        }
        // Work queue of stacks still to place; "current" is the stack (or
        // remainder of a stack) currently being inserted.
        Deque<StackListEntry<ItemStack>> stacks = new ArrayDeque<>(toInsert);
        StackListEntry<ItemStack> currentEntry = stacks.poll();
        ItemStack current = currentEntry != null ? currentEntry.getStack() : null;
        // Candidate destination slots; a slot is removed once it accepts anything.
        List<Integer> availableSlots = IntStream.range(0, dest.getSlots()).boxed().collect(Collectors.toList());
        while (current != null && !availableSlots.isEmpty()) {
            ItemStack remainder = ItemStack.EMPTY;
            for (int i = 0; i < availableSlots.size(); ++i) {
                int slot = availableSlots.get(i);
                // .copy() is mandatory!
                remainder = dest.insertItem(slot, current.copy(), action == Action.SIMULATE);
                // If we inserted *something*
                if (remainder.isEmpty() || current.getCount() != remainder.getCount()) {
                    availableSlots.remove(i);
                    break;
                }
            }
            if (remainder.isEmpty()) {
                // If we inserted successfully, get a next stack.
                currentEntry = stacks.poll();
                current = currentEntry != null ? currentEntry.getStack() : null;
            } else if (current.getCount() == remainder.getCount()) {
                // If we didn't insert anything over ALL these slots, stop here.
                break;
            } else {
                // If we didn't insert all, continue with other slots and use our remainder.
                current = remainder;
            }
        }
        // Success only when every queued stack was consumed completely.
        boolean success = current == null && stacks.isEmpty();
        if (!success && action == Action.PERFORM) {
            LogManager.getLogger().warn("Inventory unexpectedly didn't accept {}, the remainder has been voided!", current != null ? current.getDescriptionId() : null);
        }
        return success;
    }

    /**
     * Called by Autocrafting when it uses this crafter in a processing recipe that has fluids as input
     *
     * Unlike the item variant, this is all-or-nothing per entry: the first
     * fluid that is not fully accepted aborts and returns false.
     *
     * @param toInsert Collection of Fluidstack stacklist entries to insert into the inventory
     * @param action   action to perform
     * @return whether insertion was successful
     */
    default boolean insertFluidsIntoInventory(Collection<StackListEntry<FluidStack>> toInsert, Action action) {
        IFluidHandler dest = getConnectedFluidInventory();
        if (toInsert.isEmpty()) {
            return true;
        }
        if (dest == null) {
            return false;
        }
        for (StackListEntry<FluidStack> entry : toInsert) {
            int filled = dest.fill(entry.getStack(), action == Action.SIMULATE ? IFluidHandler.FluidAction.SIMULATE : IFluidHandler.FluidAction.EXECUTE);
            if (filled != entry.getStack().getAmount()) {
                if (action == Action.PERFORM) {
                    LogManager.getLogger().warn("Inventory unexpectedly didn't accept all of {}, the remainder has been voided!", entry.getStack().getTranslationKey());
                }
                return false;
            }
        }
        return true;
    }
}
/* * JBoss, Home of Professional Open Source. * Copyright 2014 Red Hat, Inc., and individual contributors * as indicated by the @author tags. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.undertow.server.handlers.proxy; import io.undertow.UndertowLogger; import io.undertow.UndertowMessages; import io.undertow.client.ClientCallback; import io.undertow.client.ClientConnection; import io.undertow.client.ClientStatistics; import io.undertow.client.UndertowClient; import io.undertow.server.ExchangeCompletionListener; import io.undertow.server.HttpServerExchange; import io.undertow.util.CopyOnWriteMap; import io.undertow.util.Headers; import io.undertow.util.WorkerUtils; import org.xnio.ChannelListener; import org.xnio.IoUtils; import org.xnio.OptionMap; import org.xnio.XnioExecutor; import org.xnio.XnioIoThread; import org.xnio.ssl.XnioSsl; import java.io.Closeable; import java.io.IOException; import java.net.InetSocketAddress; import java.net.URI; import java.util.ArrayDeque; import java.util.Deque; import java.util.Map; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; /** * A pool of connections to a target host. * * This pool can also be used to open connections in exclusive mode, in which case they will not be added to the connection pool. 
* * In this case the caller is responsible for closing any connections. * * @author Stuart Douglas */ public class ProxyConnectionPool implements Closeable { private final URI uri; private final InetSocketAddress bindAddress; private final XnioSsl ssl; private final UndertowClient client; private final ConnectionPoolManager connectionPoolManager; private final OptionMap options; /** * Set to true when the connection pool is closed. */ private volatile boolean closed; /** * The maximum number of connections that can be established to the target */ private final int maxConnections; /** * The maximum number of connections that will be kept alive once they are idle. If a time to live is set * these connections may be timed out, depending on the value of {@link #coreCachedConnections}. * * NOTE: This value is per IO thread, so to get the actual value this must be multiplied by the number of IO threads */ private final int maxCachedConnections; /** * The minimum number of connections that this proxy connection pool will try and keep established. Once the pool * is down to this number of connections no more connections will be timed out. * * NOTE: This value is per IO thread, so to get the actual value this must be multiplied by the number of IO threads */ private final int coreCachedConnections; /** * The timeout for idle connections. Note that if {@code #coreCachedConnections} is set then once the pool is down * to the core size no more connections will be timed out. 
*/ private final long timeToLive; /** * The total number of open connections, across all threads */ private final AtomicInteger openConnections = new AtomicInteger(0); /** * request count for all closed connections */ private final AtomicLong requestCount = new AtomicLong(); /** * read bytes for all closed connections */ private final AtomicLong read = new AtomicLong(); /** * written bytes for all closed connections */ private final AtomicLong written = new AtomicLong(); private final ConcurrentMap<XnioIoThread, HostThreadData> hostThreadData = new CopyOnWriteMap<>(); public ProxyConnectionPool(ConnectionPoolManager connectionPoolManager, URI uri, UndertowClient client, OptionMap options) { this(connectionPoolManager, uri, null, client, options); } public ProxyConnectionPool(ConnectionPoolManager connectionPoolManager,InetSocketAddress bindAddress, URI uri, UndertowClient client, OptionMap options) { this(connectionPoolManager, bindAddress, uri, null, client, options); } public ProxyConnectionPool(ConnectionPoolManager connectionPoolManager, URI uri, XnioSsl ssl, UndertowClient client, OptionMap options) { this(connectionPoolManager, null, uri, ssl, client, options); } public ProxyConnectionPool(ConnectionPoolManager connectionPoolManager, InetSocketAddress bindAddress,URI uri, XnioSsl ssl, UndertowClient client, OptionMap options) { this.connectionPoolManager = connectionPoolManager; this.maxConnections = Math.max(connectionPoolManager.getMaxConnections(), 1); this.maxCachedConnections = Math.max(connectionPoolManager.getMaxCachedConnections(), 0); this.coreCachedConnections = Math.max(connectionPoolManager.getSMaxConnections(), 0); this.timeToLive = connectionPoolManager.getTtl(); this.bindAddress = bindAddress; this.uri = uri; this.ssl = ssl; this.client = client; this.options = options; } public URI getUri() { return uri; } public InetSocketAddress getBindAddress() { return bindAddress; } public void close() { this.closed = true; for (HostThreadData data : 
hostThreadData.values()) { final ConnectionHolder holder = data.availableConnections.poll(); if (holder != null) { holder.clientConnection.getIoThread().execute(new Runnable() { @Override public void run() { IoUtils.safeClose(holder.clientConnection); } }); } } } /** * Called when the IO thread has completed a successful request * * @param connectionHolder The client connection holder */ private void returnConnection(final ConnectionHolder connectionHolder) { ClientStatistics stats = connectionHolder.clientConnection.getStatistics(); this.requestCount.incrementAndGet(); if(stats != null) { //we update the stats when the connection is closed this.read.addAndGet(stats.getRead()); this.written.addAndGet(stats.getWritten()); stats.reset(); } HostThreadData hostData = getData(); if (closed) { //the host has been closed IoUtils.safeClose(connectionHolder.clientConnection); ConnectionHolder con = hostData.availableConnections.poll(); while (con != null) { IoUtils.safeClose(con.clientConnection); con = hostData.availableConnections.poll(); } redistributeQueued(hostData); return; } //only do something if the connection is open. If it is closed then //the close setter will handle creating a new connection and decrementing //the connection count final ClientConnection connection = connectionHolder.clientConnection; if (connection.isOpen() && !connection.isUpgraded()) { CallbackHolder callback = hostData.awaitingConnections.poll(); while (callback != null && callback.isCancelled()) { callback = hostData.awaitingConnections.poll(); } if (callback != null) { if (callback.getTimeoutKey() != null) { callback.getTimeoutKey().remove(); } // Anything waiting for a connection is not expecting exclusivity. 
connectionReady(connectionHolder, callback.getCallback(), callback.getExchange(), false); } else { final int cachedConnectionCount = hostData.availableConnections.size(); if (cachedConnectionCount >= maxCachedConnections) { // Close the longest idle connection instead of the current one final ConnectionHolder holder = hostData.availableConnections.poll(); if (holder != null) { IoUtils.safeClose(holder.clientConnection); } } hostData.availableConnections.add(connectionHolder); // If the soft max and ttl are configured if (timeToLive > 0) { //we only start the timeout process once we have hit the core pool size //otherwise connections could start timing out immediately once the core pool size is hit //and if we never hit the core pool size then it does not make sense to start timers which are never //used (as timers are expensive) final long currentTime = System.currentTimeMillis(); connectionHolder.timeout = currentTime + timeToLive; if(hostData.availableConnections.size() > coreCachedConnections) { if (hostData.nextTimeout <= 0) { hostData.timeoutKey = WorkerUtils.executeAfter(connection.getIoThread(), hostData.timeoutTask, timeToLive, TimeUnit.MILLISECONDS); hostData.nextTimeout = connectionHolder.timeout; } } } } } else if (connection.isOpen() && connection.isUpgraded()) { //we treat upgraded connections as closed //as we do not want the connection pool filled with upgraded connections //if the connection is actually closed the close setter will handle it connection.getCloseSetter().set(null); handleClosedConnection(hostData, connectionHolder); } } private void handleClosedConnection(HostThreadData hostData, final ConnectionHolder connection) { openConnections.decrementAndGet(); int connections = --hostData.connections; hostData.availableConnections.remove(connection); if (connections < maxConnections) { CallbackHolder task = hostData.awaitingConnections.poll(); while (task != null && task.isCancelled()) { task = hostData.awaitingConnections.poll(); } if (task != 
null) { openConnection(task.exchange, task.callback, hostData, false); } } } private void openConnection(final HttpServerExchange exchange, final ProxyCallback<ProxyConnection> callback, final HostThreadData data, final boolean exclusive) { if (!exclusive) { data.connections++; } client.connect(new ClientCallback<ClientConnection>() { @Override public void completed(final ClientConnection result) { openConnections.incrementAndGet(); final ConnectionHolder connectionHolder = new ConnectionHolder(result); if (!exclusive) { result.getCloseSetter().set(new ChannelListener<ClientConnection>() { @Override public void handleEvent(ClientConnection channel) { handleClosedConnection(data, connectionHolder); } }); } connectionReady(connectionHolder, callback, exchange, exclusive); } @Override public void failed(IOException e) { if (!exclusive) { data.connections--; } UndertowLogger.REQUEST_LOGGER.debug("Failed to connect", e); if (!connectionPoolManager.handleError()) { redistributeQueued(getData()); scheduleFailedHostRetry(exchange); } callback.failed(exchange); } }, bindAddress, getUri(), exchange.getIoThread(), ssl, exchange.getConnection().getByteBufferPool(), options); } private void redistributeQueued(HostThreadData hostData) { CallbackHolder callback = hostData.awaitingConnections.poll(); while (callback != null) { if (callback.getTimeoutKey() != null) { callback.getTimeoutKey().remove(); } if (!callback.isCancelled()) { long time = System.currentTimeMillis(); if (callback.getExpireTime() > 0 && callback.getExpireTime() < time) { callback.getCallback().failed(callback.getExchange()); } else { callback.getCallback().queuedRequestFailed(callback.getExchange()); } } callback = hostData.awaitingConnections.poll(); } } private void connectionReady(final ConnectionHolder result, final ProxyCallback<ProxyConnection> callback, final HttpServerExchange exchange, final boolean exclusive) { try { exchange.addExchangeCompleteListener(new ExchangeCompletionListener() { @Override 
public void exchangeEvent(HttpServerExchange exchange, NextListener nextListener) { if (!exclusive) { returnConnection(result); } nextListener.proceed(); } }); } catch (Exception e) { returnConnection(result); callback.failed(exchange); return; } callback.completed(exchange, new ProxyConnection(result.clientConnection, uri.getPath() == null ? "/" : uri.getPath())); } public AvailabilityType available() { if (closed) { return AvailabilityType.CLOSED; } if (!connectionPoolManager.isAvailable()) { return AvailabilityType.PROBLEM; } HostThreadData data = getData(); if (data.connections < maxConnections) { return AvailabilityType.AVAILABLE; } if (!data.availableConnections.isEmpty()) { return AvailabilityType.AVAILABLE; } if (data.awaitingConnections.size() >= connectionPoolManager.getMaxQueueSize()) { return AvailabilityType.FULL_QUEUE; } return AvailabilityType.FULL; } /** * If a host fails we periodically retry * * @param exchange The server exchange */ private void scheduleFailedHostRetry(final HttpServerExchange exchange) { final int retry = connectionPoolManager.getProblemServerRetry(); // only schedule a retry task if the node is not available if (retry > 0 && !connectionPoolManager.isAvailable()) { WorkerUtils.executeAfter(exchange.getIoThread(), new Runnable() { @Override public void run() { if (closed) { return; } UndertowLogger.PROXY_REQUEST_LOGGER.debugf("Attempting to reconnect to failed host %s", getUri()); client.connect(new ClientCallback<ClientConnection>() { @Override public void completed(ClientConnection result) { UndertowLogger.PROXY_REQUEST_LOGGER.debugf("Connected to previously failed host %s, returning to service", getUri()); if (connectionPoolManager.clearError()) { // In case the node is available now, return the connection final ConnectionHolder connectionHolder = new ConnectionHolder(result); final HostThreadData data = getData(); result.getCloseSetter().set(new ChannelListener<ClientConnection>() { @Override public void 
handleEvent(ClientConnection channel) { handleClosedConnection(data, connectionHolder); } }); data.connections++; returnConnection(connectionHolder); } else { // Otherwise reschedule the retry task scheduleFailedHostRetry(exchange); } } @Override public void failed(IOException e) { UndertowLogger.PROXY_REQUEST_LOGGER.debugf("Failed to reconnect to failed host %s", getUri()); connectionPoolManager.handleError(); scheduleFailedHostRetry(exchange); } }, bindAddress, getUri(), exchange.getIoThread(), ssl, exchange.getConnection().getByteBufferPool(), options); } }, retry, TimeUnit.SECONDS); } } /** * Timeout idle connections which are above the soft max cached connections limit. * * @param currentTime the current time * @param data the local host thread data */ private void timeoutConnections(final long currentTime, final HostThreadData data) { int idleConnections = data.availableConnections.size(); for (;;) { ConnectionHolder holder; if (idleConnections > 0 && idleConnections > coreCachedConnections && (holder = data.availableConnections.peek()) != null) { if (!holder.clientConnection.isOpen()) { // Already closed connections decrease the available connections idleConnections--; } else if (currentTime >= holder.timeout) { // If the timeout is reached already, just close holder = data.availableConnections.poll(); IoUtils.safeClose(holder.clientConnection); idleConnections--; } else { if (data.timeoutKey != null) { data.timeoutKey.remove(); data.timeoutKey = null; } // Schedule a timeout task final long remaining = holder.timeout - currentTime + 1; data.nextTimeout = holder.timeout; data.timeoutKey = WorkerUtils.executeAfter(holder.clientConnection.getIoThread(), data.timeoutTask, remaining, TimeUnit.MILLISECONDS); return; } } else { // If we are below the soft limit, just cancel the task if (data.timeoutKey != null) { data.timeoutKey.remove(); data.timeoutKey = null; } data.nextTimeout = -1; return; } } } /** * Gets the host data for this thread * * @return The data 
for this thread */ private HostThreadData getData() { Thread thread = Thread.currentThread(); if (!(thread instanceof XnioIoThread)) { throw UndertowMessages.MESSAGES.canOnlyBeCalledByIoThread(); } XnioIoThread ioThread = (XnioIoThread) thread; HostThreadData data = hostThreadData.get(ioThread); if (data != null) { return data; } data = new HostThreadData(); HostThreadData existing = hostThreadData.putIfAbsent(ioThread, data); if (existing != null) { return existing; } return data; } public ClientStatistics getClientStatistics() { return new ClientStatistics() { @Override public long getRequests() { return requestCount.get(); } @Override public long getRead() { return read.get(); } @Override public long getWritten() { return written.get(); } @Override public void reset() { requestCount.set(0); read.set(0); written.set(0); } }; } /** * * @return The total number of open connections */ public int getOpenConnections() { return openConnections.get(); } /** * @param exclusive - Is connection for the exclusive use of one client? 
*/ public void connect(ProxyClient.ProxyTarget proxyTarget, HttpServerExchange exchange, ProxyCallback<ProxyConnection> callback, final long timeout, final TimeUnit timeUnit, boolean exclusive) { HostThreadData data = getData(); ConnectionHolder connectionHolder = data.availableConnections.poll(); while (connectionHolder != null && !connectionHolder.clientConnection.isOpen()) { connectionHolder = data.availableConnections.poll(); } boolean upgradeRequest = exchange.getRequestHeaders().contains(Headers.UPGRADE); if (connectionHolder != null && (!upgradeRequest || connectionHolder.clientConnection.isUpgradeSupported())) { if (exclusive) { data.connections--; } connectionReady(connectionHolder, callback, exchange, exclusive); } else if (exclusive || data.connections < maxConnections) { openConnection(exchange, callback, data, exclusive); } else { // Reject the request directly if we reached the max request queue size if (data.awaitingConnections.size() >= connectionPoolManager.getMaxQueueSize()) { callback.queuedRequestFailed(exchange); return; } CallbackHolder holder; if (timeout > 0) { long time = System.currentTimeMillis(); holder = new CallbackHolder(proxyTarget, callback, exchange, time + timeUnit.toMillis(timeout)); holder.setTimeoutKey(WorkerUtils.executeAfter(exchange.getIoThread(), holder, timeout, timeUnit)); } else { holder = new CallbackHolder(proxyTarget, callback, exchange, -1); } data.awaitingConnections.add(holder); } } /** * Should only be used for tests. 
* */ void closeCurrentConnections() { final CountDownLatch latch = new CountDownLatch(hostThreadData.size()); for(final Map.Entry<XnioIoThread, HostThreadData> data : hostThreadData.entrySet()) { data.getKey().execute(new Runnable() { @Override public void run() { ConnectionHolder d = data.getValue().availableConnections.poll(); while (d != null) { IoUtils.safeClose(d.clientConnection); d = data.getValue().availableConnections.poll(); } data.getValue().connections = 0; latch.countDown(); } }); } try { latch.await(10, TimeUnit.SECONDS); } catch (InterruptedException e) { throw new RuntimeException(e); } } private final class HostThreadData { int connections = 0; XnioIoThread.Key timeoutKey; long nextTimeout = -1; final Deque<ConnectionHolder> availableConnections = new ArrayDeque<>(); final Deque<CallbackHolder> awaitingConnections = new ArrayDeque<>(); final Runnable timeoutTask = new Runnable() { @Override public void run() { final long currentTime = System.currentTimeMillis(); timeoutConnections(currentTime, HostThreadData.this); } }; } private static final class ConnectionHolder { private long timeout; private final ClientConnection clientConnection; private ConnectionHolder(ClientConnection clientConnection) { this.clientConnection = clientConnection; } } private static final class CallbackHolder implements Runnable { final ProxyClient.ProxyTarget proxyTarget; final ProxyCallback<ProxyConnection> callback; final HttpServerExchange exchange; final long expireTime; XnioExecutor.Key timeoutKey; boolean cancelled = false; private CallbackHolder(ProxyClient.ProxyTarget proxyTarget, ProxyCallback<ProxyConnection> callback, HttpServerExchange exchange, long expireTime) { this.proxyTarget = proxyTarget; this.callback = callback; this.exchange = exchange; this.expireTime = expireTime; } private ProxyCallback<ProxyConnection> getCallback() { return callback; } private HttpServerExchange getExchange() { return exchange; } private long getExpireTime() { return expireTime; 
} private XnioExecutor.Key getTimeoutKey() { return timeoutKey; } private boolean isCancelled() { return cancelled || exchange.isResponseStarted(); } private void setTimeoutKey(XnioExecutor.Key timeoutKey) { this.timeoutKey = timeoutKey; } @Override public void run() { cancelled = true; callback.failed(exchange); } public ProxyClient.ProxyTarget getProxyTarget() { return proxyTarget; } } public enum AvailabilityType { /** * The host is read to accept requests */ AVAILABLE, /** * The host is stopped. No request should be forwarded that are not tied * to this node via sticky sessions */ DRAIN, /** * All connections are in use, connections will be queued */ FULL, /** * All connections are in use and the queue is full. Requests will be rejected. */ FULL_QUEUE, /** * The host is probably down, only try as a last resort */ PROBLEM, /** * The host is closed. connections will always fail */ CLOSED; } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
 * agreements. See the NOTICE file distributed with this work for additional information regarding
 * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License. You may obtain a
 * copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package org.apache.geode.internal.admin.statalerts;

import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;

import org.apache.geode.SystemFailure;

/**
 * This class acts as a helper for the AlertManager & AlertAggregator for the execution of the user
 * specified functions
 *
 * This class also keeps a registry of all the functions which are supported, which should be used
 * during creation of alert definition.
 *
 */
public class FunctionHelper {

  private static final short FUN_AVG = 1;
  private static final short FUN_MIN = 2;
  private static final short FUN_MAX = 3;
  private static final short FUN_SUM = 4;

  private static final String STR_AVG = "Average";
  private static final String STR_MIN = "Min Value";
  private static final String STR_MAX = "Max Value";
  private static final String STR_ADD = "Sum";

  /**
   * This function returns the available function names.
   *
   * @return List of the function names.
   */
  public static String[] getFunctionNames() {
    return new String[] {STR_ADD, STR_AVG, STR_MIN, STR_MAX};
  }

  /**
   * This method returns the function's name for the requested function identifier.
   *
   * @param functionId Identifier of the function
   * @return Function name, or {@code null} if the identifier is unknown.
   */
  public static String getFunctionName(short functionId) {
    switch (functionId) {
      case FUN_AVG:
        return STR_AVG;
      case FUN_MIN:
        return STR_MIN;
      case FUN_MAX:
        return STR_MAX;
      case FUN_SUM:
        return STR_ADD;
      default:
        return null;
    }
  }

  /**
   * This function returns the function identifier for the requested function name.
   *
   * @param qFunctionName Name of the function (case-insensitive)
   * @return Function identifier, or {@code -1} if the name is {@code null} or unknown.
   */
  public static short getFunctionIdentifier(String qFunctionName) {
    if (qFunctionName == null) {
      return -1;
    }
    if (qFunctionName.equalsIgnoreCase(STR_ADD)) {
      return FUN_SUM;
    }
    if (qFunctionName.equalsIgnoreCase(STR_AVG)) {
      return FUN_AVG;
    }
    if (qFunctionName.equalsIgnoreCase(STR_MIN)) {
      return FUN_MIN;
    }
    if (qFunctionName.equalsIgnoreCase(STR_MAX)) {
      return FUN_MAX;
    }
    return -1;
  }

  /**
   * Apply the given function of the given list of numbers and returns result
   *
   * @param functorId Id of function to be applied
   * @param vals List of number on which function will be applied
   * @return single-element array holding the result, or {@code null} for an unknown id
   */
  public static Number[] applyFunction(short functorId, Number[] vals) {
    Number[] res = new Number[1];
    switch (functorId) {
      case FUN_SUM:
        res[0] = SUM(vals);
        return res;
      case FUN_AVG:
        res[0] = AVG(vals);
        return res;
      case FUN_MIN:
        res[0] = MIN(vals);
        return res;
      case FUN_MAX:
        res[0] = MAX(vals);
        return res;
      default:
        return null;
    }
  }

  /**
   * Apply the SUM function on given list of number
   *
   * @param vals Array of number
   * @return the sum as a {@link Double}, or {@code null} on non-fatal failure
   */
  public static Number SUM(Number[] vals) {
    try {
      double sum = 0.0;
      for (int i = 0; i < vals.length; i++) {
        sum = sum + vals[i].doubleValue();
      }
      return Double.valueOf(sum);
    } catch (VirtualMachineError err) {
      SystemFailure.initiateFailure(err);
      // If this ever returns, rethrow the error. We're poisoned
      // now, so don't let this thread continue.
      throw err;
    } catch (Throwable t) {
      // Whenever you catch Error or Throwable, you must also
      // catch VirtualMachineError (see above). However, there is
      // _still_ a possibility that you are dealing with a cascading
      // error condition, so you also need to check to see if the JVM
      // is still usable:
      SystemFailure.checkFailure();
      return null;
    }
  }

  /**
   * Apply the Average function on given list of number
   *
   * @param vals Array of number
   * @return the arithmetic mean as a {@link Double}, or {@code null} on non-fatal failure
   */
  public static Number AVG(Number[] vals) {
    try {
      return Double.valueOf(SUM(vals).doubleValue() / vals.length);
    } catch (VirtualMachineError err) {
      SystemFailure.initiateFailure(err);
      // If this ever returns, rethrow the error. We're poisoned
      // now, so don't let this thread continue.
      throw err;
    } catch (Throwable ex) {
      // Whenever you catch Error or Throwable, you must also
      // catch VirtualMachineError (see above). However, there is
      // _still_ a possibility that you are dealing with a cascading
      // error condition, so you also need to check to see if the JVM
      // is still usable:
      SystemFailure.checkFailure();
      return null;
    }
  }

  /**
   * Apply the Minimum function on given list of number
   *
   * NOTE: the elements must be mutually comparable (e.g. all the same Number subtype),
   * as required by {@link Collections#min(Collection)}.
   *
   * @param vals Array of number
   * @return the minimum element, or {@code null} on non-fatal failure
   */
  public static Number MIN(Number[] vals) {
    try {
      Collection col = Arrays.asList(vals);
      // BUG FIX: this previously called Collections.max(col), so the
      // "Min Value" alert function actually returned the MAXIMUM.
      Number min = (Number) Collections.min(col);
      return min;
    } catch (VirtualMachineError err) {
      SystemFailure.initiateFailure(err);
      // If this ever returns, rethrow the error. We're poisoned
      // now, so don't let this thread continue.
      throw err;
    } catch (Throwable t) {
      // Whenever you catch Error or Throwable, you must also
      // catch VirtualMachineError (see above). However, there is
      // _still_ a possibility that you are dealing with a cascading
      // error condition, so you also need to check to see if the JVM
      // is still usable:
      SystemFailure.checkFailure();
      return null;
    }
  }

  /**
   * Apply the Maximum function on given list of number
   *
   * NOTE: the elements must be mutually comparable (e.g. all the same Number subtype),
   * as required by {@link Collections#max(Collection)}.
   *
   * @param vals Array of number
   * @return the maximum element, or {@code null} on non-fatal failure
   */
  public static Number MAX(Number[] vals) {
    try {
      Collection col = Arrays.asList(vals);
      Number max = (Number) Collections.max(col);
      return max;
    } catch (VirtualMachineError err) {
      SystemFailure.initiateFailure(err);
      // If this ever returns, rethrow the error. We're poisoned
      // now, so don't let this thread continue.
      throw err;
    } catch (Throwable t) {
      // Whenever you catch Error or Throwable, you must also
      // catch VirtualMachineError (see above). However, there is
      // _still_ a possibility that you are dealing with a cascading
      // error condition, so you also need to check to see if the JVM
      // is still usable:
      SystemFailure.checkFailure();
      return null;
    }
  }
}
package com.connectordb_android.loggers; import android.content.ContentValues; import android.content.Context; import android.database.Cursor; import android.database.DatabaseUtils; import android.database.sqlite.SQLiteDatabase; import android.database.sqlite.SQLiteOpenHelper; import android.net.wifi.SupplicantState; import android.net.wifi.WifiInfo; import android.net.wifi.WifiManager; import android.os.AsyncTask; import android.os.Handler; import android.util.Log; import java.util.ArrayList; import java.util.Iterator; import com.connectordb.client.ConnectorDB; import com.connectordb.client.RequestFailedException; import com.connectordb.client.Stream; /** * DatapointCache holds an SQLite database which manages all of the information needed to * perform data gathering and syncing to ConnectorDB in the background. * * The cache is used as a singleton using DatapointCache.get() * * The cache manages datapoints, streams, and is a key-value store for properties. * * The DatapointCache also manages synchronization with ConnectorDB */ public class DatapointCache extends SQLiteOpenHelper { public static final int DATABASE_VERSION = 4; public static final String TAG = "DatapointCache"; public static final String DATABASE_NAME = "DatapointCache.db"; public Context context; //The class is used as a singleton in the application private static DatapointCache datapointCache; public static synchronized DatapointCache get(Context c) { if (datapointCache == null) { if (c == null) { Log.e(TAG, "Context not supplied to DatapointCache!"); } Log.v(TAG, "Initializing Datapoint Cache"); datapointCache = new DatapointCache(c); } return datapointCache; } /** * Sets up the DatapointCache, and starts the syncer * @param context a context to use for the database. Remember * that the context needs to be long-lived. 
*/ public DatapointCache(Context context) { super(context, DATABASE_NAME, null, DATABASE_VERSION); this.context = context; long syncenabled = 0; try { syncenabled = Long.parseLong(this.getKey("syncenabled",null)); } catch (NumberFormatException nfe) { } if (syncenabled > 0) { Log.i(TAG, "Sync is Enabled"); this.startSyncWait(); } else { Log.i(TAG, "Sync is disabled"); } } @Override public void onCreate(SQLiteDatabase db) { Log.v(TAG, "Creating new logger cache database"); db.execSQL( "CREATE TABLE streams (streamname TEXT PRIMARY KEY, schema TEXT, nickname TEXT, description TEXT, datatype TEXT, icon TEXT);"); db.execSQL("CREATE TABLE cache (streamname TEXT, timestamp REAL, data TEXT);"); db.execSQL("CREATE TABLE kv (key TEXT PRIMARY KEY, value TEXT);"); //Now fill in the default values in kv for syncing db.execSQL("INSERT INTO kv VALUES ('server','https://connectordb.com');"); db.execSQL("INSERT INTO kv VALUES ('devicename','');"); db.execSQL("INSERT INTO kv VALUES ('__apikey','');"); // The default synchronization period is 20 minutes, but sync is disabled db.execSQL("INSERT INTO kv VALUES ('syncperiod','1200000');"); db.execSQL("INSERT INTO kv VALUES ('syncenabled','0');"); // Sync is disabled by default } @Override public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion) { Log.w(TAG, "Upgrading Cache..."); // Get the old values that should be moved over String syncenabled = this.getKey("syncenabled", db); String server = this.getKey("server", db); String devicename = this.getKey("devicename", db); String apikey = this.getKey("__apikey", db); String syncperiod = this.getKey("syncperiod",db); // Drop the tables and rebuild them db.execSQL("DROP TABLE IF EXISTS cache;"); db.execSQL("DROP TABLE IF EXISTS streams;"); db.execSQL("DROP TABLE IF EXISTS kv;"); onCreate(db); // Set the values that we're moving over setKey("server", server, db); setKey("devicename", devicename, db); setKey("apikey", apikey, db); setKey("syncenabled", syncenabled, db); 
setKey("syncperiod",syncperiod,db); // Note: this loses the streams that are to be logged! } /** * getKey returns the value for the given key from the KV store. * @param key * @param db optional database to use (for setting in transactions) * @return the value - empty string if DNE */ public String getKey(String key, SQLiteDatabase db) { if (db == null) db = this.getReadableDatabase(); Cursor res = db.rawQuery("SELECT value FROM kv WHERE key=?;", new String[] { key }); if (res.getCount() == 0) { return ""; } else { res.moveToNext(); if (key.startsWith("__")) { Log.v(TAG, "Got: *****"); } else { Log.v(TAG, "Got: " + key + " " + res.getString(0)); } return res.getString(0); } } /** * Sets the given KV pair in the KV store * @param key * @param value * @param db optional database to use (for setting in transactions) */ public void setKey(String key, String value, SQLiteDatabase db) { if (db == null) db = this.getWritableDatabase(); if (key.startsWith("__")) { Log.v(TAG, "SET " + key + " TO ********"); } else { Log.v(TAG, "SET " + key + " TO " + value); } ContentValues contentValues = new ContentValues(); contentValues.put("key", key); contentValues.put("value", value); db.replace("kv", null, contentValues); } public void setCred(String server, String device, String apikey) { this.setKey("devicename", device, null); this.setKey("__apikey", apikey, null); this.setKey("server", server, null); } /** * The ssid of the currently connected network. */ public String getSSID() { WifiManager wifiManager = (WifiManager) context.getSystemService (Context.WIFI_SERVICE); WifiInfo info = wifiManager.getConnectionInfo (); if (info.getSupplicantState() == SupplicantState.COMPLETED) { String ssid = info.getSSID(); // https://code.google.com/p/android/issues/detail?id=43336 if (ssid.equals("0x") || ssid.equals("<unknown ssid>")) { return ""; } return ssid; } return ""; } /** * Here, you can set/get the SSID which is required * for a sync to happen. 
If connected to a different network, * the app will not sync. If the ssid is an empty string, * the app will always sync * @param ssid */ public void setSyncSSID(String ssid) { this.setKey("ssid_sync",ssid,null); } public String getSyncSSID() { return this.getKey("ssid_sync",null); } /** * ensureStream adds the stream to the DatapointCache. This will make the stream be created * if it doesn't exist, and synced to ConnectorDB * * @param stream the stream name * @param schema the jsonSchema * @param nickname the stream's nickname * @param description the stream's description * @param datatype connectorDB datatype for the stream * @param icon urlencoded icon to use for the stream */ public void ensureStream(String stream, String schema, String nickname, String description, String datatype, String icon) { Log.v(TAG, "Ensuring stream " + stream); SQLiteDatabase db = this.getWritableDatabase(); ContentValues contentValues = new ContentValues(); contentValues.put("streamname", stream); contentValues.put("schema", schema); contentValues.put("description", description); contentValues.put("nickname", nickname); contentValues.put("datatype", datatype); contentValues.put("icon", icon); db.insertWithOnConflict("streams", null, contentValues, SQLiteDatabase.CONFLICT_IGNORE); } /** * For use in transactions - do NOT keep this longer than necessary. * @return a writable sqlite database */ public SQLiteDatabase getDatabase() { return this.getWritableDatabase(); } /** * insert the given datapoint into the cache. When inserting datapoints, make sure to run * ensureStream first, to register the stream from which the datapoints come. Otherwise DatapointCache * won't recognize the points. 
* @param stream * @param timestamp * @param data * @param db An optional database (set to null) to use (for transactions) * @return whether insert was successful */ public synchronized boolean insert(String stream, long timestamp, String data, SQLiteDatabase db) { if (db == null) db = this.getWritableDatabase(); Log.v(TAG, "[s=" + stream + " t=" + Long.toString(timestamp) + " d=" + data + "]"); ContentValues contentValues = new ContentValues(); contentValues.put("streamname", stream); contentValues.put("timestamp", ((double) timestamp) / 1000.0); contentValues.put("data", data); db.insert("cache", null, contentValues); return true; } //Returns the number of cached datapoints public int size() { SQLiteDatabase db = this.getReadableDatabase(); int numRows = (int) DatabaseUtils.queryNumEntries(db, "cache"); Log.v(TAG, "Cache Size: " + Integer.toString(numRows)); return numRows; } // Deletes all cached datapoints public void clearCache() { SQLiteDatabase db = this.getWritableDatabase(); db.execSQL("DELETE FROM cache;"); } /** * All functions from now on handle synchronization with ConnectorBD */ final Handler handler = new Handler(); Runnable syncer = new Runnable() { public void run() { new AsyncTask<Void, Void, Void>() { @Override protected Void doInBackground(Void... params) { String syncSSID = getSyncSSID(); String curSSID = getSSID(); if (!syncSSID.equals(curSSID) && !syncSSID.isEmpty()) { Log.i(TAG,"Not syncing. 
Connected to " + curSSID + " but " + syncSSID + " is required."); } else { DatapointCache.this.sync(); } DatapointCache.this.startSyncWait(); return null; } }.execute(); } }; public void startSyncWait() { long waittime = Long.parseLong(this.getKey("syncperiod",null)); if (waittime > 0) { Log.v(TAG, "Setting next sync in " + waittime); handler.postDelayed(syncer, waittime); } } public void disableTimedSync() { Log.v(TAG, "Disabling syncer"); handler.removeCallbacks(syncer); this.setKey("syncenabled", "0", null); } /** * * @param time time in seconds between sync attempts */ public synchronized void enableTimedSync(long time) { disableTimedSync(); this.setKey("syncenabled", "1", null); this.setKey("syncperiod", Long.toString(time * 1000), null); startSyncWait(); } /** * * @return Whether or not background synchronization is currently enabled */ public synchronized boolean getSyncEnabled() { return this.getKey("syncenabled",null).equals("1"); } /** * * @return The time is seconds between sync attempts if sync is enabled */ public long getSyncTime() { return Long.parseLong(this.getKey("syncperiod",null)); } public void bgSync() { new AsyncTask<Void, Void, Void>() { @Override protected Void doInBackground(Void... params) { DatapointCache.this.sync(); return null; } }.execute(); } /* Certain loggers might want to perform a task before sync. For example, in certain cases, data is logged in the background, by android itself to conserve battery. Or the plugin gathers data from another android app, which stores it. In this case, these plugins will be called to perform their task before a sync is completed. 
*/
    public interface PreSyncer {
        public void preSync();
    }

    // Registered pre-sync tasks. NOTE(review): this list is static, so it is shared
    // across all DatapointCache instances — presumably intentional for app-wide
    // loggers, but confirm against callers.
    private static ArrayList<PreSyncer> presync = new ArrayList<PreSyncer>();

    /**
     * Registers a task to be run at the start of every {@link #sync()}.
     *
     * @param p the pre-sync task to add
     */
    public synchronized void addPreSync(PreSyncer p) {
        Log.v(TAG, "Added Presyncer");
        presync.add(p);
    }

    //Synchronizes the database with the server
    /**
     * Uploads all cached datapoints to the server, stream by stream, creating
     * server-side streams on demand, and deletes successfully uploaded rows
     * from the local cache.
     *
     * @return true if the sync completed (or there was nothing to sync),
     *         false if any step failed (the failure is logged, not rethrown)
     */
    public synchronized boolean sync() {
        Log.i(TAG, "Starting sync");

        // Run all registered pre-sync tasks first so background loggers can
        // flush their data into the cache before we upload it.
        Log.v(TAG, "Running Presync tasks");
        Iterator<PreSyncer> iter = presync.iterator();
        while (iter.hasNext()) {
            iter.next().preSync();
        }

        String server = this.getKey("server",null);
        String devicename = this.getKey("devicename",null);
        String apikey = this.getKey("__apikey",null);

        ConnectorDB cdb = new ConnectorDB("", apikey, server);

        try {
            // Try pinging the server - if it works, and the device names match, we're good to go!
            if (!cdb.ping().equals(devicename)) {
                throw new Exception("Devices not equal");
            }

            // OK - we're good to go!
            SQLiteDatabase db = this.getWritableDatabase();

            //For each stream in database
            // NOTE(review): this Cursor (and `dta` below) is never close()d —
            // cursor leak; consider try/finally or try-with-resources. Left
            // unchanged here.
            Cursor res = db.rawQuery("SELECT streamname FROM streams", new String[] {});
            int resultcount = res.getCount();
            if (resultcount == 0) {
                Log.i(TAG, "No streams to sync");
                return true;
            }
            for (int i = 0; i < resultcount; i++) {
                res.moveToNext();
                String streamname = res.getString(0);
                Log.v(TAG, "Syncing stream " + streamname);

                // Get the datapoints for the stream - and don't include any weird future datapoints if they exist
                double queryTime = ((double) System.currentTimeMillis()) / 1000.0;
                Cursor dta = db.rawQuery(
                        "SELECT timestamp,data FROM cache WHERE streamname=? AND timestamp <=? ORDER BY timestamp ASC;",
                        new String[] { streamname, Double.toString(queryTime) });
                int dtacount = dta.getCount();
                if (dtacount > 0) {
                    try {
                        Stream s = cdb.getStream(devicename + "/" + streamname);
                    } catch (RequestFailedException ex) {
                        // The request failed. This is presumably because the stream doesn't exist.
                        // therefore, we try creating it!
                        Log.w(TAG, "Stream does not exist: " + streamname + " because error was "
                                + ex.response.msg + ". Creating stream.");
                        Cursor streamcursor = db.rawQuery("SELECT * FROM streams WHERE streamname=?",
                                new String[] { streamname });
                        if (!streamcursor.moveToFirst()) {
                            throw new Exception("STREAM DOES NOT EXIST IN DATABASE!");
                        }
                        // Rebuild the stream definition from the local `streams`
                        // table and create it on the server.
                        Stream s = new Stream();
                        s.setSchema(streamcursor.getString(streamcursor.getColumnIndex("schema")));
                        s.setDatatype(streamcursor.getString(streamcursor.getColumnIndex("datatype")));
                        s.setIcon(streamcursor.getString(streamcursor.getColumnIndex("icon")));
                        s.setNickname(streamcursor.getString(streamcursor.getColumnIndex("nickname")));
                        s.setDescription(streamcursor.getString(streamcursor.getColumnIndex("description")));
                        streamcursor.close();
                        cdb.createStream(devicename + "/" + streamname, s);
                    }

                    Log.i(TAG, "Writing " + dtacount + " datapoints to " + streamname);

                    //Get the most recently inserted timestamp
                    double oldtime = 0;
                    String keyname = "sync_oldtime_" + streamname;
                    try {
                        oldtime = Double.parseDouble(getKey(keyname,null));
                    } catch (NumberFormatException nfe) {
                        // No previous sync watermark stored — start from 0.
                    }

                    // Now see if there exists a newer timestamp for the stream
                    double streamtime = cdb.getMostRecentTimestamp(devicename + "/" + streamname);
                    if (streamtime > oldtime) {
                        Log.w(TAG, "Stream on server has newer timestamps! Skipping until time!");
                        oldtime = streamtime;
                    }

                    // Build a JSON array of datapoints strictly newer than the
                    // watermark; duplicates are skipped to avoid server rejects.
                    StringBuilder totaldata = new StringBuilder();
                    totaldata.append("[");
                    for (int j = 0; j < dtacount; j++) {
                        dta.moveToNext();
                        double timestamp = dta.getDouble(0);
                        if (timestamp > oldtime) {
                            oldtime = timestamp;
                            totaldata.append("{\"t\": ");
                            totaldata.append(timestamp);
                            totaldata.append(", \"d\": ");
                            totaldata.append(dta.getString(1));
                            totaldata.append("},");
                        } else {
                            Log.w(TAG, streamname + ": Skipping duplicate timestamp");
                        }
                    }
                    // Drop the trailing comma (or the lone "[" if nothing
                    // qualified) and close the array.
                    String totaldatas = totaldata.toString();
                    totaldatas = totaldatas.substring(0, totaldata.length() - 1) + "]";
                    // Length 1 means the array is empty ("]") — nothing to send.
                    if (totaldatas.length() > 1) {
                        cdb.insertJson(devicename + "/" + streamname, totaldatas);

                        //Now delete the data from the cache
                        db.execSQL("DELETE FROM cache WHERE streamname=? AND timestamp <=?",
                                new Object[] { streamname, oldtime });
                        setKey(keyname, Double.toString(oldtime), null);
                    }
                }
            }

        } catch (Exception ex) {
            Log.e(TAG, "sync: ", ex);
            return false;
        }
        Log.v(TAG, "Sync successful - " + Integer.toString(size()) + " datapoints left");
        return true;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.jackrabbit.oak.segment;

import static com.google.common.collect.Lists.newArrayList;
import static com.google.common.collect.Maps.newHashMap;
import static java.lang.Long.numberOfLeadingZeros;
import static org.apache.jackrabbit.oak.api.Type.BINARIES;
import static org.apache.jackrabbit.oak.api.Type.BINARY;
import static org.apache.jackrabbit.oak.commons.PathUtils.concat;
import static org.apache.jackrabbit.oak.plugins.memory.EmptyNodeState.EMPTY_NODE;
import static org.apache.jackrabbit.oak.segment.file.PriorityCache.nextPowerOfTwo;

import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

import com.google.common.base.Supplier;
import com.google.common.hash.Hashing;
import org.apache.jackrabbit.oak.api.Blob;
import org.apache.jackrabbit.oak.api.PropertyState;
import org.apache.jackrabbit.oak.api.Type;
import org.apache.jackrabbit.oak.commons.IOUtils;
import org.apache.jackrabbit.oak.plugins.memory.BinaryPropertyState;
import org.apache.jackrabbit.oak.plugins.memory.MultiBinaryPropertyState;
import org.apache.jackrabbit.oak.plugins.memory.PropertyStates;
import org.apache.jackrabbit.oak.segment.compaction.SegmentGCOptions;
import org.apache.jackrabbit.oak.segment.file.PriorityCache;
import org.apache.jackrabbit.oak.spi.blob.BlobStore;
import org.apache.jackrabbit.oak.spi.state.ApplyDiff;
import org.apache.jackrabbit.oak.spi.state.NodeBuilder;
import org.apache.jackrabbit.oak.spi.state.NodeState;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Tool for compacting segments.
 *
 * Rewrites a diff between two node states on top of an already compacted base
 * state, de-duplicating nodes and binaries via record-id caches so that
 * previously compacted records are reused instead of copied again.
 */
public class Compactor {

    /** Logger instance */
    private static final Logger log = LoggerFactory.getLogger(Compactor.class);

    // When enabled, child nodes are written through the parent builder as they
    // are compacted instead of being buffered in an in-memory builder first.
    private static boolean eagerFlush = Boolean.getBoolean("oak.compaction.eagerFlush");

    static {
        if (eagerFlush) {
            log.debug("Eager flush enabled.");
        }
    }

    private final SegmentReader reader;

    private final BlobStore blobStore;

    private final SegmentWriter writer;

    private final ProgressTracker progress = new ProgressTracker();

    /**
     * Enables content based de-duplication of binaries. Involves a fair amount
     * of I/O when reading/comparing potentially equal blobs.
     */
    private final boolean binaryDedup;

    /**
     * Set the upper bound for the content based de-duplication checks.
     */
    private final long binaryDedupMaxSize;

    /**
     * Map from {@link #getBlobKey(Blob) blob keys} to matching compacted blob
     * record identifiers. Used to de-duplicate copies of the same binary
     * values.
     */
    private final Map<String, List<RecordId>> binaries = newHashMap();

    /**
     * Flag to use content equality verification before actually compacting the
     * state, on the childNodeChanged diff branch (Used in Backup scenario)
     */
    private boolean contentEqualityCheck;

    /**
     * Allows the cancellation of the compaction process. If this
     * {@code Supplier} returns {@code true}, this compactor will cancel
     * compaction and return a partial {@code SegmentNodeState} containing the
     * changes compacted before the cancellation.
     */
    private final Supplier<Boolean> cancel;

    // Total number of entries shared (9:1) between the node and blob caches.
    private static final int cacheSize;
    static {
        // "compress-interval" is the deprecated spelling of the same knob;
        // the new property wins when both are set.
        Integer ci = Integer.getInteger("compress-interval");
        Integer size = Integer.getInteger("oak.segment.compaction.cacheSize");
        if (size != null) {
            cacheSize = size;
        } else if (ci != null) {
            log.warn("Deprecated argument 'compress-interval', please use 'oak.segment.compaction.cacheSize' instead.");
            cacheSize = ci;
        } else {
            cacheSize = 100000;
        }
    }

    /**
     * Deduplication cache for blobs. 10% of the total cache size.
     */
    private final PriorityCache<RecordId, RecordId> blobCache =
            new PriorityCache<>((int) nextPowerOfTwo(cacheSize/10));

    /**
     * Deduplication cache for nodes. 90% of the total cache size.
     */
    private final PriorityCache<RecordId, RecordId> nodeCache =
            new PriorityCache<>((int) nextPowerOfTwo(cacheSize/10*9));

    public Compactor(SegmentReader reader, SegmentWriter writer,
            BlobStore blobStore, Supplier<Boolean> cancel, SegmentGCOptions gc) {
        this.reader = reader;
        this.writer = writer;
        this.blobStore = blobStore;
        this.cancel = cancel;
        this.binaryDedup = gc.isBinaryDeduplication();
        this.binaryDedupMaxSize = gc.getBinaryDeduplicationMaxSize();
    }

    // Applies the before->after diff onto a freshly compacted copy of `onto`.
    private SegmentNodeBuilder process(NodeState before, NodeState after,
            NodeState onto) throws IOException {
        SegmentNodeBuilder builder = new SegmentNodeBuilder(
                writer.writeNode(onto), writer);
        new CompactDiff(builder).diff(before, after);
        return builder;
    }

    /**
     * Compact the differences between a {@code before} and a {@code after} on
     * top of an {@code onto} state.
     *
     * @param before
     *            the before state
     * @param after
     *            the after state
     * @param onto
     *            the onto state
     * @return the compacted state
     */
    public SegmentNodeState compact(NodeState before, NodeState after,
            NodeState onto) throws IOException {
        progress.start();
        SegmentNodeState compacted = process(before, after, onto)
                .getNodeState();
        writer.flush();
        progress.stop();
        return compacted;
    }

    /**
     * Diff handler that copies every added/changed record into the compacted
     * store, consulting {@link #nodeCache} so already-compacted subtrees are
     * linked rather than rewritten.
     */
    private class CompactDiff extends ApplyDiff {
        // First IOException raised inside a diff callback; rethrown by diff().
        private IOException exception;

        /**
         * Current processed path, or null if the trace log is not enabled at
         * the beginning of the compaction call. The null check will also be
         * used to verify if a trace log will be needed or not
         */
        private final String path;

        CompactDiff(NodeBuilder builder) {
            super(builder);
            if (log.isTraceEnabled()) {
                this.path = "/";
            } else {
                this.path = null;
            }
        }

        private CompactDiff(NodeBuilder builder, String path, String childName) {
            super(builder);
            if (path != null) {
                this.path = concat(path, childName);
            } else {
                this.path = null;
            }
        }

        boolean diff(NodeState before, NodeState after) throws IOException {
            // CancelableDiff aborts the comparison once `cancel` returns true.
            boolean success = after.compareAgainstBaseState(before,
                    new CancelableDiff(this, cancel));
            if (exception != null) {
                throw new IOException(exception);
            }
            return success;
        }

        @Override
        public boolean propertyAdded(PropertyState after) {
            if (path != null) {
                log.trace("propertyAdded {}/{}", path, after.getName());
            }
            progress.onProperty();
            return super.propertyAdded(compact(after));
        }

        @Override
        public boolean propertyChanged(PropertyState before, PropertyState after) {
            if (path != null) {
                log.trace("propertyChanged {}/{}", path, after.getName());
            }
            progress.onProperty();
            return super.propertyChanged(before, compact(after));
        }

        @Override
        public boolean childNodeAdded(String name, NodeState after) {
            if (path != null) {
                log.trace("childNodeAdded {}/{}", path, name);
            }

            // Cache hit: the subtree was already compacted — just link it.
            RecordId id = null;
            if (after instanceof SegmentNodeState) {
                id = ((SegmentNodeState) after).getRecordId();
                RecordId compactedId = nodeCache.get(id, 0);
                if (compactedId != null) {
                    builder.setChildNode(name, new SegmentNodeState(reader, writer, compactedId));
                    return true;
                }
            }

            progress.onNode();
            try {
                NodeBuilder child;
                if (eagerFlush) {
                    child = builder.setChildNode(name);
                } else {
                    child = EMPTY_NODE.builder();
                }
                boolean success = new CompactDiff(child, path, name).diff(
                        EMPTY_NODE, after);
                if (success) {
                    SegmentNodeState state = writer.writeNode(child.getNodeState());
                    builder.setChildNode(name, state);
                    if (id != null) {
                        nodeCache.put(id, state.getRecordId(), 0, cost(state));
                    }
                }
                return success;
            } catch (IOException e) {
                // Remember the failure; diff() rethrows after the traversal.
                exception = e;
                return false;
            }
        }

        @Override
        public boolean childNodeChanged(String name, NodeState before,
                NodeState after) {
            if (path != null) {
                log.trace("childNodeChanged {}/{}", path, name);
            }

            RecordId id = null;
            if (after instanceof SegmentNodeState) {
                id = ((SegmentNodeState) after).getRecordId();
                RecordId compactedId = nodeCache.get(id, 0);
                if (compactedId != null) {
                    builder.setChildNode(name, new SegmentNodeState(reader, writer, compactedId));
                    return true;
                }
            }

            // Backup scenario: skip the copy entirely when the content is
            // provably unchanged.
            if (contentEqualityCheck && before.equals(after)) {
                return true;
            }

            progress.onNode();
            try {
                NodeBuilder child = builder.getChildNode(name);
                boolean success = new CompactDiff(child, path, name).diff(
                        before, after);
                if (success) {
                    SegmentNodeState state = writer.writeNode(child.getNodeState());
                    if (id != null) {
                        nodeCache.put(id, state.getRecordId(), 0, cost(state));
                    }
                }
                return success;
            } catch (IOException e) {
                exception = e;
                return false;
            }
        }
    }

    // Cache cost of a node ~ log2 of its child count (bigger subtrees are
    // more valuable to keep in the dedup cache).
    private static byte cost(SegmentNodeState node) {
        long childCount = node.getChildNodeCount(Long.MAX_VALUE);
        return cost(childCount);
    }

    // Cache cost of a blob ~ log2 of its length.
    private static byte cost(SegmentBlob blob) {
        long length = blob.length();
        return cost(length);
    }

    // Maps n to Byte.MIN_VALUE + floor(log2(n)) + 1; larger n yields a higher
    // (byte-ranged) cost.
    private static byte cost(long n) {
        return (byte) (Byte.MIN_VALUE + 64 - numberOfLeadingZeros(n));
    }

    // Compacts a property, rewriting binary values through compact(Blob);
    // non-binary values are copied as-is.
    private PropertyState compact(PropertyState property) {
        String name = property.getName();
        Type<?> type = property.getType();
        if (type == BINARY) {
            Blob blob = compact(property.getValue(Type.BINARY));
            return BinaryPropertyState.binaryProperty(name, blob);
        } else if (type == BINARIES) {
            List<Blob> blobs = new ArrayList<Blob>();
            for (Blob blob : property.getValue(BINARIES)) {
                blobs.add(compact(blob));
            }
            return MultiBinaryPropertyState.binaryPropertyFromBlob(name, blobs);
        } else {
            Object value = property.getValue(type);
            return PropertyStates.createProperty(name, value, type);
        }
    }

    /**
     * Compacts (and de-duplicates) the given blob.
     *
     * @param blob
     *            blob to be compacted
     * @return compacted blob
     */
    private Blob compact(Blob blob) {
        if (blob instanceof SegmentBlob) {
            SegmentBlob sb = (SegmentBlob) blob;
            try {
                // Check if we've already cloned this specific record
                RecordId id = sb.getRecordId();
                // TODO verify binary impact on cache
                RecordId compactedId = blobCache.get(id, 0);
                if (compactedId != null) {
                    return new SegmentBlob(blobStore, compactedId);
                }

                progress.onBinary();

                // if the blob is external, just clone it
                if (sb.isExternal()) {
                    return writer.writeBlob(sb);
                }

                // if the blob is inlined, just clone it
                if (sb.length() < Segment.MEDIUM_LIMIT) {
                    SegmentBlob clone = writer.writeBlob(blob);
                    blobCache.put(id, clone.getRecordId(), 0, cost(clone));
                    return clone;
                }

                List<RecordId> ids = null;
                String key = null;
                boolean dedup = binaryDedup
                        && blob.length() <= binaryDedupMaxSize;
                if (dedup) {
                    // alternatively look if the exact same binary has been
                    // cloned
                    key = getBlobKey(blob);
                    ids = binaries.get(key);
                    if (ids != null) {
                        for (RecordId duplicateId : ids) {
                            if (new SegmentBlob(blobStore, duplicateId)
                                    .equals(sb)) {
                                return new SegmentBlob(blobStore, duplicateId);
                            }
                        }
                    }
                }

                // if not, clone the large blob and keep track of the result
                sb = writer.writeBlob(blob);
                blobCache.put(id, sb.getRecordId(), 0, cost(sb));

                if (dedup) {
                    if (ids == null) {
                        ids = newArrayList();
                        binaries.put(key, ids);
                    }
                    ids.add(sb.getRecordId());
                }

                return sb;
            } catch (IOException e) {
                log.warn("Failed to compact a blob", e);
                // fall through
            }
        }
        // no way to compact this blob, so we'll just keep it as-is
        return blob;
    }

    // Cheap content key: blob length plus SHA-1 of the first block. Collisions
    // are resolved by the equals() check in compact(Blob) above.
    private static String getBlobKey(Blob blob) throws IOException {
        InputStream stream = blob.getNewStream();
        try {
            byte[] buffer = new byte[SegmentWriter.BLOCK_SIZE];
            int n = IOUtils.readFully(stream, buffer, 0, buffer.length);
            return blob.length() + ":" + Hashing.sha1().hashBytes(buffer, 0, n);
        } finally {
            stream.close();
        }
    }

    // Counts processed records and periodically logs compaction progress.
    private static class ProgressTracker {
        // Log a progress line every `logAt` nodes (tunable via system property).
        private final long logAt = Long.getLong("compaction-progress-log",
                150000);

        private long start = 0;
        private long nodes = 0;
        private long properties = 0;
        private long binaries = 0;

        void start() {
            nodes = 0;
            properties = 0;
            binaries = 0;
            start = System.currentTimeMillis();
        }

        void onNode() {
            if (++nodes % logAt == 0) {
                logProgress(start, false);
                start = System.currentTimeMillis();
            }
        }

        void onProperty() {
            properties++;
        }

        void onBinary() {
            binaries++;
        }

        void stop() {
            logProgress(start, true);
        }

        private void logProgress(long start, boolean done) {
            log.debug(
                    "Compacted {} nodes, {} properties, {} binaries in {} ms.",
                    nodes, properties, binaries,
                    System.currentTimeMillis() - start);
            if (done) {
                log.info(
                        "Finished compaction: {} nodes, {} properties, {} binaries.",
                        nodes, properties, binaries);
            }
        }
    }

    public void setContentEqualityCheck(boolean contentEqualityCheck) {
        this.contentEqualityCheck = contentEqualityCheck;
    }
}
/**
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 *
 */
package org.apache.bookkeeper.test;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import java.io.File;
import java.util.Enumeration;
import java.util.concurrent.TimeUnit;

import org.apache.bookkeeper.bookie.Bookie;
import org.apache.bookkeeper.bookie.InterleavedLedgerStorage;
import org.apache.bookkeeper.bookie.LedgerDirsManager;
import org.apache.bookkeeper.client.BKException;
import org.apache.bookkeeper.client.BookKeeper.DigestType;
import org.apache.bookkeeper.client.LedgerEntry;
import org.apache.bookkeeper.client.LedgerHandle;
import org.apache.bookkeeper.conf.ServerConfiguration;
import org.junit.Test;

/**
 * Test to verify the readonly feature of bookies.
 */
public class ReadOnlyBookieTest extends BookKeeperClusterTestCase {

    public ReadOnlyBookieTest() {
        // Two-bookie cluster; entry-log pre-allocation is disabled and the
        // minimum usable size is maxed out so that filling a ledger dir
        // reliably drives the bookie into readonly mode.
        super(2);
        baseConf.setLedgerStorageClass(InterleavedLedgerStorage.class.getName());
        baseConf.setEntryLogFilePreAllocationEnabled(false);
        baseConf.setMinUsableSizeForEntryLogCreation(Long.MAX_VALUE);
    }

    /**
     * Check readonly bookie.
     */
    @Test
    public void testBookieShouldServeAsReadOnly() throws Exception {
        killBookie(0);
        baseConf.setReadOnlyModeEnabled(true);
        startNewBookie();
        LedgerHandle ledger = bkc.createLedger(2, 2, DigestType.MAC,
                "".getBytes());

        // Check new bookie with readonly mode enabled.
        File[] ledgerDirs = bsConfs.get(1).getLedgerDirs();
        assertEquals("Only one ledger dir should be present", 1,
                ledgerDirs.length);
        Bookie bookie = bs.get(1).getBookie();
        LedgerDirsManager ledgerDirsManager = bookie.getLedgerDirsManager();

        for (int i = 0; i < 10; i++) {
            ledger.addEntry("data".getBytes());
        }

        // Now add the current ledger dir to filled dirs list
        ledgerDirsManager.addToFilledDirs(new File(ledgerDirs[0], "current"));

        try {
            ledger.addEntry("data".getBytes());
            fail("Should fail to add entry since there isn't enough bookies alive.");
        } catch (BKException.BKNotEnoughBookiesException e) {
            // Expected
        }

        assertTrue("Bookie should be running and converted to readonly mode",
                bookie.isRunning() && bookie.isReadOnly());

        // Now kill the other bookie and read entries from the readonly bookie
        killBookie(0);

        Enumeration<LedgerEntry> readEntries = ledger.readEntries(0, 9);
        while (readEntries.hasMoreElements()) {
            LedgerEntry entry = readEntries.nextElement();
            assertEquals("Entry should contain correct data", "data",
                    new String(entry.getEntry()));
        }
    }

    /**
     * Verify that a readonly bookie transitions back to writable once its
     * ledger dir becomes usable again.
     */
    @Test
    public void testBookieShouldTurnWritableFromReadOnly() throws Exception {
        killBookie(0);
        baseConf.setReadOnlyModeEnabled(true);
        startNewBookie();
        LedgerHandle ledger = bkc.createLedger(2, 2, DigestType.MAC,
                "".getBytes());

        // Check new bookie with readonly mode enabled.
        File[] ledgerDirs = bsConfs.get(1).getLedgerDirs();
        assertEquals("Only one ledger dir should be present", 1,
                ledgerDirs.length);
        Bookie bookie = bs.get(1).getBookie();
        LedgerDirsManager ledgerDirsManager = bookie.getLedgerDirsManager();

        for (int i = 0; i < 10; i++) {
            ledger.addEntry("data".getBytes());
        }

        File testDir = new File(ledgerDirs[0], "current");

        // Now add the current ledger dir to filled dirs list
        ledgerDirsManager.addToFilledDirs(testDir);

        try {
            ledger.addEntry("data".getBytes());
            fail("Should fail to add entry since there isn't enough bookies alive.");
        } catch (BKException.BKNotEnoughBookiesException e) {
            // Expected
        }

        bkc.waitForReadOnlyBookie(Bookie.getBookieAddress(bsConfs.get(1)))
            .get(30, TimeUnit.SECONDS);

        LOG.info("bookie is running {}, readonly {}.", bookie.isRunning(), bookie.isReadOnly());
        assertTrue("Bookie should be running and converted to readonly mode",
                bookie.isRunning() && bookie.isReadOnly());

        // should fail to create ledger
        try {
            bkc.createLedger(2, 2, DigestType.MAC, "".getBytes());
            fail("Should fail to create a ledger since there isn't enough bookies alive.");
        } catch (BKException.BKNotEnoughBookiesException bke) {
            // Expected.
        }

        // Now add the current ledger dir back to writable dirs list
        ledgerDirsManager.addToWritableDirs(testDir, true);

        bkc.waitForWritableBookie(Bookie.getBookieAddress(bsConfs.get(1)))
            .get(30, TimeUnit.SECONDS);

        LOG.info("bookie is running {}, readonly {}.", bookie.isRunning(), bookie.isReadOnly());
        assertTrue("Bookie should be running and converted back to writable mode",
                bookie.isRunning() && !bookie.isReadOnly());

        LedgerHandle newLedger = bkc.createLedger(2, 2, DigestType.MAC, "".getBytes());
        for (int i = 0; i < 10; i++) {
            newLedger.addEntry("data".getBytes());
        }
        Enumeration<LedgerEntry> readEntries = newLedger.readEntries(0, 9);
        while (readEntries.hasMoreElements()) {
            LedgerEntry entry = readEntries.nextElement();
            assertEquals("Entry should contain correct data", "data",
                    new String(entry.getEntry()));
        }
    }

    /**
     * check readOnlyModeEnabled=false.
     */
    @Test
    public void testBookieShutdownIfReadOnlyModeNotEnabled() throws Exception {
        killBookie(1);
        baseConf.setReadOnlyModeEnabled(false);
        startNewBookie();

        File[] ledgerDirs = bsConfs.get(1).getLedgerDirs();
        assertEquals("Only one ledger dir should be present", 1,
                ledgerDirs.length);
        Bookie bookie = bs.get(1).getBookie();
        LedgerHandle ledger = bkc.createLedger(2, 2, DigestType.MAC,
                "".getBytes());
        LedgerDirsManager ledgerDirsManager = bookie.getLedgerDirsManager();

        for (int i = 0; i < 10; i++) {
            ledger.addEntry("data".getBytes());
        }

        // Now add the current ledger dir to filled dirs list
        ledgerDirsManager.addToFilledDirs(new File(ledgerDirs[0], "current"));

        try {
            ledger.addEntry("data".getBytes());
            fail("Should fail to add entry since there isn't enough bookies alive.");
        } catch (BKException.BKNotEnoughBookiesException e) {
            // Expected
        }

        // wait for up to 10 seconds for bookie to shut down
        for (int i = 0; i < 10 && bookie.isAlive(); i++) {
            Thread.sleep(1000);
        }
        assertFalse("Bookie should shutdown if readOnlyMode not enabled",
                bookie.isAlive());
    }

    /**
     * Check multiple ledger dirs.
     */
    @Test
    public void testBookieContinueWritingIfMultipleLedgersPresent()
            throws Exception {
        startNewBookieWithMultipleLedgerDirs(2);

        File[] ledgerDirs = bsConfs.get(1).getLedgerDirs();
        // Fixed assertion message: this test intentionally configures TWO dirs.
        assertEquals("Two ledger dirs should be present", 2,
                ledgerDirs.length);
        Bookie bookie = bs.get(1).getBookie();
        LedgerHandle ledger = bkc.createLedger(2, 2, DigestType.MAC,
                "".getBytes());
        LedgerDirsManager ledgerDirsManager = bookie.getLedgerDirsManager();

        for (int i = 0; i < 10; i++) {
            ledger.addEntry("data".getBytes());
        }

        // Now add the current ledger dir to filled dirs list
        ledgerDirsManager.addToFilledDirs(new File(ledgerDirs[0], "current"));
        for (int i = 0; i < 10; i++) {
            ledger.addEntry("data".getBytes());
        }
        assertEquals("writable dirs should have one dir", 1, ledgerDirsManager
                .getWritableLedgerDirs().size());
        // Fixed assertion message: the bookie is expected to STAY alive here,
        // writing to its remaining ledger dir.
        assertTrue("Bookie should continue writing if another ledger dir is available",
                bookie.isAlive());
    }

    // Replaces bookie 1 with a new bookie configured with the given number of
    // fresh temporary ledger dirs.
    private void startNewBookieWithMultipleLedgerDirs(int numOfLedgerDirs)
            throws Exception {
        ServerConfiguration conf = bsConfs.get(1);
        killBookie(1);

        File[] ledgerDirs = new File[numOfLedgerDirs];
        for (int i = 0; i < numOfLedgerDirs; i++) {
            File dir = createTempDir("bookie", "test");
            tmpDirs.add(dir);
            ledgerDirs[i] = dir;
        }

        ServerConfiguration newConf = newServerConfiguration(
                conf.getBookiePort() + 1, ledgerDirs[0], ledgerDirs);
        bsConfs.add(newConf);
        bs.add(startBookie(newConf));
    }

    /**
     * Test ledger creation with readonly bookies.
     */
    @Test
    public void testLedgerCreationShouldFailWithReadonlyBookie() throws Exception {
        killBookie(1);
        baseConf.setReadOnlyModeEnabled(true);
        startNewBookie();

        bs.get(1).getBookie().getStateManager().doTransitionToReadOnlyMode();
        try {
            bkc.waitForReadOnlyBookie(Bookie.getBookieAddress(bsConfs.get(1)))
                .get(30, TimeUnit.SECONDS);
            bkc.createLedger(2, 2, DigestType.CRC32, "".getBytes());
            fail("Must throw exception, as there is one readonly bookie");
        } catch (BKException e) {
            // Expected
        }
    }

    /**
     * Try to read closed ledger from restarted ReadOnlyBookie.
     */
    // Fixed: @Test was missing, so this test was silently never executed.
    @Test
    public void testReadFromReadOnlyBookieShouldBeSuccess() throws Exception {
        LedgerHandle ledger = bkc.createLedger(2, 2, DigestType.MAC, "".getBytes());
        for (int i = 0; i < 10; i++) {
            ledger.addEntry("data".getBytes());
        }
        ledger.close();
        bsConfs.get(1).setReadOnlyModeEnabled(true);
        bsConfs.get(1).setDiskCheckInterval(500);
        restartBookies();

        // Check new bookie with readonly mode enabled.
        File[] ledgerDirs = bsConfs.get(1).getLedgerDirs();
        assertEquals("Only one ledger dir should be present", 1,
                ledgerDirs.length);
        Bookie bookie = bs.get(1).getBookie();
        LedgerDirsManager ledgerDirsManager = bookie.getLedgerDirsManager();

        // Now add the current ledger dir to filled dirs list
        ledgerDirsManager.addToFilledDirs(new File(ledgerDirs[0], "current"));

        // Wait till Bookie converts to ReadOnly mode.
        Thread.sleep(1000);
        assertTrue("Bookie should be converted to readonly mode",
                bookie.isRunning() && bookie.isReadOnly());

        // Now kill the other bookie and read entries from the readonly bookie
        killBookie(0);

        Enumeration<LedgerEntry> readEntries = ledger.readEntries(0, 9);
        while (readEntries.hasMoreElements()) {
            LedgerEntry entry = readEntries.nextElement();
            assertEquals("Entry should contain correct data", "data",
                    new String(entry.getEntry()));
        }
    }
}
/**
 * Copyright (c) 2014, Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the BSD-style license found in the
 * LICENSE file in the root directory of this source tree. An additional grant
 * of patent rights can be found in the PATENTS file in the same directory.
 */
package com.facebook.csslayout;

import javax.annotation.Nullable;

import java.util.ArrayList;

import com.facebook.infer.annotation.Assertions;

/**
 * A CSS Node. It has a style object you can manipulate at {@link #style}. After calling
 * {@link #calculateLayout()}, {@link #layout} will be filled with the results of the layout.
 */
public class CSSNode {

  // Lifecycle of a node's layout; nested enums are implicitly static.
  private enum LayoutState {
    /**
     * Some property of this node or its children has changes and the current values in
     * {@link #layout} are not valid.
     */
    DIRTY,

    /**
     * This node has a new layout relative to the last time {@link #markLayoutSeen()} was called.
     */
    HAS_NEW_LAYOUT,

    /**
     * {@link #layout} is valid for the node's properties and this layout has been marked as
     * having been seen.
     */
    UP_TO_DATE,
  }

  // Nested interfaces are implicitly static; their methods implicitly public.
  public interface MeasureFunction {

    /**
     * Should measure the given node and put the result in the given MeasureOutput.
     *
     * NB: measure is NOT guaranteed to be threadsafe/re-entrant safe!
     */
    void measure(CSSNode node, float width, boolean isExactly, MeasureOutput measureOutput);
  }

  // VisibleForTesting
  protected final CSSStyle style = new CSSStyle();
  /*package*/ final CSSLayout layout = new CSSLayout();
  /*package*/ final CachedCSSLayout lastLayout = new CachedCSSLayout();

  public int lineIndex = 0;

  // Children are lazily allocated (see addChildAt); null means "no children".
  private @Nullable ArrayList<CSSNode> mChildren;
  private @Nullable CSSNode mParent;
  private @Nullable MeasureFunction mMeasureFunction = null;
  private LayoutState mLayoutState = LayoutState.DIRTY;

  public int getChildCount() {
    return mChildren == null ? 0 : mChildren.size();
  }

  public CSSNode getChildAt(int i) {
    Assertions.assertNotNull(mChildren);
    return mChildren.get(i);
  }

  /**
   * Inserts a child at index {@code i} and marks this subtree dirty.
   *
   * @throws IllegalStateException if the child already has a parent
   */
  public void addChildAt(CSSNode child, int i) {
    if (child.mParent != null) {
      throw new IllegalStateException("Child already has a parent, it must be removed first.");
    }
    if (mChildren == null) {
      // 4 is kinda arbitrary, but the default of 10 seems really high for an average View.
      mChildren = new ArrayList<CSSNode>(4);
    }
    mChildren.add(i, child);
    child.mParent = this;

    dirty();
  }

  /**
   * Removes and returns the child at index {@code i}, detaching it from this
   * node and marking this subtree dirty.
   */
  public CSSNode removeChildAt(int i) {
    Assertions.assertNotNull(mChildren);
    CSSNode removed = mChildren.remove(i);
    removed.mParent = null;

    dirty();
    return removed;
  }

  public @Nullable CSSNode getParent() {
    return mParent;
  }

  /**
   * @return the index of the given child, or -1 if the child doesn't exist in this node.
   */
  public int indexOf(CSSNode child) {
    Assertions.assertNotNull(mChildren);
    return mChildren.indexOf(child);
  }

  public void setMeasureFunction(MeasureFunction measureFunction) {
    if (!valuesEqual(mMeasureFunction, measureFunction)) {
      mMeasureFunction = measureFunction;
      dirty();
    }
  }

  public boolean isMeasureDefined() {
    return mMeasureFunction != null;
  }

  // Delegates measurement to the user-supplied MeasureFunction; width/height
  // are reset to UNDEFINED first so stale values can't leak through.
  /*package*/ MeasureOutput measure(MeasureOutput measureOutput, boolean isExactly, float width) {
    if (!isMeasureDefined()) {
      throw new RuntimeException("Measure function isn't defined!");
    }
    measureOutput.height = CSSConstants.UNDEFINED;
    measureOutput.width = CSSConstants.UNDEFINED;
    Assertions.assertNotNull(mMeasureFunction).measure(this, width, isExactly, measureOutput);
    return measureOutput;
  }

  /**
   * Performs the actual layout and saves the results in {@link #layout}
   */
  public void calculateLayout(CSSLayoutContext layoutContext) {
    layout.resetResult();
    LayoutEngine.layoutNode(layoutContext, this, CSSConstants.UNDEFINED, null);
  }

  /**
   * See {@link LayoutState#DIRTY}.
   */
  protected boolean isDirty() {
    return mLayoutState == LayoutState.DIRTY;
  }

  /**
   * See {@link LayoutState#HAS_NEW_LAYOUT}.
   */
  public boolean hasNewLayout() {
    return mLayoutState == LayoutState.HAS_NEW_LAYOUT;
  }

  // Marks this node and all ancestors dirty. A HAS_NEW_LAYOUT node must have
  // its layout acknowledged (markLayoutSeen) before it may be dirtied again.
  protected void dirty() {
    if (mLayoutState == LayoutState.DIRTY) {
      return;
    } else if (mLayoutState == LayoutState.HAS_NEW_LAYOUT) {
      throw new IllegalStateException("Previous layout was ignored! markLayoutSeen() never called");
    }

    mLayoutState = LayoutState.DIRTY;

    if (mParent != null) {
      mParent.dirty();
    }
  }

  /*package*/ void markHasNewLayout() {
    mLayoutState = LayoutState.HAS_NEW_LAYOUT;
  }

  /**
   * Tells the node that the current values in {@link #layout} have been seen. Subsequent calls
   * to {@link #hasNewLayout()} will return false until this node is laid out with new parameters.
   * You must call this each time the layout is generated if the node has a new layout.
   */
  public void markLayoutSeen() {
    if (!hasNewLayout()) {
      throw new IllegalStateException("Expected node to have a new layout to be seen!");
    }

    mLayoutState = LayoutState.UP_TO_DATE;
  }

  private void toStringWithIndentation(StringBuilder result, int level) {
    // Spaces and tabs are dropped by IntelliJ logcat integration, so rely on __ instead.
    StringBuilder indentation = new StringBuilder();
    for (int i = 0; i < level; ++i) {
      indentation.append("__");
    }

    // Append the builders directly instead of concatenating strings
    // (avoids the extra String allocations of `indentation + "]"`).
    result.append(indentation);
    result.append(layout.toString());

    if (getChildCount() == 0) {
      return;
    }

    result.append(", children: [\n");
    for (int i = 0; i < getChildCount(); i++) {
      getChildAt(i).toStringWithIndentation(result, level + 1);
      result.append("\n");
    }
    result.append(indentation).append("]");
  }

  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder();
    this.toStringWithIndentation(sb, 0);
    return sb.toString();
  }

  protected boolean valuesEqual(float f1, float f2) {
    return FloatUtil.floatsEqual(f1, f2);
  }

  // Null-safe equality; used by every setter to avoid dirtying on no-ops.
  protected <T> boolean valuesEqual(@Nullable T o1, @Nullable T o2) {
    if (o1 == null) {
      return o2 == null;
    }
    return o1.equals(o2);
  }

  public void setDirection(CSSDirection direction) {
    if (!valuesEqual(style.direction, direction)) {
      style.direction = direction;
      dirty();
    }
  }

  public void setFlexDirection(CSSFlexDirection flexDirection) {
    if (!valuesEqual(style.flexDirection, flexDirection)) {
      style.flexDirection = flexDirection;
      dirty();
    }
  }

  public void setJustifyContent(CSSJustify justifyContent) {
    if (!valuesEqual(style.justifyContent, justifyContent)) {
      style.justifyContent = justifyContent;
      dirty();
    }
  }

  public void setAlignItems(CSSAlign alignItems) {
    if (!valuesEqual(style.alignItems, alignItems)) {
      style.alignItems = alignItems;
      dirty();
    }
  }

  public void setAlignSelf(CSSAlign alignSelf) {
    if (!valuesEqual(style.alignSelf, alignSelf)) {
      style.alignSelf = alignSelf;
      dirty();
    }
  }

  public void setPositionType(CSSPositionType positionType) {
    if (!valuesEqual(style.positionType, positionType)) {
      style.positionType = positionType;
      dirty();
    }
  }

  public void setWrap(CSSWrap flexWrap) {
    if (!valuesEqual(style.flexWrap, flexWrap)) {
      style.flexWrap = flexWrap;
      dirty();
    }
  }

  public void setFlex(float flex) {
    if (!valuesEqual(style.flex, flex)) {
      style.flex = flex;
      dirty();
    }
  }

  public void setMargin(int spacingType, float margin) {
    if (style.margin.set(spacingType, margin)) {
      dirty();
    }
  }

  public void setPadding(int spacingType, float padding) {
    if (style.padding.set(spacingType, padding)) {
      dirty();
    }
  }

  public void setBorder(int spacingType, float border) {
    if (style.border.set(spacingType, border)) {
      dirty();
    }
  }

  public void setPositionTop(float positionTop) {
    if (!valuesEqual(style.positionTop, positionTop)) {
      style.positionTop = positionTop;
      dirty();
    }
  }

  public void setPositionBottom(float positionBottom) {
    if (!valuesEqual(style.positionBottom, positionBottom)) {
      style.positionBottom = positionBottom;
      dirty();
    }
  }

  public void setPositionLeft(float positionLeft) {
    if (!valuesEqual(style.positionLeft, positionLeft)) {
      style.positionLeft = positionLeft;
      dirty();
    }
  }

  public void setPositionRight(float positionRight) {
    if (!valuesEqual(style.positionRight, positionRight)) {
      style.positionRight = positionRight;
      dirty();
    }
  }

  public void setStyleWidth(float width) {
    if (!valuesEqual(style.width, width)) {
      style.width = width;
      dirty();
    }
  }

  public void setStyleHeight(float height) {
    if (!valuesEqual(style.height, height)) {
      style.height = height;
      dirty();
    }
  }

  public float getLayoutX() {
    return layout.left;
  }

  public float getLayoutY() {
    return layout.top;
  }

  public float getLayoutWidth() {
    return layout.width;
  }

  public float getLayoutHeight() {
    return layout.height;
  }

  public CSSDirection getLayoutDirection() {
    return layout.direction;
  }

  /**
   * Get this node's padding, as defined by style + default padding.
   */
  public Spacing getStylePadding() {
    return style.padding;
  }

  /**
   * Get this node's width, as defined in the style.
   */
  public float getStyleWidth() {
    return style.width;
  }

  /**
   * Get this node's height, as defined in the style.
   */
  public float getStyleHeight() {
    return style.height;
  }

  /**
   * Get this node's direction, as defined in the style.
   */
  public CSSDirection getStyleDirection() {
    return style.direction;
  }

  /**
   * Set a default padding (left/top/right/bottom) for this node.
   */
  public void setDefaultPadding(int spacingType, float padding) {
    if (style.padding.setDefault(spacingType, padding)) {
      dirty();
    }
  }
}
/**
 * Copyright 2007 The Apache Software Foundation
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.regionserver;

import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.spy;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.CountDownLatch;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseTestCase;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.SmallTests;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.io.hfile.HFileDataBlockEncoderImpl;
import org.apache.hadoop.hbase.io.hfile.HFileDataBlockEncoder;
import org.apache.hadoop.hbase.io.hfile.HFileScanner;
import org.apache.hadoop.hbase.regionserver.compactions.CompactionProgress;
import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;
import org.apache.hadoop.hbase.regionserver.metrics.RegionServerMetrics;
import org.apache.hadoop.hbase.regionserver.wal.HLog;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.experimental.categories.Category;
import org.mockito.Mockito;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;

/**
 * Test compactions
 *
 * <p>Exercises minor and major compaction behavior of a single {@link HRegion}
 * created fresh per test by {@link #setUp()}: expired/deleted-cell cleanup,
 * data-block encoding during compaction, time-based major compaction
 * scheduling, delete-marker retention across minor compactions, compaction
 * interruption, corrupt-compaction-result handling, and custom/trackable
 * compaction requests (HBASE-5920).
 */
@Category(SmallTests.class)
public class TestCompaction extends HBaseTestCase {
  static final Log LOG = LogFactory.getLog(TestCompaction.class.getName());
  private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();

  // Region under test; re-created for every test method in setUp().
  private HRegion r = null;
  private HTableDescriptor htd = null;
  private Path compactionDir = null;
  private Path regionCompactionDir = null;
  // fam1 / START_KEY / START_KEY_BYTES are inherited from HBaseTestCase.
  private static final byte [] COLUMN_FAMILY = fam1;
  private final byte [] STARTROW = Bytes.toBytes(START_KEY);
  private static final byte [] COLUMN_FAMILY_TEXT = COLUMN_FAMILY;
  // Read from "hbase.hstore.compactionThreshold" in the constructor (default 3).
  private int compactionThreshold;
  private byte[] firstRowBytes, secondRowBytes, thirdRowBytes;
  final private byte[] col1, col2;
  private static final long MAX_FILES_TO_COMPACT = 10;

  /** constructor */
  public TestCompaction() throws Exception {
    super();

    // Set cache flush size to 1MB
    conf.setInt(HConstants.HREGION_MEMSTORE_FLUSH_SIZE, 1024*1024);
    conf.setInt("hbase.hregion.memstore.block.multiplier", 100);
    compactionThreshold = conf.getInt("hbase.hstore.compactionThreshold", 3);

    firstRowBytes = START_KEY.getBytes(HConstants.UTF8_ENCODING);
    secondRowBytes = START_KEY.getBytes(HConstants.UTF8_ENCODING);
    // Increment the least significant character so we get to next row.
    secondRowBytes[START_KEY_BYTES.length - 1]++;
    thirdRowBytes = START_KEY.getBytes(HConstants.UTF8_ENCODING);
    // Incremented twice: thirdRowBytes is two rows past START_KEY.
    thirdRowBytes[START_KEY_BYTES.length - 1]++;
    thirdRowBytes[START_KEY_BYTES.length - 1]++;
    col1 = "column1".getBytes(HConstants.UTF8_ENCODING);
    col2 = "column2".getBytes(HConstants.UTF8_ENCODING);
  }

  /** Creates a fresh table descriptor and region for each test. */
  @Override
  public void setUp() throws Exception {
    super.setUp();
    this.htd = createTableDescriptor(getName());
    this.r = createNewHRegion(htd, null, null);
  }

  /** Closes the region and deletes its WAL so tests do not leak files. */
  @Override
  public void tearDown() throws Exception {
    HLog hlog = r.getLog();
    this.r.close();
    hlog.closeAndDelete();
    super.tearDown();
  }

  /**
   * Test that on a major compaction, if all cells are expired or deleted, then
   * we'll end up with no product.  Make sure scanner over region returns
   * right answer in this case - and that it just basically works.
   * @throws IOException
   */
  public void testMajorCompactingToNoOutput() throws IOException {
    createStoreFile(r);
    for (int i = 0; i < compactionThreshold; i++) {
      createStoreFile(r);
    }
    // Now delete everything.
    InternalScanner s = r.getScanner(new Scan());
    do {
      List<KeyValue> results = new ArrayList<KeyValue>();
      boolean result = s.next(results);
      // NOTE(review): results.get(0) is called before checking `result`; if the
      // scanner ever returns an empty batch this throws IndexOutOfBoundsException.
      // Presumably every batch here is non-empty until exhaustion — verify.
      r.delete(new Delete(results.get(0).getRow()), null, false);
      if (!result) break;
    } while(true);
    s.close();
    // Flush
    r.flushcache();
    // Major compact.
    r.compactStores(true);
    s = r.getScanner(new Scan());
    int counter = 0;
    do {
      List<KeyValue> results = new ArrayList<KeyValue>();
      boolean result = s.next(results);
      if (!result) break;
      counter++;
    } while(true);
    // Everything was deleted, so the post-compaction scan must see nothing.
    assertEquals(0, counter);
  }

  /**
   * Run compaction and flushing memstore
   * Assert deletes get cleaned up.
   * @throws Exception
   */
  public void testMajorCompaction() throws Exception {
    majorCompaction();
  }

  /** Major compaction with PREFIX encoding applied in the block cache only. */
  public void testDataBlockEncodingInCacheOnly() throws Exception {
    majorCompactionWithDataBlockEncoding(true);
  }

  /** Major compaction with PREFIX encoding applied both on disk and in cache. */
  public void testDataBlockEncodingEverywhere() throws Exception {
    majorCompactionWithDataBlockEncoding(false);
  }

  /**
   * Runs {@link #majorCompaction()} with every store's data block encoder
   * temporarily replaced, then restores the original encoders.
   *
   * @param inCacheOnly if true, encode only in the block cache (disk stays
   *          NONE); if false, use PREFIX encoding on disk as well.
   */
  public void majorCompactionWithDataBlockEncoding(boolean inCacheOnly)
      throws Exception {
    // Remember the original encoder per store so it can be restored below.
    Map<Store, HFileDataBlockEncoder> replaceBlockCache =
        new HashMap<Store, HFileDataBlockEncoder>();
    for (Entry<byte[], Store> pair : r.getStores().entrySet()) {
      Store store = pair.getValue();
      HFileDataBlockEncoder blockEncoder = store.getDataBlockEncoder();
      replaceBlockCache.put(pair.getValue(), blockEncoder);
      final DataBlockEncoding inCache = DataBlockEncoding.PREFIX;
      final DataBlockEncoding onDisk = inCacheOnly ? DataBlockEncoding.NONE :
          inCache;
      store.setDataBlockEncoderInTest(new HFileDataBlockEncoderImpl(
          onDisk, inCache));
    }

    majorCompaction();

    // restore settings
    for (Entry<Store, HFileDataBlockEncoder> entry :
        replaceBlockCache.entrySet()) {
      entry.getKey().setDataBlockEncoderInTest(entry.getValue());
    }
  }

  /**
   * Core major-compaction scenario shared by several tests: load several
   * store files, verify version trimming, delete a row family, major compact,
   * verify the deleted row is gone from the compacted file, then lower TTL
   * and compact again to wipe everything.
   */
  private void majorCompaction() throws Exception {
    createStoreFile(r);
    for (int i = 0; i < compactionThreshold; i++) {
      createStoreFile(r);
    }
    // Add more content.
    addContent(new HRegionIncommon(r), Bytes.toString(COLUMN_FAMILY));

    // Now there are about 5 versions of each column.
    // Default is that there only 3 (MAXVERSIONS) versions allowed per column.
    //
    // Assert == 3 when we ask for versions.
    Result result = r.get(new Get(STARTROW).addFamily(COLUMN_FAMILY_TEXT).setMaxVersions(100), null);
    assertEquals(compactionThreshold, result.size());

    // see if CompactionProgress is in place but null
    for (Store store: this.r.stores.values()) {
      assertNull(store.getCompactionProgress());
    }

    r.flushcache();
    r.compactStores(true);

    // see if CompactionProgress has done its thing on at least one store
    int storeCount = 0;
    for (Store store: this.r.stores.values()) {
      CompactionProgress progress = store.getCompactionProgress();
      if( progress != null ) {
        ++storeCount;
        assertTrue(progress.currentCompactedKVs > 0);
        assertTrue(progress.totalCompactingKVs > 0);
      }
      // NOTE(review): this assert sits inside the loop, so it fires on the
      // first store even when a later store would have progress. It looks like
      // it was meant to run after the loop — confirm intent before moving it.
      assertTrue(storeCount > 0);
    }

    // look at the second row
    // Increment the least significant character so we get to next row.
    byte [] secondRowBytes = START_KEY.getBytes(HConstants.UTF8_ENCODING);
    secondRowBytes[START_KEY_BYTES.length - 1]++;

    // Always 3 versions if that is what max versions is.
    result = r.get(new Get(secondRowBytes).addFamily(COLUMN_FAMILY_TEXT).
        setMaxVersions(100), null);
    LOG.debug("Row " + Bytes.toStringBinary(secondRowBytes) + " after " +
        "initial compaction: " + result);
    assertEquals("Invalid number of versions of row "
        + Bytes.toStringBinary(secondRowBytes) + ".", compactionThreshold,
        result.size());

    // Now add deletes to memstore and then flush it.
    // That will put us over
    // the compaction threshold of 3 store files.  Compacting these store files
    // should result in a compacted store file that has no references to the
    // deleted row.
    LOG.debug("Adding deletes to memstore and flushing");
    Delete delete = new Delete(secondRowBytes, System.currentTimeMillis(), null);
    byte [][] famAndQf = {COLUMN_FAMILY, null};
    delete.deleteFamily(famAndQf[0]);
    r.delete(delete, null, true);

    // Assert deleted.
    result = r.get(new Get(secondRowBytes).addFamily(COLUMN_FAMILY_TEXT).setMaxVersions(100), null );
    assertTrue("Second row should have been deleted", result.isEmpty());

    r.flushcache();

    result = r.get(new Get(secondRowBytes).addFamily(COLUMN_FAMILY_TEXT).setMaxVersions(100), null );
    assertTrue("Second row should have been deleted", result.isEmpty());

    // Add a bit of data and flush.  Start adding at 'bbb'.
    createSmallerStoreFile(this.r);
    r.flushcache();
    // Assert that the second row is still deleted.
    result = r.get(new Get(secondRowBytes).addFamily(COLUMN_FAMILY_TEXT).setMaxVersions(100), null );
    assertTrue("Second row should still be deleted", result.isEmpty());

    // Force major compaction.
    r.compactStores(true);
    // Major compaction collapses the store down to a single file.
    assertEquals(r.getStore(COLUMN_FAMILY_TEXT).getStorefiles().size(), 1);

    result = r.get(new Get(secondRowBytes).addFamily(COLUMN_FAMILY_TEXT).setMaxVersions(100), null );
    assertTrue("Second row should still be deleted", result.isEmpty());

    // Make sure the store files do have some 'aaa' keys in them -- exactly 3.
    // Also, that compacted store files do not have any secondRowBytes because
    // they were deleted.
    verifyCounts(3,0);

    // Multiple versions allowed for an entry, so the delete isn't enough
    // Lower TTL and expire to ensure that all our entries have been wiped
    final int ttl = 1000;
    for (Store store: this.r.stores.values()) {
      Store.ScanInfo old = store.scanInfo;
      Store.ScanInfo si = new Store.ScanInfo(old.getFamily(),
          old.getMinVersions(), old.getMaxVersions(), ttl,
          old.getKeepDeletedCells(), 0, old.getComparator());
      store.scanInfo = si;
    }
    // Sleep past the new 1s TTL so everything is expired for the next compact.
    Thread.sleep(1000);

    r.compactStores(true);
    int count = count();
    assertEquals("Should not see anything after TTL has expired", 0, count);
  }

  /**
   * Verifies time-based (periodic) major compaction: the computed next major
   * compaction time is deterministic and within the configured jitter, and a
   * regular compaction request is upgraded to major once the period elapses.
   */
  public void testTimeBasedMajorCompaction() throws Exception {
    // create 2 storefiles and force a major compaction to reset the time
    int delay = 10 * 1000; // 10 sec
    float jitterPct = 0.20f; // 20%
    conf.setLong(HConstants.MAJOR_COMPACTION_PERIOD, delay);
    conf.setFloat("hbase.hregion.majorcompaction.jitter", jitterPct);

    Store s = r.getStore(COLUMN_FAMILY);
    try {
      createStoreFile(r);
      createStoreFile(r);
      r.compactStores(true);

      // add one more file & verify that a regular compaction won't work
      createStoreFile(r);
      r.compactStores(false);
      assertEquals(2, s.getStorefilesCount());

      // ensure that major compaction time is deterministic
      long mcTime = s.getNextMajorCompactTime();
      for (int i = 0; i < 10; ++i) {
        assertEquals(mcTime, s.getNextMajorCompactTime());
      }

      // ensure that the major compaction time is within the variance
      long jitter = Math.round(delay * jitterPct);
      assertTrue(delay - jitter <= mcTime && mcTime <= delay + jitter);

      // wait until the time-based compaction interval
      Thread.sleep(mcTime);

      // trigger a compaction request and ensure that it's upgraded to major
      r.compactStores(false);
      assertEquals(1, s.getStorefilesCount());
    } finally {
      // reset the timed compaction settings
      conf.setLong(HConstants.MAJOR_COMPACTION_PERIOD, 1000*60*60*24);
      conf.setFloat("hbase.hregion.majorcompaction.jitter", 0.20F);
      // run a major to reset the cache
      createStoreFile(r);
      r.compactStores(true);
      assertEquals(1, s.getStorefilesCount());
    }
  }

  /** Minor compaction after a whole-row delete; marker must survive. */
  public void testMinorCompactionWithDeleteRow() throws Exception {
    Delete deleteRow = new Delete(secondRowBytes);
    testMinorCompactionWithDelete(deleteRow);
  }

  /** Minor compaction after deleting all versions of one column. */
  public void testMinorCompactionWithDeleteColumn1() throws Exception {
    Delete dc = new Delete(secondRowBytes);
    /* delete all timestamps in the column */
    dc.deleteColumns(fam2, col2);
    testMinorCompactionWithDelete(dc);
  }

  /** Minor compaction after deleting only the latest version of one column. */
  public void testMinorCompactionWithDeleteColumn2() throws Exception {
    Delete dc = new Delete(secondRowBytes);
    dc.deleteColumn(fam2, col2);
    /* compactionThreshold is 3. The table has 4 versions: 0, 1, 2, and 3.
     * we only delete the latest version. One might expect to see only
     * versions 1 and 2. HBase differs, and gives us 0, 1 and 2.
     * This is okay as well. Since there was no compaction done before the
     * delete, version 0 seems to stay on.
     */
    //testMinorCompactionWithDelete(dc, 2);
    testMinorCompactionWithDelete(dc, 3);
  }

  /** Minor compaction after a column-family delete. */
  public void testMinorCompactionWithDeleteColumnFamily() throws Exception {
    Delete deleteCF = new Delete(secondRowBytes);
    deleteCF.deleteFamily(fam2);
    testMinorCompactionWithDelete(deleteCF);
  }

  /** Minor compaction after deleting versions up to and including ts=2. */
  public void testMinorCompactionWithDeleteVersion1() throws Exception {
    Delete deleteVersion = new Delete(secondRowBytes);
    deleteVersion.deleteColumns(fam2, col2, 2);
    /* compactionThreshold is 3. The table has 4 versions: 0, 1, 2, and 3.
     * We delete versions 0 ... 2. So, we still have one remaining.
     */
    testMinorCompactionWithDelete(deleteVersion, 1);
  }

  /** Minor compaction after deleting exactly version ts=1. */
  public void testMinorCompactionWithDeleteVersion2() throws Exception {
    Delete deleteVersion = new Delete(secondRowBytes);
    deleteVersion.deleteColumn(fam2, col2, 1);
    /*
     * the table has 4 versions: 0, 1, 2, and 3.
     * We delete 1.
     * Should have 3 remaining.
     */
    testMinorCompactionWithDelete(deleteVersion, 3);
  }

  /*
   * A helper function to test the minor compaction algorithm. We check that
   * the delete markers are left behind. Takes delete as an argument, which
   * can be any delete (row, column, columnfamliy etc), that essentially
   * deletes row2 and column2. row1 and column1 should be undeleted
   */
  private void testMinorCompactionWithDelete(Delete delete) throws Exception {
    testMinorCompactionWithDelete(delete, 0);
  }

  /**
   * Same as {@link #testMinorCompactionWithDelete(Delete)} but with an
   * explicit expected number of versions remaining for (row2, fam2, col2)
   * after the delete has been applied.
   */
  private void testMinorCompactionWithDelete(Delete delete, int expectedResultsAfterDelete) throws Exception {
    HRegionIncommon loader = new HRegionIncommon(r);
    // Load compactionThreshold+1 flushes, each writing rows firstRow..thirdRow
    // into both families/columns at timestamp i.
    for (int i = 0; i < compactionThreshold + 1; i++) {
      addContent(loader, Bytes.toString(fam1), Bytes.toString(col1), firstRowBytes, thirdRowBytes, i);
      addContent(loader, Bytes.toString(fam1), Bytes.toString(col2), firstRowBytes, thirdRowBytes, i);
      addContent(loader, Bytes.toString(fam2), Bytes.toString(col1), firstRowBytes, thirdRowBytes, i);
      addContent(loader, Bytes.toString(fam2), Bytes.toString(col2), firstRowBytes, thirdRowBytes, i);
      r.flushcache();
    }

    Result result = r.get(new Get(firstRowBytes).addColumn(fam1, col1).setMaxVersions(100), null);
    assertEquals(compactionThreshold, result.size());
    result = r.get(new Get(secondRowBytes).addColumn(fam2, col2).setMaxVersions(100), null);
    assertEquals(compactionThreshold, result.size());

    // Now add deletes to memstore and then flush it.  That will put us over
    // the compaction threshold of 3 store files.  Compacting these store files
    // should result in a compacted store file that has no references to the
    // deleted row.
    r.delete(delete, null, true);

    // Make sure that we have only deleted family2 from secondRowBytes
    result = r.get(new Get(secondRowBytes).addColumn(fam2, col2).setMaxVersions(100), null);
    assertEquals(expectedResultsAfterDelete, result.size());
    // but we still have firstrow
    result = r.get(new Get(firstRowBytes).addColumn(fam1, col1).setMaxVersions(100), null);
    assertEquals(compactionThreshold, result.size());

    r.flushcache();
    // should not change anything.
    // Let us check again

    // Make sure that we have only deleted family2 from secondRowBytes
    result = r.get(new Get(secondRowBytes).addColumn(fam2, col2).setMaxVersions(100), null);
    assertEquals(expectedResultsAfterDelete, result.size());
    // but we still have firstrow
    result = r.get(new Get(firstRowBytes).addColumn(fam1, col1).setMaxVersions(100), null);
    assertEquals(compactionThreshold, result.size());

    // do a compaction
    Store store2 = this.r.stores.get(fam2);
    int numFiles1 = store2.getStorefiles().size();
    assertTrue("Was expecting to see 4 store files", numFiles1 > compactionThreshold); // > 3
    store2.compactRecentForTesting(compactionThreshold);   // = 3
    int numFiles2 = store2.getStorefiles().size();
    // Check that we did compact
    assertTrue("Number of store files should go down", numFiles1 > numFiles2);
    // Check that it was a minor compaction.
    assertTrue("Was not supposed to be a major compaction", numFiles2 > 1);

    // Make sure that we have only deleted family2 from secondRowBytes
    result = r.get(new Get(secondRowBytes).addColumn(fam2, col2).setMaxVersions(100), null);
    assertEquals(expectedResultsAfterDelete, result.size());
    // but we still have firstrow
    result = r.get(new Get(firstRowBytes).addColumn(fam1, col1).setMaxVersions(100), null);
    assertEquals(compactionThreshold, result.size());
  }

  /**
   * Scans every store file of the test column family and asserts that exactly
   * countRow1 cells belong to STARTROW and countRow2 cells to secondRowBytes.
   */
  private void verifyCounts(int countRow1, int countRow2) throws Exception {
    int count1 = 0;
    int count2 = 0;
    for (StoreFile f: this.r.stores.get(COLUMN_FAMILY_TEXT).getStorefiles()) {
      HFileScanner scanner = f.getReader().getScanner(false, false);
      scanner.seekTo();
      do {
        byte [] row = scanner.getKeyValue().getRow();
        if (Bytes.equals(row, STARTROW)) {
          count1++;
        } else if(Bytes.equals(row, secondRowBytes)) {
          count2++;
        }
      } while(scanner.next());
    }
    assertEquals(countRow1,count1);
    assertEquals(countRow2,count2);
  }

  /**
   * Verify that you can stop a long-running compaction
   * (used during RS shutdown)
   * @throws Exception
   */
  public void testInterruptCompaction() throws Exception {
    assertEquals(0, count());

    // lower the polling interval for this test
    int origWI = Store.closeCheckInterval;
    Store.closeCheckInterval = 10*1000; // 10 KB

    try {
      // Create a couple store files w/ 15KB (over 10KB interval)
      int jmax = (int) Math.ceil(15.0/compactionThreshold);
      byte [] pad = new byte[1000]; // 1 KB chunk
      for (int i = 0; i < compactionThreshold; i++) {
        HRegionIncommon loader = new HRegionIncommon(r);
        Put p = new Put(Bytes.add(STARTROW, Bytes.toBytes(i)));
        p.setWriteToWAL(false);
        for (int j = 0; j < jmax; j++) {
          p.add(COLUMN_FAMILY, Bytes.toBytes(j), pad);
        }
        addContent(loader, Bytes.toString(COLUMN_FAMILY));
        loader.put(p);
        loader.flushcache();
      }

      // Spy the region so that the compaction-prep hook disables writes,
      // simulating a shutdown arriving mid-compaction.
      HRegion spyR = spy(r);
      doAnswer(new Answer() {
        public Object answer(InvocationOnMock invocation) throws Throwable {
          r.writestate.writesEnabled = false;
          return invocation.callRealMethod();
        }
      }).when(spyR).doRegionCompactionPrep();

      // force a minor compaction, but not before requesting a stop
      spyR.compactStores();

      // ensure that the compaction stopped, all old files are intact,
      Store s = r.stores.get(COLUMN_FAMILY);
      assertEquals(compactionThreshold, s.getStorefilesCount());
      assertTrue(s.getStorefilesSize() > 15*1000);
      // and no new store files persisted past compactStores()
      FileStatus[] ls = FileSystem.get(conf).listStatus(r.getTmpDir());
      assertEquals(0, ls.length);

    } finally {
      // don't mess up future tests
      r.writestate.writesEnabled = true;
      Store.closeCheckInterval = origWI;

      // Delete all Store information once done using
      for (int i = 0; i < compactionThreshold; i++) {
        Delete delete = new Delete(Bytes.add(STARTROW, Bytes.toBytes(i)));
        byte [][] famAndQf = {COLUMN_FAMILY, null};
        delete.deleteFamily(famAndQf[0]);
        r.delete(delete, null, true);
      }
      r.flushcache();

      // Multiple versions allowed for an entry, so the delete isn't enough
      // Lower TTL and expire to ensure that all our entries have been wiped
      final int ttl = 1000;
      for (Store store: this.r.stores.values()) {
        Store.ScanInfo old = store.scanInfo;
        Store.ScanInfo si = new Store.ScanInfo(old.getFamily(),
            old.getMinVersions(), old.getMaxVersions(), ttl,
            old.getKeepDeletedCells(), 0, old.getComparator());
        store.scanInfo = si;
      }
      Thread.sleep(ttl);

      r.compactStores(true);
      assertEquals(0, count());
    }
  }

  /** Counts all cells across the test column family's store files. */
  private int count() throws IOException {
    int count = 0;
    for (StoreFile f: this.r.stores.
        get(COLUMN_FAMILY_TEXT).getStorefiles()) {
      HFileScanner scanner = f.getReader().getScanner(false, false);
      if (!scanner.seekTo()) {
        continue;
      }
      do {
        count++;
      } while(scanner.next());
    }
    return count;
  }

  /** Loads default content into the default column family and flushes. */
  private void createStoreFile(final HRegion region) throws IOException {
    createStoreFile(region, Bytes.toString(COLUMN_FAMILY));
  }

  /** Loads default content into the given family and flushes it to a file. */
  private void createStoreFile(final HRegion region, String family) throws IOException {
    HRegionIncommon loader = new HRegionIncommon(region);
    addContent(loader, family);
    loader.flushcache();
  }

  /** Like createStoreFile but starts loading at row 'bbb' (fewer rows). */
  private void createSmallerStoreFile(final HRegion region) throws IOException {
    HRegionIncommon loader = new HRegionIncommon(region);
    addContent(loader, Bytes.toString(COLUMN_FAMILY), ("" + "bbb").getBytes(),
        null);
    loader.flushcache();
  }

  /**
   * Compacts several store files manually, overwrites the compaction product
   * with garbage, and asserts that completing the compaction fails while the
   * corrupt file remains in the tmp location (never moved into the store).
   */
  public void testCompactionWithCorruptResult() throws Exception {
    int nfiles = 10;
    for (int i = 0; i < nfiles; i++) {
      createStoreFile(r);
    }
    Store store = r.getStore(COLUMN_FAMILY);

    List<StoreFile> storeFiles = store.getStorefiles();
    long maxId = StoreFile.getMaxSequenceIdInList(storeFiles, true);
    Compactor tool = new Compactor(this.conf);

    StoreFile.Writer compactedFile = tool.compactForTesting(store, this.conf, storeFiles, false, maxId);

    // Now lets corrupt the compacted file.
    FileSystem fs = FileSystem.get(conf);
    Path origPath = compactedFile.getPath();
    Path homedir = store.getHomedir();
    Path dstPath = new Path(homedir, origPath.getName());
    // Re-create (overwrite=true) the compaction product with junk bytes.
    FSDataOutputStream stream = fs.create(origPath, null, true, 512, (short) 3,
        (long) 1024, null);
    stream.writeChars("CORRUPT FILE!!!!");
    stream.close();

    try {
      store.completeCompaction(storeFiles, compactedFile);
    } catch (Exception e) {
      // The complete compaction should fail and the corrupt file should remain
      // in the 'tmp' directory;
      assert (fs.exists(origPath));
      assert (!fs.exists(dstPath));
      System.out.println("testCompactionWithCorruptResult Passed");
      return;
    }
    fail("testCompactionWithCorruptResult failed since no exception was" +
        "thrown while completing a corrupt file");
  }

  /**
   * Test for HBASE-5920 - Test user requested major compactions always occurring
   */
  public void testNonUserMajorCompactionRequest() throws Exception {
    Store store = r.getStore(COLUMN_FAMILY);
    createStoreFile(r);
    // One past MAX_FILES_TO_COMPACT so a system major would be demoted.
    for (int i = 0; i < MAX_FILES_TO_COMPACT + 1; i++) {
      createStoreFile(r);
    }
    store.triggerMajorCompaction();

    CompactionRequest request = store.requestCompaction(Store.NO_PRIORITY, null);
    assertNotNull("Expected to receive a compaction request", request);
    assertEquals(
      "System-requested major compaction should not occur if there are too many store files",
      false,
      request.isMajor());
  }

  /**
   * Test for HBASE-5920
   */
  public void testUserMajorCompactionRequest() throws IOException{
    Store store = r.getStore(COLUMN_FAMILY);
    createStoreFile(r);
    for (int i = 0; i < MAX_FILES_TO_COMPACT + 1; i++) {
      createStoreFile(r);
    }
    store.triggerMajorCompaction();
    CompactionRequest request = store.requestCompaction(Store.PRIORITY_USER, null);
    assertNotNull("Expected to receive a compaction request", request);
    assertEquals(
      "User-requested major compaction should always occur, even if there are too many store files",
      true, 
      request.isMajor());
  }

  /**
   * Create a custom compaction request and be sure that we can track it through the queue, knowing
   * when the compaction is completed.
   */
  public void testTrackingCompactionRequest() throws Exception {
    // setup a compact/split thread on a mock server
    HRegionServer mockServer = Mockito.mock(HRegionServer.class);
    Mockito.when(mockServer.getConfiguration()).thenReturn(r.getConf());
    CompactSplitThread thread = new CompactSplitThread(mockServer);
    Mockito.when(mockServer.getCompactSplitThread()).thenReturn(thread);
    // simple stop for the metrics - we ignore any updates in the test
    RegionServerMetrics mockMetrics = Mockito.mock(RegionServerMetrics.class);
    Mockito.when(mockServer.getMetrics()).thenReturn(mockMetrics);

    // setup a region/store with some files
    Store store = r.getStore(COLUMN_FAMILY);
    createStoreFile(r);
    for (int i = 0; i < MAX_FILES_TO_COMPACT + 1; i++) {
      createStoreFile(r);
    }

    CountDownLatch latch = new CountDownLatch(1);
    TrackableCompactionRequest request = new TrackableCompactionRequest(r, store, latch);
    thread.requestCompaction(r, store, "test custom comapction", Store.PRIORITY_USER, request);
    // wait for the latch to complete.
    latch.await();

    thread.interruptIfNecessary();
  }

  /**
   * Submit one trackable compaction request per store in a single call and
   * wait (via a shared latch) until all of them have completed.
   */
  public void testMultipleCustomCompactionRequests() throws Exception {
    // setup a compact/split thread on a mock server
    HRegionServer mockServer = Mockito.mock(HRegionServer.class);
    Mockito.when(mockServer.getConfiguration()).thenReturn(r.getConf());
    CompactSplitThread thread = new CompactSplitThread(mockServer);
    Mockito.when(mockServer.getCompactSplitThread()).thenReturn(thread);
    // simple stop for the metrics - we ignore any updates in the test
    RegionServerMetrics mockMetrics = Mockito.mock(RegionServerMetrics.class);
    Mockito.when(mockServer.getMetrics()).thenReturn(mockMetrics);

    // setup a region/store with some files
    int numStores = r.getStores().size();
    List<CompactionRequest> requests = new ArrayList<CompactionRequest>(numStores);
    CountDownLatch latch = new CountDownLatch(numStores);
    // create some store files and setup requests for each store on which we want to do a
    // compaction
    for (Store store : r.getStores().values()) {
      createStoreFile(r, store.getColumnFamilyName());
      createStoreFile(r, store.getColumnFamilyName());
      createStoreFile(r, store.getColumnFamilyName());
      requests.add(new TrackableCompactionRequest(r, store, latch));
    }

    thread.requestCompaction(r, "test mulitple custom comapctions", Store.PRIORITY_USER,
      Collections.unmodifiableList(requests));

    // wait for the latch to complete.
    latch.await();

    thread.interruptIfNecessary();
  }

  /**
   * Simple {@link CompactionRequest} on which you can wait until the requested compaction finishes.
   */
  public static class TrackableCompactionRequest extends CompactionRequest {
    // Counted down once when the compaction's run() completes.
    private CountDownLatch done;

    /**
     * Constructor for a custom compaction. Uses the setXXX methods to update the state of the
     * compaction before being used.
     */
    public TrackableCompactionRequest(HRegion region, Store store, CountDownLatch finished) {
      super(region, store, Store.PRIORITY_USER);
      this.done = finished;
    }

    @Override
    public void run() {
      super.run();
      this.done.countDown();
    }
  }

  @org.junit.Rule
  public org.apache.hadoop.hbase.ResourceCheckerJUnitRule cu =
    new org.apache.hadoop.hbase.ResourceCheckerJUnitRule();
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.cassandra.db.compaction;

import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.junit.Test;

import org.apache.cassandra.cql3.CQLTester;
import org.apache.cassandra.cql3.UntypedResultSet;

import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

/**
 * CQL-level compaction tests: verifies that minor compactions are triggered
 * (or correctly suppressed) for each compaction strategy, that enabling and
 * disabling auto-compaction via nodetool-style calls and ALTER TABLE behaves
 * as expected, and that locally-set (JMX-style) compaction parameters
 * interact correctly with schema changes.
 */
public class CompactionsCQLTest extends CQLTester
{
    // Maximum time (ms) to poll system.compaction_history for a minor compaction.
    public static final int SLEEP_TIME = 5000;

    /** STCS with min_threshold=2: two flushed sstables must trigger a minor compaction. */
    @Test
    public void testTriggerMinorCompactionSTCS() throws Throwable
    {
        createTable("CREATE TABLE %s (id text PRIMARY KEY)  WITH compaction = {'class':'SizeTieredCompactionStrategy', 'min_threshold':2};");
        assertTrue(getCurrentColumnFamilyStore().getCompactionStrategyManager().isEnabled());
        execute("insert into %s (id) values ('1')");
        flush();
        execute("insert into %s (id) values ('1')");
        flush();
        waitForMinor(KEYSPACE, currentTable(), SLEEP_TIME, true);
    }

    /** LCS: two flushed sstables must trigger a minor compaction. */
    @Test
    public void testTriggerMinorCompactionLCS() throws Throwable
    {
        createTable("CREATE TABLE %s (id text PRIMARY KEY) WITH compaction = {'class':'LeveledCompactionStrategy', 'sstable_size_in_mb':1, 'fanout_size':5};");
        assertTrue(getCurrentColumnFamilyStore().getCompactionStrategyManager().isEnabled());
        execute("insert into %s (id) values ('1')");
        flush();
        execute("insert into %s (id) values ('1')");
        flush();
        waitForMinor(KEYSPACE, currentTable(), SLEEP_TIME, true);
    }

    /** DTCS with min_threshold=2: two flushed sstables must trigger a minor compaction. */
    @Test
    public void testTriggerMinorCompactionDTCS() throws Throwable
    {
        createTable("CREATE TABLE %s (id text PRIMARY KEY) WITH compaction = {'class':'DateTieredCompactionStrategy', 'min_threshold':2};");
        assertTrue(getCurrentColumnFamilyStore().getCompactionStrategyManager().isEnabled());
        execute("insert into %s (id) values ('1')");
        flush();
        execute("insert into %s (id) values ('1')");
        flush();
        waitForMinor(KEYSPACE, currentTable(), SLEEP_TIME, true);
    }

    /** TWCS with min_threshold=2: two flushed sstables must trigger a minor compaction. */
    @Test
    public void testTriggerMinorCompactionTWCS() throws Throwable
    {
        createTable("CREATE TABLE %s (id text PRIMARY KEY) WITH compaction = {'class':'TimeWindowCompactionStrategy', 'min_threshold':2};");
        assertTrue(getCurrentColumnFamilyStore().getCompactionStrategyManager().isEnabled());
        execute("insert into %s (id) values ('1')");
        flush();
        execute("insert into %s (id) values ('1')");
        flush();
        waitForMinor(KEYSPACE, currentTable(), SLEEP_TIME, true);
    }

    /** 'enabled':false in the schema must suppress minor compactions. */
    @Test
    public void testTriggerNoMinorCompactionSTCSDisabled() throws Throwable
    {
        createTable("CREATE TABLE %s (id text PRIMARY KEY)  WITH compaction = {'class':'SizeTieredCompactionStrategy', 'min_threshold':2, 'enabled':false};");
        assertFalse(getCurrentColumnFamilyStore().getCompactionStrategyManager().isEnabled());
        execute("insert into %s (id) values ('1')");
        flush();
        execute("insert into %s (id) values ('1')");
        flush();
        waitForMinor(KEYSPACE, currentTable(), SLEEP_TIME, false);
    }

    /** enableAutoCompaction() must re-enable a schema-disabled strategy. */
    @Test
    public void testTriggerMinorCompactionSTCSNodetoolEnabled() throws Throwable
    {
        createTable("CREATE TABLE %s (id text PRIMARY KEY)  WITH compaction = {'class':'SizeTieredCompactionStrategy', 'min_threshold':2, 'enabled':false};");
        assertFalse(getCurrentColumnFamilyStore().getCompactionStrategyManager().isEnabled());
        getCurrentColumnFamilyStore().enableAutoCompaction();
        assertTrue(getCurrentColumnFamilyStore().getCompactionStrategyManager().isEnabled());
        execute("insert into %s (id) values ('1')");
        flush();
        execute("insert into %s (id) values ('1')");
        flush();
        waitForMinor(KEYSPACE, currentTable(), SLEEP_TIME, true);
    }

    /** disableAutoCompaction() must suppress minor compactions despite the schema. */
    @Test
    public void testTriggerNoMinorCompactionSTCSNodetoolDisabled() throws Throwable
    {
        createTable("CREATE TABLE %s (id text PRIMARY KEY)  WITH compaction = {'class':'SizeTieredCompactionStrategy', 'min_threshold':2, 'enabled':true};");
        assertTrue(getCurrentColumnFamilyStore().getCompactionStrategyManager().isEnabled());
        getCurrentColumnFamilyStore().disableAutoCompaction();
        assertFalse(getCurrentColumnFamilyStore().getCompactionStrategyManager().isEnabled());
        execute("insert into %s (id) values ('1')");
        flush();
        execute("insert into %s (id) values ('1')");
        flush();
        waitForMinor(KEYSPACE, currentTable(), SLEEP_TIME, false);
    }

    /** ALTER TABLE with 'enabled': false must suppress minor compactions. */
    @Test
    public void testTriggerNoMinorCompactionSTCSAlterTable() throws Throwable
    {
        createTable("CREATE TABLE %s (id text PRIMARY KEY)  WITH compaction = {'class':'SizeTieredCompactionStrategy', 'min_threshold':2, 'enabled':true};");
        assertTrue(getCurrentColumnFamilyStore().getCompactionStrategyManager().isEnabled());
        execute("ALTER TABLE %s WITH compaction = {'class': 'SizeTieredCompactionStrategy', 'enabled': false}");
        assertFalse(getCurrentColumnFamilyStore().getCompactionStrategyManager().isEnabled());
        execute("insert into %s (id) values ('1')");
        flush();
        execute("insert into %s (id) values ('1')");
        flush();
        waitForMinor(KEYSPACE, currentTable(), SLEEP_TIME, false);
    }

    /** ALTER TABLE with 'enabled': true must re-enable compaction. */
    @Test
    public void testTriggerMinorCompactionSTCSAlterTable() throws Throwable
    {
        createTable("CREATE TABLE %s (id text PRIMARY KEY)  WITH compaction = {'class':'SizeTieredCompactionStrategy', 'min_threshold':2, 'enabled':false};");
        assertFalse(getCurrentColumnFamilyStore().getCompactionStrategyManager().isEnabled());
        execute("ALTER TABLE %s WITH compaction = {'class': 'SizeTieredCompactionStrategy', 'min_threshold': 2, 'enabled': true}");
        assertTrue(getCurrentColumnFamilyStore().getCompactionStrategyManager().isEnabled());
        execute("insert into %s (id) values ('1')");
        flush();
        execute("insert into %s (id) values ('1')");
        flush();
        waitForMinor(KEYSPACE, currentTable(), SLEEP_TIME, true);
    }

    /**
     * A locally-set compaction strategy must survive unrelated ALTERs but be
     * replaced when compaction options are altered through the schema.
     */
    @Test
    public void testSetLocalCompactionStrategy() throws Throwable
    {
        createTable("CREATE TABLE %s (id text PRIMARY KEY)");
        Map<String, String> localOptions = new HashMap<>();
        localOptions.put("class", "DateTieredCompactionStrategy");
        getCurrentColumnFamilyStore().setCompactionParameters(localOptions);
        assertTrue(verifyStrategies(getCurrentColumnFamilyStore().getCompactionStrategyManager(), DateTieredCompactionStrategy.class));
        // altering something non-compaction related
        execute("ALTER TABLE %s WITH gc_grace_seconds = 1000");
        // should keep the local compaction strat
        assertTrue(verifyStrategies(getCurrentColumnFamilyStore().getCompactionStrategyManager(), DateTieredCompactionStrategy.class));
        // altering a compaction option
        execute("ALTER TABLE %s WITH compaction = {'class':'SizeTieredCompactionStrategy', 'min_threshold':3}");
        // will use the new option
        assertTrue(verifyStrategies(getCurrentColumnFamilyStore().getCompactionStrategyManager(), SizeTieredCompactionStrategy.class));
    }

    /** Locally setting 'enabled':false disables; omitting it re-enables (default). */
    @Test
    public void testSetLocalCompactionStrategyDisable() throws Throwable
    {
        createTable("CREATE TABLE %s (id text PRIMARY KEY)");
        Map<String, String> localOptions = new HashMap<>();
        localOptions.put("class", "DateTieredCompactionStrategy");
        localOptions.put("enabled", "false");
        getCurrentColumnFamilyStore().setCompactionParameters(localOptions);
        assertFalse(getCurrentColumnFamilyStore().getCompactionStrategyManager().isEnabled());
        localOptions.clear();
        localOptions.put("class", "DateTieredCompactionStrategy");
        // localOptions.put("enabled", "true"); - this is default!
        getCurrentColumnFamilyStore().setCompactionParameters(localOptions);
        assertTrue(getCurrentColumnFamilyStore().getCompactionStrategyManager().isEnabled());
    }

    /** Setting local compaction parameters re-enables a disabled strategy. */
    @Test
    public void testSetLocalCompactionStrategyEnable() throws Throwable
    {
        createTable("CREATE TABLE %s (id text PRIMARY KEY)");
        Map<String, String> localOptions = new HashMap<>();
        localOptions.put("class", "DateTieredCompactionStrategy");

        getCurrentColumnFamilyStore().disableAutoCompaction();
        assertFalse(getCurrentColumnFamilyStore().getCompactionStrategyManager().isEnabled());

        getCurrentColumnFamilyStore().setCompactionParameters(localOptions);
        assertTrue(getCurrentColumnFamilyStore().getCompactionStrategyManager().isEnabled());
    }

    /** Passing an option that is invalid for the strategy must throw IAE. */
    @Test(expected = IllegalArgumentException.class)
    public void testBadLocalCompactionStrategyOptions()
    {
        createTable("CREATE TABLE %s (id text PRIMARY KEY)");
        Map<String, String> localOptions = new HashMap<>();
        localOptions.put("class","SizeTieredCompactionStrategy");
        localOptions.put("sstable_size_in_mb","1234"); // not for STCS
        getCurrentColumnFamilyStore().setCompactionParameters(localOptions);
    }

    /**
     * Returns true iff the manager has at least one strategy list and every
     * strategy in every list is exactly of the expected class.
     */
    public boolean verifyStrategies(CompactionStrategyManager manager, Class<? extends AbstractCompactionStrategy> expected)
    {
        boolean found = false;
        for (List<AbstractCompactionStrategy> strategies : manager.getStrategies())
        {
            if (!strategies.stream().allMatch((strategy) -> strategy.getClass().equals(expected)))
                return false;
            found = true;
        }
        return found;
    }

    /**
     * Polls system.compaction_history (every 100ms, up to maxWaitTime ms) for a
     * compaction on the given keyspace/table. If shouldFind is true, fails when
     * none appears within the window; if false, fails as soon as one appears.
     */
    private void waitForMinor(String keyspace, String cf, long maxWaitTime, boolean shouldFind) throws Throwable
    {
        long startTime = System.currentTimeMillis();
        while (System.currentTimeMillis() - startTime < maxWaitTime)
        {
            UntypedResultSet res = execute("SELECT * FROM system.compaction_history");
            for (UntypedResultSet.Row r : res)
            {
                if (r.getString("keyspace_name").equals(keyspace) && r.getString("columnfamily_name").equals(cf))
                    if (shouldFind)
                        return;
                    else
                        fail("Found minor compaction");
            }
            Thread.sleep(100);
        }
        if (shouldFind)
            fail("No minor compaction triggered in "+maxWaitTime+"ms");
    }
}
/**
 * Copyright 2011 Steve Coughlan.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.bushstar.htmlcoinj.core;

import com.bushstar.htmlcoinj.params.MainNetParams;
import com.bushstar.htmlcoinj.params.UnitTestParams;
import com.bushstar.htmlcoinj.store.BlockStore;
import com.bushstar.htmlcoinj.store.MemoryBlockStore;
import org.junit.Before;
import org.junit.Test;
import org.spongycastle.util.encoders.Hex;
import java.io.ByteArrayOutputStream;
import java.nio.ByteBuffer;
import java.util.Arrays;
import static com.bushstar.htmlcoinj.utils.TestUtils.createFakeBlock;
import static com.bushstar.htmlcoinj.utils.TestUtils.createFakeTx;
import static org.junit.Assert.*;

/**
 * Exercises the lazy-parse / byte-cache behaviour of the serializer: a message
 * deserialized with {@code lazy=true} must defer parsing until a field is
 * touched, and with {@code retain=true} must keep (and reuse) the original
 * wire bytes until the message is mutated.
 */
public class LazyParseByteCacheTest {

    // Full wire-format "tx" message (including the network header) captured
    // from the wire; used to test parsing against MainNet params.
    private final byte[] txMessage = Hex.decode(
            "F9 BE B4 D9 74 78 00 00 00 00 00 00 00 00 00 00" +
            "02 01 00 00 E2 93 CD BE 01 00 00 00 01 6D BD DB" +
            "08 5B 1D 8A F7 51 84 F0 BC 01 FA D5 8D 12 66 E9" +
            "B6 3B 50 88 19 90 E4 B4 0D 6A EE 36 29 00 00 00" +
            "00 8B 48 30 45 02 21 00 F3 58 1E 19 72 AE 8A C7" +
            "C7 36 7A 7A 25 3B C1 13 52 23 AD B9 A4 68 BB 3A" +
            "59 23 3F 45 BC 57 83 80 02 20 59 AF 01 CA 17 D0" +
            "0E 41 83 7A 1D 58 E9 7A A3 1B AE 58 4E DE C2 8D" +
            "35 BD 96 92 36 90 91 3B AE 9A 01 41 04 9C 02 BF" +
            "C9 7E F2 36 CE 6D 8F E5 D9 40 13 C7 21 E9 15 98" +
            "2A CD 2B 12 B6 5D 9B 7D 59 E2 0A 84 20 05 F8 FC" +
            "4E 02 53 2E 87 3D 37 B9 6F 09 D6 D4 51 1A DA 8F" +
            "14 04 2F 46 61 4A 4C 70 C0 F1 4B EF F5 FF FF FF" +
            "FF 02 40 4B 4C 00 00 00 00 00 19 76 A9 14 1A A0" +
            "CD 1C BE A6 E7 45 8A 7A BA D5 12 A9 D9 EA 1A FB" +
            "22 5E 88 AC 80 FA E9 C7 00 00 00 00 19 76 A9 14" +
            "0E AB 5B EA 43 6A 04 84 CF AB 12 48 5E FD A0 B7" +
            "8B 4E CC 52 88 AC 00 00 00 00");

    // A strict interior slice of txMessage, used to sanity-check arrayContains.
    private final byte[] txMessagePart = Hex.decode(
            "08 5B 1D 8A F7 51 84 F0 BC 01 FA D5 8D 12 66 E9" +
            "B6 3B 50 88 19 90 E4 B4 0D 6A EE 36 29 00 00 00" +
            "00 8B 48 30 45 02 21 00 F3 58 1E 19 72 AE 8A C7" +
            "C7 36 7A 7A 25 3B C1 13 52 23 AD B9 A4 68 BB 3A");

    private Wallet wallet;
    private BlockStore blockStore;
    private NetworkParameters unitTestParams;

    // Serialized forms built in setUp(): the bare payload ("...Bytes") and the
    // payload prefixed with the network header ("...BytesWithHeader").
    private byte[] b1Bytes;
    private byte[] b1BytesWithHeader;

    private byte[] tx1Bytes;
    private byte[] tx1BytesWithHeader;

    private byte[] tx2Bytes;
    private byte[] tx2BytesWithHeader;

    // Replaces the block store with a fresh in-memory one.
    private void resetBlockStore() {
        blockStore = new MemoryBlockStore(unitTestParams);
    }

    /**
     * Builds two fake transactions and a fake block containing them, then
     * captures their serialized bytes (with and without header) for the tests.
     */
    @Before
    public void setUp() throws Exception {
        unitTestParams = UnitTestParams.get();
        wallet = new Wallet(unitTestParams);
        wallet.addKey(new ECKey());

        resetBlockStore();

        Transaction tx1 = createFakeTx(unitTestParams,
                Utils.toNanoCoins(2, 0),
                wallet.getKeys().get(0).toAddress(unitTestParams));

        //add a second input so can test granularity of byte cache.
        Transaction prevTx = new Transaction(unitTestParams);
        TransactionOutput prevOut = new TransactionOutput(unitTestParams, prevTx, Utils.toNanoCoins(1, 0),
                wallet.getKeys().get(0).toAddress(unitTestParams));
        prevTx.addOutput(prevOut);
        // Connect it.
        tx1.addInput(prevOut);

        Transaction tx2 = createFakeTx(unitTestParams, Utils.toNanoCoins(1, 0),
                new ECKey().toAddress(unitTestParams));

        Block b1 = createFakeBlock(blockStore, tx1, tx2).block;

        HTMLcoinSerializer bs = new HTMLcoinSerializer(unitTestParams);

        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        bs.serialize(tx1, bos);
        tx1BytesWithHeader = bos.toByteArray();
        tx1Bytes = tx1.htmlcoinSerialize();

        bos.reset();
        bs.serialize(tx2, bos);
        tx2BytesWithHeader = bos.toByteArray();
        tx2Bytes = tx2.htmlcoinSerialize();

        bos.reset();
        bs.serialize(b1, bos);
        b1BytesWithHeader = bos.toByteArray();
        b1Bytes = b1.htmlcoinSerialize();
    }

    /**
     * Sanity-checks the fixtures and the arrayContains helper itself before the
     * real tests rely on them.
     */
    @Test
    public void validateSetup() {
        byte[] b1 = new byte[] {1, 1, 1, 2, 3, 4, 5, 6, 7};
        byte[] b2 = new byte[] {1, 2, 3};
        assertTrue(arrayContains(b1, b2));
        assertTrue(arrayContains(txMessage, txMessagePart));
        assertTrue(arrayContains(tx1BytesWithHeader, tx1Bytes));
        assertTrue(arrayContains(tx2BytesWithHeader, tx2Bytes));
        assertTrue(arrayContains(b1BytesWithHeader, b1Bytes));
        assertTrue(arrayContains(b1BytesWithHeader, tx1Bytes));
        assertTrue(arrayContains(b1BytesWithHeader, tx2Bytes));
        assertFalse(arrayContains(tx1BytesWithHeader, b1Bytes));
    }

    // The four lazy/retain combinations for transactions.
    @Test
    public void testTransactionsLazyRetain() throws Exception {
        testTransaction(MainNetParams.get(), txMessage, false, true, true);
        testTransaction(unitTestParams, tx1BytesWithHeader, false, true, true);
        testTransaction(unitTestParams, tx2BytesWithHeader, false, true, true);
    }

    @Test
    public void testTransactionsLazyNoRetain() throws Exception {
        testTransaction(MainNetParams.get(), txMessage, false, true, false);
        testTransaction(unitTestParams, tx1BytesWithHeader, false, true, false);
        testTransaction(unitTestParams, tx2BytesWithHeader, false, true, false);
    }

    @Test
    public void testTransactionsNoLazyNoRetain() throws Exception {
        testTransaction(MainNetParams.get(), txMessage, false, false, false);
        testTransaction(unitTestParams, tx1BytesWithHeader, false, false, false);
        testTransaction(unitTestParams, tx2BytesWithHeader, false, false, false);
    }

    @Test
    public void testTransactionsNoLazyRetain() throws Exception {
        testTransaction(MainNetParams.get(), txMessage, false, false, true);
        testTransaction(unitTestParams, tx1BytesWithHeader, false, false, true);
        testTransaction(unitTestParams, tx2BytesWithHeader, false, false, true);
    }

    // All four lazy/retain combinations for blocks.
    @Test
    public void testBlockAll() throws Exception {
        testBlock(b1BytesWithHeader, false, false, false);
        testBlock(b1BytesWithHeader, false, true, true);
        testBlock(b1BytesWithHeader, false, true, false);
        testBlock(b1BytesWithHeader, false, false, true);
    }

    /**
     * Drives a block through deserialize/mutate/reserialize cycles, asserting
     * at each step that the parsed state and the byte-cache validity match what
     * the given lazy/retain flags require, and that serialization still matches
     * an eagerly-parsed reference serializer.
     */
    public void testBlock(byte[] blockBytes, boolean isChild, boolean lazy, boolean retain) throws Exception {
        //reference serializer to produce comparison serialization output after changes to
        //message structure.
        HTMLcoinSerializer bsRef = new HTMLcoinSerializer(unitTestParams, false, false);
        ByteArrayOutputStream bos = new ByteArrayOutputStream();

        HTMLcoinSerializer bs = new HTMLcoinSerializer(unitTestParams, lazy, retain);
        Block b1;
        Block bRef;
        b1 = (Block) bs.deserialize(ByteBuffer.wrap(blockBytes));
        bRef = (Block) bsRef.deserialize(ByteBuffer.wrap(blockBytes));

        //verify our reference HTMLcoinSerializer produces matching byte array.
        bos.reset();
        bsRef.serialize(bRef, bos);
        assertTrue(Arrays.equals(bos.toByteArray(), blockBytes));

        //check lazy and retain status survive both before and after a serialization
        assertEquals(!lazy, b1.isParsedTransactions());
        assertEquals(!lazy, b1.isParsedHeader());
        if (b1.isParsedHeader())
            assertEquals(retain, b1.isHeaderBytesValid());
        if (b1.isParsedTransactions())
            assertEquals(retain, b1.isTransactionBytesValid());

        serDeser(bs, b1, blockBytes, null, null);

        assertEquals(!lazy, b1.isParsedTransactions());
        assertEquals(!lazy, b1.isParsedHeader());
        if (b1.isParsedHeader())
            assertEquals(retain, b1.isHeaderBytesValid());
        if (b1.isParsedTransactions())
            assertEquals(retain, b1.isTransactionBytesValid());

        //compare to ref block
        bos.reset();
        bsRef.serialize(bRef, bos);
        serDeser(bs, b1, bos.toByteArray(), null, null);

        //retrieve a value from a child
        b1.getTransactions();
        assertTrue(b1.isParsedTransactions());
        if (b1.getTransactions().size() > 0) {
            assertTrue(b1.isParsedTransactions());
            Transaction tx1 = b1.getTransactions().get(0);
            //this will always be true for all children of a block once they are retrieved.
            //the tx child inputs/outputs may not be parsed however.
            //no longer forced to parse if length not provided.
            //assertEquals(true, tx1.isParsed());
            if (tx1.isParsed())
                assertEquals(retain, tx1.isCached());
            else
                assertTrue(tx1.isCached());

            //does it still match ref block?
            serDeser(bs, b1, bos.toByteArray(), null, null);
        }

        //refresh block
        b1 = (Block) bs.deserialize(ByteBuffer.wrap(blockBytes));
        bRef = (Block) bsRef.deserialize(ByteBuffer.wrap(blockBytes));

        //retrieve a value from header
        b1.getDifficultyTarget();
        assertTrue(b1.isParsedHeader());
        assertEquals(lazy, !b1.isParsedTransactions());

        //does it still match ref block?
        serDeser(bs, b1, bos.toByteArray(), null, null);

        //refresh block
        b1 = (Block) bs.deserialize(ByteBuffer.wrap(blockBytes));
        bRef = (Block) bsRef.deserialize(ByteBuffer.wrap(blockBytes));

        //retrieve a value from a child and header
        b1.getDifficultyTarget();
        assertTrue(b1.isParsedHeader());
        assertEquals(lazy, !b1.isParsedTransactions());

        b1.getTransactions();
        assertTrue(b1.isParsedTransactions());
        if (b1.getTransactions().size() > 0) {
            assertTrue(b1.isParsedTransactions());
            Transaction tx1 = b1.getTransactions().get(0);
            //no longer forced to parse if length not provided.
            //assertEquals(true, tx1.isParsed());
            if (tx1.isParsed())
                assertEquals(retain, tx1.isCached());
            else
                assertTrue(tx1.isCached());
        }
        //does it still match ref block?
        serDeser(bs, b1, bos.toByteArray(), null, null);

        //refresh block
        b1 = (Block) bs.deserialize(ByteBuffer.wrap(blockBytes));
        bRef = (Block) bsRef.deserialize(ByteBuffer.wrap(blockBytes));

        //change a value in header
        b1.setNonce(23);
        bRef.setNonce(23);
        assertTrue(b1.isParsedHeader());
        assertEquals(lazy, !b1.isParsedTransactions());
        assertFalse(b1.isHeaderBytesValid());
        if (b1.isParsedTransactions())
            assertEquals(retain , b1.isTransactionBytesValid());
        else
            assertEquals(true, b1.isTransactionBytesValid());

        //does it still match ref block?
        bos.reset();
        bsRef.serialize(bRef, bos);
        serDeser(bs, b1, bos.toByteArray(), null, null);

        //refresh block
        b1 = (Block) bs.deserialize(ByteBuffer.wrap(blockBytes));
        bRef = (Block) bsRef.deserialize(ByteBuffer.wrap(blockBytes));

        //retrieve a value from a child of a child
        b1.getTransactions();
        if (b1.getTransactions().size() > 0) {
            Transaction tx1 = b1.getTransactions().get(0);

            TransactionInput tin = tx1.getInputs().get(0);

            assertTrue(tx1.isParsed());
            assertTrue(b1.isParsedTransactions());
            assertEquals(!lazy, b1.isParsedHeader());

            assertEquals(!lazy, tin.isParsed());
            assertEquals(tin.isParsed() ? retain : true, tin.isCached());

            //does it still match ref tx?
            bos.reset();
            bsRef.serialize(bRef, bos);
            serDeser(bs, b1, bos.toByteArray(), null, null);
        }

        //refresh block
        b1 = (Block) bs.deserialize(ByteBuffer.wrap(blockBytes));
        bRef = (Block) bsRef.deserialize(ByteBuffer.wrap(blockBytes));

        //add an input
        b1.getTransactions();
        if (b1.getTransactions().size() > 0) {
            Transaction tx1 = b1.getTransactions().get(0);

            if (tx1.getInputs().size() > 0) {
                tx1.addInput(tx1.getInputs().get(0));
                //replicate on reference tx
                bRef.getTransactions().get(0).addInput(bRef.getTransactions().get(0).getInputs().get(0));

                assertFalse(tx1.isCached());
                assertTrue(tx1.isParsed());
                assertFalse(b1.isTransactionBytesValid());
                assertTrue(b1.isParsedHeader());

                //confirm sibling cache status was unaffected
                if (tx1.getInputs().size() > 1) {
                    boolean parsed = tx1.getInputs().get(1).isParsed();
                    assertEquals(parsed ? retain : true, tx1.getInputs().get(1).isCached());
                    assertEquals(!lazy, parsed);
                }

                //this has to be false. Altering a tx invalidates the merkle root.
                //when we have seperate merkle caching then the entire header won't need to be
                //invalidated.
                assertFalse(b1.isHeaderBytesValid());

                bos.reset();
                bsRef.serialize(bRef, bos);
                byte[] source = bos.toByteArray();
                //confirm we still match the reference tx.
                serDeser(bs, b1, source, null, null);
            }

            //does it still match ref tx?
            bos.reset();
            bsRef.serialize(bRef, bos);
            serDeser(bs, b1, bos.toByteArray(), null, null);
        }

        //refresh block
        b1 = (Block) bs.deserialize(ByteBuffer.wrap(blockBytes));
        Block b2 = (Block) bs.deserialize(ByteBuffer.wrap(blockBytes));
        bRef = (Block) bsRef.deserialize(ByteBuffer.wrap(blockBytes));
        Block bRef2 = (Block) bsRef.deserialize(ByteBuffer.wrap(blockBytes));

        //reparent an input
        b1.getTransactions();
        if (b1.getTransactions().size() > 0) {
            Transaction tx1 = b1.getTransactions().get(0);
            Transaction tx2 = b2.getTransactions().get(0);

            if (tx1.getInputs().size() > 0) {
                TransactionInput fromTx1 = tx1.getInputs().get(0);
                tx2.addInput(fromTx1);

                //replicate on reference tx
                TransactionInput fromTxRef = bRef.getTransactions().get(0).getInputs().get(0);
                bRef2.getTransactions().get(0).addInput(fromTxRef);

                //b1 hasn't changed but it's no longer in the parent
                //chain of fromTx1 so has to have been uncached since it won't be
                //notified of changes throught the parent chain anymore.
                assertFalse(b1.isTransactionBytesValid());

                //b2 should have it's cache invalidated because it has changed.
                assertFalse(b2.isTransactionBytesValid());

                bos.reset();
                bsRef.serialize(bRef2, bos);
                byte[] source = bos.toByteArray();
                //confirm altered block matches altered ref block.
                serDeser(bs, b2, source, null, null);
            }
            //does unaltered block still match ref block?
            bos.reset();
            bsRef.serialize(bRef, bos);
            serDeser(bs, b1, bos.toByteArray(), null, null);

            //how about if we refresh it?
            bRef = (Block) bsRef.deserialize(ByteBuffer.wrap(blockBytes));
            bos.reset();
            bsRef.serialize(bRef, bos);
            serDeser(bs, b1, bos.toByteArray(), null, null);
        }
    }

    /**
     * Same idea as {@link #testBlock} but for a standalone transaction:
     * verifies lazy/retain state transitions across touch and mutate steps,
     * always comparing against the eager reference serializer.
     */
    public void testTransaction(NetworkParameters params, byte[] txBytes, boolean isChild, boolean lazy, boolean retain) throws Exception {
        //reference serializer to produce comparison serialization output after changes to
        //message structure.
        HTMLcoinSerializer bsRef = new HTMLcoinSerializer(params, false, false);
        ByteArrayOutputStream bos = new ByteArrayOutputStream();

        HTMLcoinSerializer bs = new HTMLcoinSerializer(params, lazy, retain);
        Transaction t1;
        Transaction tRef;
        t1 = (Transaction) bs.deserialize(ByteBuffer.wrap(txBytes));
        tRef = (Transaction) bsRef.deserialize(ByteBuffer.wrap(txBytes));

        //verify our reference HTMLcoinSerializer produces matching byte array.
        bos.reset();
        bsRef.serialize(tRef, bos);
        assertTrue(Arrays.equals(bos.toByteArray(), txBytes));

        //check lazy and retain status survive both before and after a serialization
        assertEquals(!lazy, t1.isParsed());
        if (t1.isParsed())
            assertEquals(retain, t1.isCached());

        serDeser(bs, t1, txBytes, null, null);

        assertEquals(lazy, !t1.isParsed());
        if (t1.isParsed())
            assertEquals(retain, t1.isCached());

        //compare to ref tx
        bos.reset();
        bsRef.serialize(tRef, bos);
        serDeser(bs, t1, bos.toByteArray(), null, null);

        //retrieve a value from a child
        t1.getInputs();
        assertTrue(t1.isParsed());
        if (t1.getInputs().size() > 0) {
            assertTrue(t1.isParsed());
            TransactionInput tin = t1.getInputs().get(0);
            assertEquals(!lazy, tin.isParsed());
            if (tin.isParsed())
                assertEquals(retain, tin.isCached());

            //does it still match ref tx?
            serDeser(bs, t1, bos.toByteArray(), null, null);
        }

        //refresh tx
        t1 = (Transaction) bs.deserialize(ByteBuffer.wrap(txBytes));
        tRef = (Transaction) bsRef.deserialize(ByteBuffer.wrap(txBytes));

        //add an input
        if (t1.getInputs().size() > 0) {

            t1.addInput(t1.getInputs().get(0));

            //replicate on reference tx
            tRef.addInput(tRef.getInputs().get(0));

            assertFalse(t1.isCached());
            assertTrue(t1.isParsed());

            bos.reset();
            bsRef.serialize(tRef, bos);
            byte[] source = bos.toByteArray();
            //confirm we still match the reference tx.
// Closes the mutate step of testTransaction (declaration above): confirm the
// mutated tx still round-trips identically to the mutated reference tx.
            serDeser(bs, t1, source, null, null);
        }
    }

    /**
     * Round-trips {@code message} through the given serializer and asserts
     * stability: serialize -> deserialize must compare equal, and a second
     * serialization must produce identical bytes.
     *
     * @param sourceBytes     if non-null, both serializations must occur within it
     * @param containedBytes  if non-null, must occur within the serialization
     * @param containingBytes if non-null, must contain the serialization
     */
    private void serDeser(HTMLcoinSerializer bs, Message message, byte[] sourceBytes, byte[] containedBytes, byte[] containingBytes) throws Exception {
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        bs.serialize(message, bos);
        byte[] b1 = bos.toByteArray();

        Message m2 = bs.deserialize(ByteBuffer.wrap(b1));

        assertEquals(message, m2);

        bos.reset();
        bs.serialize(m2, bos);
        byte[] b2 = bos.toByteArray();
        assertTrue(Arrays.equals(b1, b2));

        if (sourceBytes != null) {
            assertTrue(arrayContains(sourceBytes, b1));
            assertTrue(arrayContains(sourceBytes, b2));
        }

        if (containedBytes != null) {
            assertTrue(arrayContains(b1, containedBytes));
        }
        if (containingBytes != null) {
            assertTrue(arrayContains(containingBytes, b1));
        }
    }

    /**
     * Returns true iff {@code sub} occurs as a contiguous run of bytes within
     * {@code sup}. An empty {@code sub} is contained in anything.
     *
     * FIX: the previous implementation converted both arrays to hex strings and
     * used {@code String.indexOf}, which could report a false positive when the
     * pattern matched at an odd nibble offset, i.e. straddling byte boundaries
     * (e.g. sup = {0x12, 0x34} "contained" sub = {0x23}). It also built an
     * indentation StringBuilder that was only ever used by commented-out debug
     * prints, and computed indexOf twice. This version searches the byte
     * arrays directly: no false positives, no dead work, no dependency on
     * Utils.bytesToHexString.
     */
    public static boolean arrayContains(byte[] sup, byte[] sub) {
        if (sup.length < sub.length)
            return false;
        if (sub.length == 0)
            return true;
        // Naive sliding-window search; inputs here are at most a few KB.
        outer:
        for (int i = 0; i <= sup.length - sub.length; i++) {
            for (int j = 0; j < sub.length; j++) {
                if (sup[i + j] != sub[j])
                    continue outer;
            }
            return true;
        }
        return false;
    }
}
package edu.mit.kacquah.deckviewer.action.exec;

import java.awt.Point;
import java.util.LinkedList;
import processing.core.PApplet;
import edu.mit.kacquah.deckviewer.deckobjects.FlyingObject;
import edu.mit.kacquah.deckviewer.environment.Deck;
import edu.mit.kacquah.deckviewer.environment.ParkingRegion;
import edu.mit.kacquah.deckviewer.environment.ParkingRegion.ParkingRegionType;
import edu.mit.kacquah.deckviewer.environment.ParkingSpot;
import edu.mit.kacquah.deckviewer.game.DeckViewerPApplet;
import edu.mit.kacquah.deckviewer.game.GlobalSettings;
import edu.mit.kacquah.deckviewer.gui.shape.BlinkingCircle;
import edu.mit.kacquah.deckviewer.gui.shape.StraightLineArrow;
import edu.mit.kacquah.deckviewer.speech.synthesis.SpeechGraph;
import edu.mit.kacquah.deckviewer.speech.synthesis.SpeechNode;
import edu.mit.kacquah.deckviewer.utils.ColorUtil;
import edu.mit.kacquah.deckviewer.utils.RenderGroup;

/**
 * Action for finding an alternate destination for aircraft being moved on deck.
 * Note, this currently only supports moving one aircraft.
 * TODO(KoolJBlack) Update this to handle multiple aircraft re-routing.
 *
 * Implemented as a SpeechGraph: each inner SpeechNode speaks a prompt, then
 * decides the next node in postSpeechProcess (or via the user's yes/no answer).
 *
 * @author kojo
 */
public class FindAlternateTargetAction extends SpeechGraph implements ExecAction {
  // ---------------------------Speech Nodes------------------------------------

  /**
   * First node: silently (speechText == null) computes the alternate parking
   * spots/regions, then hands off to RenderBlocked.
   */
  private class PreProcessTarget extends SpeechNode {
    public PreProcessTarget(SpeechGraph speechGraph) {
      super(speechGraph);
      this.speechText = null;
    }

    @Override
    public void preSpeechProcess() {
      calculateAlternateParkingSpots();
      // Start the render pipeline
      parentGraph.setNextSpeechNode(new RenderBlocked(parentGraph));
      yieldNext();
    }

    @Override
    public void postSpeechProcess() {
      // Not called
    }

    /**
     * Determines alternate spots and regions for parked aircraft by looking up
     * closest free parking spots.
     */
    private void calculateAlternateParkingSpots() {
      // Find the closest spots in the parking region target
      Point centroid = moveToParkingRegion.getCentroid();
      // Spots that were actually assigned (non-null) block their reuse.
      LinkedList<ParkingSpot> blockSpots = new LinkedList<ParkingSpot>();
      for (ParkingSpot spot: moveToParkingSpots) {
        if (spot != null) {
          blockSpots.add(spot);
        }
      }
      alternateParkingSpots = Deck.getInstance().closestFreeParkingSpots(
          centroid, numNullSpots, blockSpots,
          ParkingRegionType.CATAPULT_ELEVATOR_TYPES);
      // Get the names of the alternate parking region
      alternateParkingRegions = new LinkedList<ParkingRegion>();
      for (ParkingSpot spot: alternateParkingSpots) {
        if (!alternateParkingRegions.contains(spot.parkingRegion())) {
          alternateParkingRegions.add(spot.parkingRegion());
        }
      }
    }
  }

  /**
   * Second node: tells the user the requested region is full while highlighting
   * its spots in red; cleans up the highlights afterwards.
   */
  private class RenderBlocked extends SpeechNode {
    public RenderBlocked(SpeechGraph speechGraph) {
      super(speechGraph);
    }

    @Override
    public void preSpeechProcess() {
      // Explain block spots.
      this.speechText = "Sorry, there is not enough room on the "
          + moveToParkingRegion.name();
      // Highlight all parking spots in the target region
      for (ParkingSpot spot: moveToParkingRegion.parkingSpots()) {
        BlinkingCircle circle = new BlinkingCircle(spot.center,
            GlobalSettings.AIRCRAFT_RADIUS, ColorUtil.RED, true);
        renderGroup.addRenderObject(circle);
      }
      DeckViewerPApplet.getInstance().renderStack().addRenderGroup(renderGroup);
      yieldWait();
    }

    @Override
    public void postSpeechProcess() {
      DeckViewerPApplet.getInstance().renderStack().removeRenderGroup(renderGroup);
      renderGroup.clear();
      parentGraph.setNextSpeechNode(new RenderAlternate(parentGraph));
      yieldNext();
    }
  }

  /**
   * Third node: proposes the closest alternate spot (blue circle + arrow) and
   * asks a yes/no question; the answer picks DoMove or DontDoMove.
   */
  private class RenderAlternate extends SpeechNode {
    public RenderAlternate(SpeechGraph speechGraph) {
      super(speechGraph);
    }

    @Override
    public void preSpeechProcess() {
      // Explain block spots. We assume there is only one parking region.
      this.speechText = "The next closest spot is at the "
          + alternateParkingRegions.get(0).name()
          + ". Shall I move the aircraft there instead?";
      // Render the alternate placement
      Point center = alternateParkingSpots.get(0).center;
      BlinkingCircle circle = new BlinkingCircle(center,
          GlobalSettings.AIRCRAFT_RADIUS, ColorUtil.BLUE, false);
      Point start = new Point((int)(moveAircraft.get(0).positionFloat().x),
          (int)(moveAircraft.get(0).positionFloat().y));
      StraightLineArrow lineArrow = new StraightLineArrow(start, center,
          ColorUtil.BLUE);
      renderGroup.addRenderObject(circle);
      renderGroup.addRenderObject(lineArrow);
      DeckViewerPApplet.getInstance().renderStack().addRenderGroup(renderGroup);
      yieldAffirmative();
    }

    @Override
    public void postSpeechProcess() {
      DeckViewerPApplet.getInstance().renderStack().removeRenderGroup(renderGroup);
      renderGroup.clear();
      // Our next action is based on affirmative response.
      Affirmative affirmative = parentGraph.getLastAffirmative();
      if (affirmative == Affirmative.YES) {
        parentGraph.setNextSpeechNode(new DoMove(parentGraph));
      } else {
        parentGraph.setNextSpeechNode(new DontDoMove(parentGraph));
      }
      yieldNext();
    }
  }

  /**
   * Terminal node (user said yes): parks each aircraft on its alternate spot,
   * confirms verbally, then finishes the graph.
   */
  private class DoMove extends SpeechNode {
    public DoMove(SpeechGraph speechGraph) {
      super(speechGraph);
      this.speechText = "Ok Done!";
    }

    @Override
    public void preSpeechProcess() {
      // Move aircraft to their destinations
      for (int i = 0; i < moveAircraft.size(); ++i) {
        FlyingObject o = moveAircraft.get(i);
        ParkingSpot p = alternateParkingSpots.get(i);
        p.park(o);
      }
      // Yeild to give confirmation
      yieldWait();
    }

    @Override
    public void postSpeechProcess() {
      yieldDone();
    }
  }

  /**
   * Terminal node (user said no): acknowledges and finishes without moving
   * anything.
   */
  private class DontDoMove extends SpeechNode{
    public DontDoMove(SpeechGraph speechGraph) {
      super(speechGraph);
      this.speechText = "Ok, please give another command.";
    }

    @Override
    public void preSpeechProcess() {
      yieldWait();
    }

    @Override
    public void postSpeechProcess() {
      yieldDone();
    }
  }

  // ---------------------------Speech Graph------------------------------------

  // Parent action stack.
  private ExecActionStack actionStack;

  /**
   * List of aircraft to move.
   */
  private LinkedList<FlyingObject> moveAircraft;

  /**
   * List of parking spot destinations corresponding to each move aircraft.
   */
  private LinkedList<ParkingSpot> moveToParkingSpots;
  // Number of aircraft that could not be assigned a spot (null entries above);
  // this many alternate spots are requested from the deck.
  private int numNullSpots;

  /**
   * Parking regions specified as the target.
   */
  private ParkingRegion moveToParkingRegion;

  /**
   * Alternate spots for parking.
   */
  private LinkedList<ParkingSpot> alternateParkingSpots;
  private LinkedList<ParkingRegion> alternateParkingRegions;

  /**
   * Rendering animations.
   */
  private RenderGroup renderGroup;

  public FindAlternateTargetAction(ExecActionStack actionStack,
      LinkedList<FlyingObject> moveAircraft, ParkingRegion target,
      LinkedList<ParkingSpot> moveToParkingSpots, int numNullSpots) {
    this.actionStack = actionStack;
    this.moveAircraft = moveAircraft;
    this.moveToParkingSpots = moveToParkingSpots;
    this.numNullSpots = numNullSpots;
    this.moveToParkingRegion = target;
    this.renderGroup = new RenderGroup();
  }

  // The graph always starts by computing alternates.
  @Override
  protected SpeechNode rootNode() {
    return new PreProcessTarget(this);
  }

  @Override
  public void update(long elapsedTime) {
    super.update(elapsedTime);
  }

  @Override
  public void render(PApplet p) {
    super.render(p);
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.cassandra.repair; import java.net.InetAddress; import java.nio.ByteBuffer; import java.util.*; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.common.util.concurrent.*; import org.apache.commons.lang3.time.DurationFormatUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.cassandra.concurrent.JMXConfigurableThreadPoolExecutor; import org.apache.cassandra.concurrent.NamedThreadFactory; import org.apache.cassandra.config.SchemaConstants; import org.apache.cassandra.cql3.QueryOptions; import org.apache.cassandra.cql3.QueryProcessor; import org.apache.cassandra.cql3.UntypedResultSet; import org.apache.cassandra.cql3.statements.SelectStatement; import org.apache.cassandra.db.ColumnFamilyStore; import org.apache.cassandra.db.ConsistencyLevel; import org.apache.cassandra.dht.Range; import org.apache.cassandra.dht.Token; import 
org.apache.cassandra.repair.messages.RepairOption;
import org.apache.cassandra.service.ActiveRepairService;
import org.apache.cassandra.service.ClientState;
import org.apache.cassandra.service.QueryState;
import org.apache.cassandra.service.StorageService;
import org.apache.cassandra.tracing.TraceKeyspace;
import org.apache.cassandra.tracing.TraceState;
import org.apache.cassandra.tracing.Tracing;
import org.apache.cassandra.transport.messages.ResultMessage;
import org.apache.cassandra.utils.ByteBufferUtil;
import org.apache.cassandra.utils.FBUtilities;
import org.apache.cassandra.utils.Pair;
import org.apache.cassandra.utils.UUIDGen;
import org.apache.cassandra.utils.WrappedRunnable;
import org.apache.cassandra.utils.progress.ProgressEvent;
import org.apache.cassandra.utils.progress.ProgressEventNotifier;
import org.apache.cassandra.utils.progress.ProgressEventType;
import org.apache.cassandra.utils.progress.ProgressListener;

/**
 * Runnable driving a single repair command: it reports its lifecycle to
 * registered ProgressListeners (START/PROGRESS/ERROR/COMPLETE events) and does
 * the actual work in runMayThrow (defined below this chunk).
 */
public class RepairRunnable extends WrappedRunnable implements ProgressEventNotifier
{
    private static final Logger logger = LoggerFactory.getLogger(RepairRunnable.class);

    private StorageService storageService;
    // Repair command number; used to tag progress events and thread names.
    private final int cmd;
    // User-supplied repair options (ranges, hosts, DCs, tracing, ...).
    private final RepairOption options;
    private final String keyspace;

    // Listeners receiving progress events for this repair run.
    // NOTE(review): plain ArrayList mutated via add/removeProgressListener —
    // presumably accessed from a single thread; confirm before adding callers.
    private final List<ProgressListener> listeners = new ArrayList<>();

    private static final AtomicInteger threadCounter = new AtomicInteger(1);

    public RepairRunnable(StorageService storageService, int cmd, RepairOption options, String keyspace)
    {
        this.storageService = storageService;
        this.cmd = cmd;
        this.options = options;
        this.keyspace = keyspace;
    }

    @Override
    public void addProgressListener(ProgressListener listener)
    {
        listeners.add(listener);
    }

    @Override
    public void removeProgressListener(ProgressListener listener)
    {
        listeners.remove(listener);
    }

    // Broadcasts an event to every registered listener.
    protected void fireProgressEvent(String tag, ProgressEvent event)
    {
        for (ProgressListener listener : listeners)
        {
            listener.progress(tag, event);
        }
    }

    // Emits an ERROR event followed by a COMPLETE event for a failed repair.
    // (Signature continues on the next line of the file — chunk boundary.)
    protected void fireErrorAndComplete(String tag, int
progressCount, int totalProgress, String message) {
    // Fire an ERROR event followed by a COMPLETE event so JMX listeners see both the
    // failure reason and the terminal state for this repair command.
    fireProgressEvent(tag, new ProgressEvent(ProgressEventType.ERROR, progressCount, totalProgress,
                                             String.format("Repair command #%d failed with error %s", cmd, message)));
    fireProgressEvent(tag, new ProgressEvent(ProgressEventType.COMPLETE, progressCount, totalProgress,
                                             String.format("Repair command #%d finished with error", cmd)));
}

/**
 * Main body of the repair command. Steps (each counted toward {@code totalProgress}):
 * resolve valid column families, compute neighbor endpoints per requested range,
 * validate column families, prepare the parent repair session, then submit one
 * {@code RepairSession} per common-neighbor range group and report results/anticompaction
 * asynchronously via Guava future callbacks. Any validation failure fires
 * ERROR+COMPLETE through {@code fireErrorAndComplete} and returns early.
 */
protected void runMayThrow() throws Exception {
    final TraceState traceState;
    final UUID parentSession = UUIDGen.getTimeUUID();
    final String tag = "repair:" + cmd;
    final AtomicInteger progress = new AtomicInteger();
    final int totalProgress = 4 + options.getRanges().size(); // get valid column families, calculate neighbors, validation, prepare for repair + number of ranges to repair

    String[] columnFamilies = options.getColumnFamilies().toArray(new String[options.getColumnFamilies().size()]);
    Iterable<ColumnFamilyStore> validColumnFamilies;
    try {
        validColumnFamilies = storageService.getValidColumnFamilies(false, false, keyspace, columnFamilies);
        progress.incrementAndGet();
    } catch (IllegalArgumentException e) {
        logger.error("Repair failed:", e);
        fireErrorAndComplete(tag, progress.get(), totalProgress, e.getMessage());
        return;
    }

    final long startTime = System.currentTimeMillis();
    String message = String.format("Starting repair command #%d (%s), repairing keyspace %s with %s", cmd, parentSession, keyspace, options);
    logger.info(message);
    if (options.isTraced()) {
        // Tracing enabled: open a REPAIR trace session and start a background thread
        // that polls the trace events table and re-emits rows as NOTIFICATION events.
        StringBuilder cfsb = new StringBuilder();
        for (ColumnFamilyStore cfs : validColumnFamilies)
            cfsb.append(", ").append(cfs.keyspace.getName()).append(".").append(cfs.name);

        UUID sessionId = Tracing.instance.newSession(Tracing.TraceType.REPAIR);
        // substring(2) strips the leading ", " produced by the loop above.
        traceState = Tracing.instance.begin("repair", ImmutableMap.of("keyspace", keyspace, "columnFamilies", cfsb.substring(2)));
        message = message + " tracing with " + sessionId;
        fireProgressEvent(tag, new ProgressEvent(ProgressEventType.START, 0, 100, message));
        Tracing.traceRepair(message);
        traceState.enableActivityNotification(tag);
        for (ProgressListener listener : listeners)
            traceState.addProgressListener(listener);
        Thread queryThread = createQueryThread(cmd, sessionId);
        queryThread.setName("RepairTracePolling");
        queryThread.start();
    } else {
        fireProgressEvent(tag, new ProgressEvent(ProgressEventType.START, 0, 100, message));
        traceState = null;
    }

    final Set<InetAddress> allNeighbors = new HashSet<>();
    List<Pair<Set<InetAddress>, ? extends Collection<Range<Token>>>> commonRanges = new ArrayList<>();

    //pre-calculate output of getLocalRanges and pass it to getNeighbors to increase performance and prevent
    //calculation multiple times
    Collection<Range<Token>> keyspaceLocalRanges = storageService.getLocalRanges(keyspace);

    try {
        for (Range<Token> range : options.getRanges()) {
            Set<InetAddress> neighbors = ActiveRepairService.getNeighbors(keyspace, keyspaceLocalRanges, range,
                                                                          options.getDataCenters(),
                                                                          options.getHosts());
            if (neighbors.isEmpty()) {
                // No replicas for this range: either skip it (when the operator asked to
                // ignore unreplicated keyspaces) or abort the whole command.
                if (options.ignoreUnreplicatedKeyspaces()) {
                    logger.info("Found no neighbors for range {} for {} - ignoring since repairing with --ignore-unreplicated-keyspaces", range, keyspace);
                    continue;
                } else {
                    String errorMessage = String.format("Nothing to repair for %s in %s - aborting", range, keyspace);
                    logger.error("Repair {}", errorMessage);
                    fireErrorAndComplete(tag, progress.get(), totalProgress, errorMessage);
                    return;
                }
            }
            addRangeToNeighbors(commonRanges, range, neighbors);
            allNeighbors.addAll(neighbors);
        }
        progress.incrementAndGet();
    } catch (IllegalArgumentException e) {
        logger.error("Repair failed:", e);
        fireErrorAndComplete(tag, progress.get(), totalProgress, e.getMessage());
        return;
    }

    if (options.ignoreUnreplicatedKeyspaces() && allNeighbors.isEmpty()) {
        // Every range was skipped above; nothing to do, report success-like completion.
        String ignoreUnreplicatedMessage = String.format("Nothing to repair for %s in %s - unreplicated keyspace is ignored since repair was called with --ignore-unreplicated-keyspaces",
                                                         options.getRanges(), keyspace);
        logger.info("Repair {}", ignoreUnreplicatedMessage);
        fireProgressEvent(tag, new ProgressEvent(ProgressEventType.COMPLETE, progress.get(), totalProgress, ignoreUnreplicatedMessage));
        return;
    }

    // Validate columnfamilies
    List<ColumnFamilyStore> columnFamilyStores = new ArrayList<>();
    try {
        Iterables.addAll(columnFamilyStores, validColumnFamilies);
        progress.incrementAndGet();
    } catch (IllegalArgumentException e) {
        fireErrorAndComplete(tag, progress.get(), totalProgress, e.getMessage());
        return;
    }

    String[] cfnames = new String[columnFamilyStores.size()];
    for (int i = 0; i < columnFamilyStores.size(); i++) {
        cfnames[i] = columnFamilyStores.get(i).name;
    }

    SystemDistributedKeyspace.startParentRepair(parentSession, keyspace, cfnames, options);
    long repairedAt;
    try {
        ActiveRepairService.instance.prepareForRepair(parentSession, FBUtilities.getBroadcastAddress(), allNeighbors, options, columnFamilyStores);
        repairedAt = ActiveRepairService.instance.getParentRepairSession(parentSession).getRepairedAt();
        progress.incrementAndGet();
    } catch (Throwable t) {
        SystemDistributedKeyspace.failParentRepair(parentSession, t);
        fireErrorAndComplete(tag, progress.get(), totalProgress, t.getMessage());
        return;
    }

    // Set up RepairJob executor for this repair command.
    final ListeningExecutorService executor = MoreExecutors.listeningDecorator(new JMXConfigurableThreadPoolExecutor(options.getJobThreads(),
                                                                                                                     Integer.MAX_VALUE,
                                                                                                                     TimeUnit.SECONDS,
                                                                                                                     new LinkedBlockingQueue<Runnable>(),
                                                                                                                     new NamedThreadFactory("Repair#" + cmd),
                                                                                                                     "internal"));

    List<ListenableFuture<RepairSessionResult>> futures = new ArrayList<>(options.getRanges().size());
    for (Pair<Set<InetAddress>, ? extends Collection<Range<Token>>> p : commonRanges) {
        final RepairSession session = ActiveRepairService.instance.submitRepairSession(parentSession,
                                                                                      p.right,
                                                                                      keyspace,
                                                                                      options.getParallelism(),
                                                                                      p.left,
                                                                                      repairedAt,
                                                                                      options.isPullRepair(),
                                                                                      executor,
                                                                                      cfnames);
        if (session == null)
            continue;
        // After repair session completes, notify client its result
        Futures.addCallback(session, new FutureCallback<RepairSessionResult>() {
            public void onSuccess(RepairSessionResult result) {
                /**
                 * If the success message below is modified, it must also be updated on
                 * {@link org.apache.cassandra.utils.progress.jmx.LegacyJMXProgressSupport}
                 * for backward-compatibility support.
                 */
                String message = String.format("Repair session %s for range %s finished", session.getId(), session.getRanges().toString());
                logger.info(message);
                fireProgressEvent(tag, new ProgressEvent(ProgressEventType.PROGRESS, progress.incrementAndGet(), totalProgress, message));
            }

            public void onFailure(Throwable t) {
                /**
                 * If the failure message below is modified, it must also be updated on
                 * {@link org.apache.cassandra.utils.progress.jmx.LegacyJMXProgressSupport}
                 * for backward-compatibility support.
                 */
                String message = String.format("Repair session %s for range %s failed with error %s", session.getId(), session.getRanges().toString(), t.getMessage());
                logger.error(message, t);
                fireProgressEvent(tag, new ProgressEvent(ProgressEventType.PROGRESS, progress.incrementAndGet(), totalProgress, message));
            }
        });
        futures.add(session);
    }

    // After all repair sessions completes(successful or not),
    // run anticompaction if necessary and send finish notice back to client
    final Collection<Range<Token>> successfulRanges = new ArrayList<>();
    final AtomicBoolean hasFailure = new AtomicBoolean();
    // successfulAsList yields null entries for failed sessions rather than failing the whole list.
    final ListenableFuture<List<RepairSessionResult>> allSessions = Futures.successfulAsList(futures);
    ListenableFuture anticompactionResult = Futures.transform(allSessions, new AsyncFunction<List<RepairSessionResult>, Object>() {
        @SuppressWarnings("unchecked")
        public ListenableFuture apply(List<RepairSessionResult> results) {
            // filter out null(=failed) results and get successful ranges
            for (RepairSessionResult sessionResult : results) {
                if (sessionResult != null) {
                    successfulRanges.addAll(sessionResult.ranges);
                } else {
                    hasFailure.compareAndSet(false, true);
                }
            }
            return ActiveRepairService.instance.finishParentSession(parentSession, allNeighbors, successfulRanges);
        }
    });
    Futures.addCallback(anticompactionResult, new FutureCallback<Object>() {
        public void onSuccess(Object result) {
            SystemDistributedKeyspace.successfulParentRepair(parentSession, successfulRanges);
            if (hasFailure.get()) {
                fireProgressEvent(tag, new ProgressEvent(ProgressEventType.ERROR, progress.get(), totalProgress, "Some repair failed"));
            } else {
                fireProgressEvent(tag, new ProgressEvent(ProgressEventType.SUCCESS, progress.get(), totalProgress, "Repair completed successfully"));
            }
            repairComplete();
        }

        public void onFailure(Throwable t) {
            fireProgressEvent(tag, new ProgressEvent(ProgressEventType.ERROR, progress.get(), totalProgress, t.getMessage()));
            SystemDistributedKeyspace.failParentRepair(parentSession, t);
            repairComplete();
        }

        // Common epilogue for both outcomes: fire COMPLETE, close tracing, stop the executor.
        private void repairComplete() {
            String duration = DurationFormatUtils.formatDurationWords(System.currentTimeMillis() - startTime, true, true);
            String message = String.format("Repair command #%d finished in %s", cmd, duration);
            fireProgressEvent(tag, new ProgressEvent(ProgressEventType.COMPLETE, progress.get(), totalProgress, message));
            logger.info(message);
            if (options.isTraced() && traceState != null) {
                for (ProgressListener listener : listeners)
                    traceState.removeProgressListener(listener);
                // Because DebuggableThreadPoolExecutor#afterExecute and this callback
                // run in a nondeterministic order (within the same thread), the
                // TraceState may have been nulled out at this point. The TraceState
                // should be traceState, so just set it without bothering to check if it
                // actually was nulled out.
                Tracing.instance.set(traceState);
                Tracing.traceRepair(message);
                Tracing.instance.stopSession();
            }
            executor.shutdownNow();
        }
    });
}

/**
 * Groups {@code range} with an existing entry whose neighbor set already contains all of
 * {@code neighbors} (so one repair session can cover several ranges with common endpoints);
 * otherwise appends a new (neighbors, [range]) pair to {@code neighborRangeList}.
 */
private void addRangeToNeighbors(List<Pair<Set<InetAddress>, ? extends Collection<Range<Token>>>> neighborRangeList, Range<Token> range, Set<InetAddress> neighbors) {
    for (int i = 0; i < neighborRangeList.size(); i++) {
        Pair<Set<InetAddress>, ? extends Collection<Range<Token>>> p = neighborRangeList.get(i);

        if (p.left.containsAll(neighbors)) {
            p.right.add(range);
            return;
        }
    }

    List<Range<Token>> ranges = new ArrayList<>();
    ranges.add(range);
    neighborRangeList.add(Pair.create(neighbors, ranges));
}

/**
 * Builds (but does not start) a daemon-style polling thread that repeatedly queries the
 * trace events table for the given session and forwards each remote row as a
 * NOTIFICATION progress event tagged "repair:&lt;cmd&gt;".
 */
private Thread createQueryThread(final int cmd, final UUID sessionId) {
    return NamedThreadFactory.createThread(new WrappedRunnable() {
        // Query events within a time interval that overlaps the last by one second. Ignore duplicates. Ignore local traces.
        // Wake up upon local trace activity. Query when notified of trace activity with a timeout that doubles every two timeouts.
        public void runMayThrow() throws Exception {
            TraceState state = Tracing.instance.get(sessionId);
            if (state == null)
                throw new Exception("no tracestate");

            String format = "select event_id, source, activity from %s.%s where session_id = ? and event_id > ? and event_id < ?;";
            String query = String.format(format, SchemaConstants.TRACE_KEYSPACE_NAME, TraceKeyspace.EVENTS);
            SelectStatement statement = (SelectStatement) QueryProcessor.parseStatement(query).prepare(ClientState.forInternalCalls()).statement;

            ByteBuffer sessionIdBytes = ByteBufferUtil.bytes(sessionId);
            InetAddress source = FBUtilities.getBroadcastAddress();

            // Two-generation dedup: events from the 1-second overlap window are remembered in
            // the current set and checked against the previous one, then the sets are swapped.
            HashSet<UUID>[] seen = new HashSet[] { new HashSet<>(), new HashSet<>() };
            int si = 0;
            UUID uuid;

            long tlast = System.currentTimeMillis(), tcur;

            TraceState.Status status;
            long minWaitMillis = 125;
            long maxWaitMillis = 1000 * 1024L;
            long timeout = minWaitMillis;
            boolean shouldDouble = false;

            while ((status = state.waitActivity(timeout)) != TraceState.Status.STOPPED) {
                if (status == TraceState.Status.IDLE) {
                    // Back off: the timeout doubles on every second consecutive idle wake-up,
                    // capped at maxWaitMillis.
                    timeout = shouldDouble ? Math.min(timeout * 2, maxWaitMillis) : timeout;
                    shouldDouble = !shouldDouble;
                } else {
                    timeout = minWaitMillis;
                    shouldDouble = false;
                }
                // Query window: (tlast - 1s, now] — deliberately overlaps the previous window
                // by one second; duplicates are filtered via the seen[] sets above.
                ByteBuffer tminBytes = ByteBufferUtil.bytes(UUIDGen.minTimeUUID(tlast - 1000));
                ByteBuffer tmaxBytes = ByteBufferUtil.bytes(UUIDGen.maxTimeUUID(tcur = System.currentTimeMillis()));
                QueryOptions options = QueryOptions.forInternalCalls(ConsistencyLevel.ONE, Lists.newArrayList(sessionIdBytes, tminBytes, tmaxBytes));
                ResultMessage.Rows rows = statement.execute(QueryState.forInternalCalls(), options, System.nanoTime());
                UntypedResultSet result = UntypedResultSet.create(rows.result);

                for (UntypedResultSet.Row r : result) {
                    // Skip events generated locally; they are reported through listeners already.
                    if (source.equals(r.getInetAddress("source")))
                        continue;
                    if ((uuid = r.getUUID("event_id")).timestamp() > (tcur - 1000) * 10000)
                        seen[si].add(uuid);
                    if (seen[si == 0 ? 1 : 0].contains(uuid))
                        continue;
                    String message = String.format("%s: %s", r.getInetAddress("source"), r.getString("activity"));
                    fireProgressEvent("repair:" + cmd, new ProgressEvent(ProgressEventType.NOTIFICATION, 0, 0, message));
                }
                tlast = tcur;

                si = si == 0 ? 1 : 0;
                seen[si].clear();
            }
        }
    }, "Repair-Runnable-" + threadCounter.incrementAndGet());
}
}
package cyclops.data.tuple;

import com.oath.cyclops.hkt.DataWitness.tuple2;
import com.oath.cyclops.hkt.Higher;
import com.oath.cyclops.hkt.Higher2;
import com.oath.cyclops.types.foldable.EqualTo;
import com.oath.cyclops.types.foldable.OrderedBy;
import com.oath.cyclops.types.foldable.To;
import cyclops.control.Either;
import cyclops.companion.Comparators;
import cyclops.function.Memoize;
import cyclops.function.Monoid;
import lombok.AllArgsConstructor;

import java.io.Serializable;
import java.util.Objects;
import java.util.function.BiFunction;
import java.util.function.Function;
import java.util.function.Supplier;

/*
  A Tuple implementation that can be either eager / strict or lazy.

  Eager tuples store their values in the fields _1/_2; lazy tuples (created via
  lazy(..) or memo()) keep the fields null and override the accessors _1()/_2().
  For that reason ALL internal code must read values through _1()/_2(), never the
  raw fields — see the lazyConcat fix below.
 */
@AllArgsConstructor
public class Tuple2<T1,T2> implements To<Tuple2<T1,T2>>,
                                      Serializable,
                                      EqualTo<Higher<tuple2,T1>,T2,Tuple2<T1,T2>>,
                                      OrderedBy<Higher<tuple2,T1>,T2,Tuple2<T1,T2>>,
                                      Comparable<Tuple2<T1,T2>>,
                                      Higher2<tuple2,T1,T2> {

    private static final long serialVersionUID = 1L;

    /** Creates an eager (strict) tuple holding the two given values. */
    public static <T1,T2> Tuple2<T1,T2> of(T1 value1, T2 value2) {
        return new Tuple2<T1,T2>(value1,value2);
    }

    /**
     * Creates a lazy tuple: each accessor re-invokes its supplier on every call.
     * Use {@link #memo()} to cache the supplied values after first access.
     */
    public static <T1,T2> Tuple2<T1,T2> lazy(Supplier<? extends T1> supplier1,Supplier<? extends T2> supplier2) {
        return new Tuple2<T1,T2>(null,null) {
            @Override
            public T1 _1() {
                return supplier1.get();
            }
            @Override
            public T2 _2() {
                return supplier2.get();
            }
        };
    }

    private final T1 _1;
    private final T2 _2;

    /** First element. Overridden by lazy instances; always read via this accessor. */
    public T1 _1(){
        return _1;
    }

    /** Second element. Overridden by lazy instances; always read via this accessor. */
    public T2 _2(){
        return _2;
    }

    /** A 1-tuple containing only the first element. */
    public Tuple1<T1> first(){
        return Tuple.tuple(_1());
    }

    /** A 1-tuple containing only the second element. */
    public Tuple1<T2> second(){
        return Tuple.tuple(_2());
    }

    /** Forces evaluation, returning a strict copy of this tuple. */
    public Tuple2<T1,T2> eager(){
        return of(_1(),_2());
    }

    /** A lazy tuple that evaluates each element at most once and caches the result. */
    public Tuple2<T1,T2> memo(){
        Tuple2<T1,T2> host = this;
        return new Tuple2<T1,T2>(null,null){
            final Supplier<T1> memo1 = Memoize.memoizeSupplier(host::_1);
            final Supplier<T2> memo2 = Memoize.memoizeSupplier(host::_2);
            @Override
            public T1 _1() {
                return memo1.get();
            }

            @Override
            public T2 _2() {
                return memo2.get();
            }
        };
    }

    /**
     * Monadic bind on the second element; the first elements are combined with the
     * supplied Monoid (this tuple's _1 is appended to the result's _1).
     */
    public <R> Tuple2<T1,R> flatMap(Monoid<T1> m,Function<? super T2, ? extends Tuple2<T1,R>> fn){
        return fn.apply(_2()).map1(t1->m.apply(t1,_1()));
    }

    /** Applies fn to both elements and returns its result. */
    public <R1> R1 transform(BiFunction<? super T1, ? super T2, ? extends R1> fn1){
        return fn1.apply(_1(),_2());
    }

    /** Maps both elements eagerly. */
    public <R1,R2> Tuple2<R1,R2> bimap(Function<? super T1, ? extends R1> fn1,Function<? super T2,? extends R2> fn2){
        return of((fn1.apply(_1())),fn2.apply(_2()));
    }

    /** Maps both elements lazily. */
    public <R1,R2> Tuple2<R1,R2> lazyBimap(Function<? super T1, ? extends R1> fn1,Function<? super T2,? extends R2> fn2){
        return lazy(()->(fn1.apply(_1())),()->fn2.apply(_2()));
    }

    /** Maps the first element eagerly. */
    public <R> Tuple2<R, T2> map1(Function<? super T1, ? extends R> fn) {
        return of(fn.apply(_1()), _2());
    }

    /** Maps the first element lazily. */
    public <R> Tuple2<R, T2> lazyMap1(Function<? super T1, ? extends R> fn) {
        return lazy(()->fn.apply(_1()),()-> _2());
    }

    /** Maps the second element eagerly. */
    public <R> Tuple2<T1, R> map2(Function<? super T2, ? extends R> fn) {
        return of(_1(), fn.apply(_2()));
    }

    /** Maps the second element lazily. */
    public <R> Tuple2<T1, R> lazyMap2(Function<? super T2, ? extends R> fn) {
        return lazy(() -> _1(), () -> fn.apply(_2()));
    }

    /** Swaps the elements eagerly. */
    public Tuple2<T2,T1> swap(){
        return of(_2(),_1());
    }

    /** Swaps the elements lazily. */
    public Tuple2<T2,T1> lazySwap(){
        return lazy(()->_2(),()->_1());
    }

    /** Folds both elements into a single result. */
    public <R> R fold(BiFunction<? super T1, ? super T2, ? extends R> fn){
        return fn.apply(_1(),_2());
    }

    @Override
    public String toString() {
        return String.format("[%s,%s]", _1(),_2());
    }

    public static <T1,T2> Tuple2<T1,T2> narrowK2(Higher2<tuple2,T1,T2> ds){
        return (Tuple2<T1,T2>)ds;
    }
    public static <T1,T2> Tuple2<T1,T2> narrowK(Higher<Higher<tuple2, T1>, T2> ds){
        return (Tuple2<T1,T2>)ds;
    }

    /**
     * Tail-recursive trampoline: repeatedly applies fn while it returns Either.left,
     * accumulating the first elements with the supplied Monoid.
     */
    public static <T1,T2,R> Tuple2<T1,R> tailRec(Monoid<T1> op,T2 initial, Function<? super T2, ? extends Tuple2<T1,? extends Either<T2, R>>> fn){
        Tuple2<T1,? extends Either<T2, R>> next[] = new Tuple2[1];
        next[0] = Tuple2.of(op.zero(), Either.left(initial));
        boolean cont = true;
        do {
            cont = next[0].fold((a, p) -> p.fold(s -> {
                next[0] = narrowK(fn.apply(s)).map1(t1->op.apply(next[0]._1(),t1));
                return true;
            }, __ -> false));
        } while (cont);
        return next[0].map2(x->x.orElse(null));
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || !(o instanceof Tuple2)) return false;
        Tuple2<?, ?> tuple2 = (Tuple2<?, ?>) o;
        // Compare through the accessors so lazy tuples compare by value.
        return Objects.equals(_1(), tuple2._1()) &&
                Objects.equals(_2(), tuple2._2());
    }

    @Override
    public int hashCode() {
        return Objects.hash(_1(), _2());
    }

    @Override
    public int compareTo(Tuple2<T1, T2> o) {
        int result = Comparators.naturalOrderIdentityComparator().compare(_1(),o._1());
        if(result==0){
            result = Comparators.naturalOrderIdentityComparator().compare(_2(),o._2());
        }
        return result;
    }

    public final Object[] toArray() {
        return new Object[] { _1(),_2() };
    }

    public static <K, V> Tuple2< K, V> narrow(Tuple2<? extends K, ? extends V> t) {
        return (Tuple2<K,V>)t;
    }

    public static <T1,T2> Higher2<tuple2,T1, T2> widen(Tuple2<T1,T2> narrow) {
        return narrow;
    }

    public <T3> Tuple3<T1, T2,T3> concat(Tuple1<T3> tuple) {
        return Tuple.tuple(_1(),_2(),tuple._1());
    }

    public <T3, T4> Tuple4<T1, T2, T3, T4> concat(Tuple2<T3,T4> tuple) {
        return Tuple.tuple(_1(),_2(),tuple._1(),tuple._2());
    }

    public <T3, T4, T5> Tuple5<T1, T2, T3, T4, T5> concat(Tuple3<T3, T4, T5> tuple) {
        return Tuple.tuple(_1(), _2(),tuple._1(), tuple._2(), tuple._3());
    }

    public <T3, T4, T5, T6> Tuple6<T1, T2, T3, T4, T5, T6> concat(Tuple4<T3, T4, T5, T6> tuple) {
        return Tuple.tuple(_1(), _2(), tuple._1(), tuple._2(), tuple._3(), tuple._4());
    }

    public < T3, T4, T5, T6, T7> Tuple7<T1, T2, T3, T4, T5, T6, T7> concat(Tuple5<T3, T4, T5, T6, T7> tuple) {
        return Tuple.tuple(_1(), _2(), tuple._1(), tuple._2(), tuple._3(), tuple._4(), tuple._5());
    }

    public <T3, T4, T5, T6, T7, T8> Tuple8<T1, T2, T3, T4, T5, T6, T7, T8> concat(Tuple6<T3, T4, T5, T6, T7, T8> tuple) {
        return Tuple.tuple(_1(), _2(),tuple._1(), tuple._2(),tuple._3(), tuple._4(),tuple._5(), tuple._6());
    }

    // BUGFIX: the lazyConcat overloads below previously captured the raw field `_2`
    // (()->_2) instead of the accessor ()->_2(). Lazy/memoized tuples keep their
    // fields null and only override the accessors, so lazyConcat on a lazy tuple
    // silently produced a null second element. All occurrences now call _2().

    public <T3> Tuple3<T1, T2,T3> lazyConcat(Tuple1<T3> tuple) {
        return Tuple.lazy(()->_1(),()->_2(),()->tuple._1());
    }

    public <T3, T4> Tuple4<T1, T2, T3, T4> lazyConcat(Tuple2<T3,T4> tuple) {
        return Tuple.lazy(()->_1(),()->_2(),()->tuple._1(),()->tuple._2());
    }

    public <T3, T4, T5> Tuple5<T1, T2, T3, T4, T5> lazyConcat(Tuple3<T3, T4, T5> tuple) {
        return Tuple.lazy(()->_1(),()->_2(), ()->tuple._1(), ()->tuple._2(), ()->tuple._3());
    }

    public <T3, T4, T5, T6> Tuple6<T1, T2, T3, T4, T5, T6>lazyConcat(Tuple4<T3, T4, T5, T6> tuple) {
        return Tuple.lazy(()->_1(),()->_2(), ()->tuple._1(), ()->tuple._2(), ()->tuple._3(), ()->tuple._4());
    }

    public <T3, T4, T5, T6, T7> Tuple7<T1, T2, T3, T4, T5, T6, T7>lazyConcat(Tuple5<T3, T4, T5, T6, T7> tuple) {
        return Tuple.lazy(()->_1(),()->_2(), ()->tuple._1(), ()->tuple._2(), ()->tuple._3(), ()->tuple._4(), ()->tuple._5());
    }

    public < T3, T4, T5, T6, T7, T8> Tuple8<T1, T2, T3, T4, T5, T6, T7, T8> lazyConcat(Tuple6<T3, T4, T5, T6, T7, T8> tuple) {
        return Tuple.lazy(()->_1(),()->_2(),()->tuple._1(), ()->tuple._2(),()-> tuple._3(), ()->tuple._4(),()-> tuple._5(), ()->tuple._6());
    }
}
package purchases;

import com.pascal.terra.TERRALibrary;

import android.app.Activity;
import android.app.PendingIntent;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.ServiceConnection;
import android.os.Bundle;
import android.os.IBinder;
import android.util.Base64;
import android.text.TextUtils;
import android.util.Log;

import com.android.vending.billing.IInAppBillingService;

import org.json.JSONException;
import org.json.JSONObject;

import java.util.ArrayList;
import java.util.List;
import java.util.UUID;

import java.security.InvalidKeyException;
import java.security.KeyFactory;
import java.security.NoSuchAlgorithmException;
import java.security.PublicKey;
import java.security.Signature;
import java.security.SignatureException;
import java.security.spec.InvalidKeySpecException;
import java.security.spec.X509EncodedKeySpec;

/**
 * In-app billing front-end built on Google's In-app Billing v3 AIDL service.
 * Binds the Play billing service, caches owned products/subscriptions locally,
 * launches purchase flows and verifies purchase signatures.
 */
public class PurchaseProcessor extends PurchaseBase {
    public static final int GOOGLE_API_VERSION = 3;

    public static final String PRODUCT_TYPE_MANAGED = "inapp";
    public static final String PRODUCT_TYPE_SUBSCRIPTION = "subs";

    // Billing response codes as defined by the In-app Billing v3 API.
    public static final int BILLING_RESPONSE_RESULT_OK = 0;                   //Success
    public static final int BILLING_RESPONSE_RESULT_USER_CANCELED = 1;        //User pressed back or canceled a dialog
    public static final int BILLING_RESPONSE_RESULT_BILLING_UNAVAILABLE = 3;  //Billing API version is not supported for the type requested
    public static final int BILLING_RESPONSE_RESULT_ITEM_UNAVAILABLE = 4;     //Requested product is not available for purchase
    public static final int BILLING_RESPONSE_RESULT_DEVELOPER_ERROR = 5;      //Invalid arguments provided to the API. This error can also indicate that the application was not correctly signed or properly set up for In-app Billing in Google Play, or does not have the necessary permissions in its manifest
    public static final int BILLING_RESPONSE_RESULT_ERROR = 6;                //Fatal error during the API action
    public static final int BILLING_RESPONSE_RESULT_ITEM_ALREADY_OWNED = 7;   //Failure to purchase since item is already owned
    public static final int BILLING_RESPONSE_RESULT_ITEM_NOT_OWNED = 8;       //Failure to consume since item is not owned

    // Bundle / JSON keys used by the billing service.
    public static final String RESPONSE_CODE = "RESPONSE_CODE";
    public static final String DETAILS_LIST = "DETAILS_LIST";
    public static final String PRODUCTS_LIST = "ITEM_ID_LIST";
    public static final String BUY_INTENT = "BUY_INTENT";
    public static final String INAPP_PURCHASE_DATA_LIST = "INAPP_PURCHASE_DATA_LIST";
    public static final String INAPP_PURCHASE_DATA = "INAPP_PURCHASE_DATA";
    public static final String RESPONSE_INAPP_SIGNATURE = "INAPP_DATA_SIGNATURE";
    public static final String INAPP_DATA_SIGNATURE_LIST = "INAPP_DATA_SIGNATURE_LIST";
    public static final String RESPONSE_ORDER_ID = "orderId";
    public static final String RESPONSE_PRODUCT_ID = "productId";
    public static final String RESPONSE_TYPE = "type";
    public static final String RESPONSE_TITLE = "title";
    public static final String RESPONSE_DESCRIPTION = "description";
    public static final String RESPONSE_PRICE = "price";
    public static final String RESPONSE_PRICE_CURRENCY = "price_currency_code";
    public static final String RESPONSE_PRICE_MICROS = "price_amount_micros";
    public static final String RESPONSE_PURCHASE_TOKEN = "purchaseToken";
    public static final String RESPONSE_PURCHASE_TIME = "purchaseTime";
    public static final String RESPONSE_PAYLOAD = "developerPayload";

    // Library-internal error codes reported through IBillingHandler.onBillingError.
    public static final int BILLING_ERROR_FAILED_LOAD_PURCHASES = 100;
    public static final int BILLING_ERROR_FAILED_TO_INITIALIZE_PURCHASE = 101;
    public static final int BILLING_ERROR_INVALID_SIGNATURE = 102;
    public static final int BILLING_ERROR_LOST_CONTEXT = 103;
    public static final int BILLING_ERROR_OTHER_ERROR = 110;

    /**
     * Callback methods where billing events are reported.
     * Apps must implement one of these to construct a PurchaseProcessor.
     */
    public static interface IBillingHandler {
        void onProductPurchased(String productId, TransactionDetails details);

        void onPurchaseHistoryRestored();

        void onBillingError(int errorCode, Throwable error);

        void onBillingDisabled();

        void onBillingInitialized();
    }

    private static final int PURCHASE_FLOW_REQUEST_CODE = 11784;
    private static final String LOG_TAG = "purchases";
    private static final String SETTINGS_VERSION = ".v6";
    private static final String RESTORE_KEY = ".bills.restored" + SETTINGS_VERSION;
    private static final String MANAGED_PRODUCTS_CACHE_KEY = ".products.cache" + SETTINGS_VERSION;
    private static final String SUBSCRIPTIONS_CACHE_KEY = ".subscriptions.cache" + SETTINGS_VERSION;
    private static final String PURCHASE_PAYLOAD_CACHE_KEY = ".purchase.last" + SETTINGS_VERSION;

    private IInAppBillingService billingService;
    private String contextPackageName;
    private PurchaseCache cachedProducts;
    private PurchaseCache cachedSubscriptions;
    private IBillingHandler eventHandler;

    private ServiceConnection serviceConnection = new ServiceConnection() {
        @Override
        public void onServiceDisconnected(ComponentName name) {
            billingService = null;
            if (eventHandler != null)
                eventHandler.onBillingDisabled();
        }

        @Override
        public void onServiceConnected(ComponentName name, IBinder service) {
            billingService = IInAppBillingService.Stub.asInterface(service);
            // On first successful connection, pull the owned-purchase history once.
            if (!isPurchaseHistoryRestored() && loadOwnedPurchasesFromGoogle()) {
                setPurchaseHistoryRestored();
                if (eventHandler != null)
                    eventHandler.onPurchaseHistoryRestored();
            }
            if (eventHandler != null)
                eventHandler.onBillingInitialized();
        }
    };

    /** Fetches the app's billing public key from the native TERRA layer. */
    private static String verifyServiceKey() {
        Log.d("BILLING", "Getting Billing key...");
        String localKey = TERRALibrary.ApplicationIAPGetID();
        return localKey;
    }

    public PurchaseProcessor(Activity context, IBillingHandler handler) {
        super(context);
        eventHandler = handler;
        contextPackageName = context.getApplicationContext().getPackageName();
        cachedProducts = new PurchaseCache(context, MANAGED_PRODUCTS_CACHE_KEY);
        cachedSubscriptions = new PurchaseCache(context, SUBSCRIPTIONS_CACHE_KEY);
        bindPlayServices();
    }

    /** Binds to the Play Store in-app billing service; errors are logged only. */
    private void bindPlayServices() {
        try {
            Intent iapIntent = new Intent("com.android.vending.billing.InAppBillingService.BIND");
            iapIntent.setPackage("com.android.vending");
            getContext().bindService(iapIntent, serviceConnection, Context.BIND_AUTO_CREATE);
        } catch (Exception e) {
            Log.e(LOG_TAG, e.toString());
        }
    }

    @Override
    public void release() {
        if (serviceConnection != null && getContext() != null) {
            try {
                getContext().unbindService(serviceConnection);
            } catch (Exception e) {
                Log.e(LOG_TAG, e.toString());
            }
            billingService = null;
        }
        cachedProducts.release();
        // BUGFIX: the subscriptions cache was created in the constructor but never
        // released here, leaking its resources.
        cachedSubscriptions.release();
        super.release();
    }

    public boolean isInitialized() {
        return billingService != null;
    }

    public boolean isPurchased(String productId) {
        return cachedProducts.includesProduct(productId);
    }

    public boolean isSubscribed(String productId) {
        return cachedSubscriptions.includesProduct(productId);
    }

    public List<String> listOwnedProducts() {
        return cachedProducts.getContents();
    }

    public List<String> listOwnedSubscriptions() {
        return cachedSubscriptions.getContents();
    }

    /**
     * Reloads owned purchases of the given type from the billing service into
     * cacheStorage. Returns true when the service call succeeded (even if empty).
     */
    private boolean loadPurchasesByType(String type, PurchaseCache cacheStorage) {
        if (!isInitialized())
            return false;
        try {
            Bundle bundle = billingService.getPurchases(PurchaseProcessor.GOOGLE_API_VERSION, contextPackageName, type, null);
            if (bundle.getInt(PurchaseProcessor.RESPONSE_CODE) == PurchaseProcessor.BILLING_RESPONSE_RESULT_OK) {
                cacheStorage.clear();
                ArrayList<String> purchaseList = bundle.getStringArrayList(PurchaseProcessor.INAPP_PURCHASE_DATA_LIST);
                ArrayList<String> signatureList = bundle.getStringArrayList(PurchaseProcessor.INAPP_DATA_SIGNATURE_LIST);
                for (int i = 0; i < purchaseList.size(); i++) {
                    String jsonData = purchaseList.get(i);
                    JSONObject purchase = new JSONObject(jsonData);
                    // The signature list may be shorter than the purchase list; guard the index.
                    String signature = signatureList != null && signatureList.size() > i ? signatureList.get(i) : null;
                    cacheStorage.put(purchase.getString(PurchaseProcessor.RESPONSE_PRODUCT_ID), jsonData, signature);
                }
            }
            return true;
        } catch (Exception e) {
            if (eventHandler != null)
                eventHandler.onBillingError(PurchaseProcessor.BILLING_ERROR_FAILED_LOAD_PURCHASES, e);
            Log.e(LOG_TAG, e.toString());
        }
        return false;
    }

    public boolean loadOwnedPurchasesFromGoogle() {
        return isInitialized() &&
                loadPurchasesByType(PurchaseProcessor.PRODUCT_TYPE_MANAGED, cachedProducts) &&
                loadPurchasesByType(PurchaseProcessor.PRODUCT_TYPE_SUBSCRIPTION, cachedSubscriptions);
    }

    public boolean purchase(String productId) {
        return purchase(productId, PurchaseProcessor.PRODUCT_TYPE_MANAGED);
    }

    public boolean subscribe(String productId) {
        return purchase(productId, PurchaseProcessor.PRODUCT_TYPE_SUBSCRIPTION);
    }

    /**
     * Starts the purchase flow for productId of the given type. A random payload
     * is stored locally and later matched in processActivity to tie the result
     * back to this request. Returns true when the flow was started (or the item
     * was already owned and its details were delivered).
     */
    private boolean purchase(String productId, String purchaseType) {
        if (!isInitialized() || TextUtils.isEmpty(productId) || TextUtils.isEmpty(purchaseType))
            return false;
        try {
            String purchasePayload = purchaseType + ":" + UUID.randomUUID().toString();
            savePurchasePayload(purchasePayload);
            Bundle bundle = billingService.getBuyIntent(PurchaseProcessor.GOOGLE_API_VERSION, contextPackageName, productId, purchaseType, purchasePayload);
            if (bundle != null) {
                int response = bundle.getInt(PurchaseProcessor.RESPONSE_CODE);
                if (response == PurchaseProcessor.BILLING_RESPONSE_RESULT_OK) {
                    PendingIntent pendingIntent = bundle.getParcelable(PurchaseProcessor.BUY_INTENT);
                    if (getContext() != null)
                        getContext().startIntentSenderForResult(pendingIntent.getIntentSender(), PURCHASE_FLOW_REQUEST_CODE, new Intent(), 0, 0, 0);
                    else if (eventHandler != null)
                        eventHandler.onBillingError(PurchaseProcessor.BILLING_ERROR_LOST_CONTEXT, null);
                } else if (response == PurchaseProcessor.BILLING_RESPONSE_RESULT_ITEM_ALREADY_OWNED) {
                    // Already owned: refresh the local cache if needed and report the
                    // existing transaction to the handler.
                    if (!isPurchased(productId) && !isSubscribed(productId)) {
                        loadOwnedPurchasesFromGoogle();
                    }
                    if (eventHandler != null) {
                        TransactionDetails details = getPurchaseTransactionDetails(productId);
                        if (details == null)
                            details = getSubscriptionTransactionDetails(productId);
                        eventHandler.onProductPurchased(productId, details);
                    }
                } else if (eventHandler != null)
                    eventHandler.onBillingError(PurchaseProcessor.BILLING_ERROR_FAILED_TO_INITIALIZE_PURCHASE, null);
            }
            return true;
        } catch (Exception e) {
            Log.e(LOG_TAG, e.toString());
        }
        return false;
    }

    /** Looks up cached transaction details for productId in the given cache; null if absent/corrupt. */
    private TransactionDetails getPurchaseTransactionDetails(String productId, PurchaseCache cache) {
        PurchaseInfo details = cache.getDetails(productId);
        if (details != null && !TextUtils.isEmpty(details.responseData)) {
            try {
                return new TransactionDetails(details);
            } catch (JSONException e) {
                Log.e(LOG_TAG, "Failed to load saved purchase details for " + productId);
            }
        }
        return null;
    }

    /**
     * Consumes a managed product so it can be purchased again. Returns true when
     * the service accepted the consume request and the local cache was updated.
     */
    public boolean consumePurchase(String productId) {
        if (!isInitialized())
            return false;
        try {
            TransactionDetails transactionDetails = getPurchaseTransactionDetails(productId, cachedProducts);
            if (transactionDetails != null && !TextUtils.isEmpty(transactionDetails.purchaseToken)) {
                int response = billingService.consumePurchase(PurchaseProcessor.GOOGLE_API_VERSION, contextPackageName, transactionDetails.purchaseToken);
                if (response == PurchaseProcessor.BILLING_RESPONSE_RESULT_OK) {
                    cachedProducts.remove(productId);
                    Log.d(LOG_TAG, "Successfully consumed " + productId + " purchase.");
                    return true;
                } else {
                    if (eventHandler != null)
                        eventHandler.onBillingError(response, null);
                    Log.e(LOG_TAG, String.format("Failed to consume %s: error %d", productId, response));
                }
            }
        } catch (Exception e) {
            Log.e(LOG_TAG, e.toString());
        }
        return false;
    }

    /** Queries the store listing (title, price, ...) for a single SKU; null on any failure. */
    private SkuDetails getSkuDetails(String productId, String purchaseType) {
        if (billingService != null) {
            try {
                ArrayList<String> skuList = new ArrayList<String>();
                skuList.add(productId);
                Bundle products = new Bundle();
                products.putStringArrayList(PurchaseProcessor.PRODUCTS_LIST, skuList);
                Bundle skuDetails = billingService.getSkuDetails(PurchaseProcessor.GOOGLE_API_VERSION, contextPackageName, purchaseType, products);
                int response = skuDetails.getInt(PurchaseProcessor.RESPONSE_CODE);
                if (response == PurchaseProcessor.BILLING_RESPONSE_RESULT_OK) {
                    for (String responseLine : skuDetails.getStringArrayList(PurchaseProcessor.DETAILS_LIST)) {
                        JSONObject object = new JSONObject(responseLine);
                        String responseProductId = object.getString(PurchaseProcessor.RESPONSE_PRODUCT_ID);
                        if (productId.equals(responseProductId))
                            return new SkuDetails(object);
                    }
                } else {
                    if (eventHandler != null)
                        eventHandler.onBillingError(response, null);
                    Log.e(LOG_TAG, String.format("Failed to retrieve info for %s: error %d", productId, response));
                }
            } catch (Exception e) {
                Log.e(LOG_TAG, String.format("Failed to call getSkuDetails %s", e.toString()));
            }
        }
        return null;
    }

    public SkuDetails getPurchaseListingDetails(String productId) {
        return getSkuDetails(productId, PurchaseProcessor.PRODUCT_TYPE_MANAGED);
    }

    public SkuDetails getSubscriptionListingDetails(String productId) {
        return getSkuDetails(productId, PurchaseProcessor.PRODUCT_TYPE_SUBSCRIPTION);
    }

    public TransactionDetails getPurchaseTransactionDetails(String productId) {
        return getPurchaseTransactionDetails(productId, cachedProducts);
    }

    public TransactionDetails getSubscriptionTransactionDetails(String productId) {
        return getPurchaseTransactionDetails(productId, cachedSubscriptions);
    }

    /**
     * Processes the purchase-flow activity result: validates the developer payload
     * against the one saved in purchase(), verifies the purchase signature, caches
     * the purchase and notifies the handler. Any mismatch or failure is reported
     * via onBillingError.
     */
    public void processActivity(int resultCode, Intent data) {
        // BUGFIX: previously a null Intent caused a NullPointerException on getIntExtra.
        if (data == null) {
            Log.e(LOG_TAG, "processActivity called with null intent");
            if (eventHandler != null)
                eventHandler.onBillingError(PurchaseProcessor.BILLING_ERROR_OTHER_ERROR, null);
            return;
        }
        int responseCode = data.getIntExtra(PurchaseProcessor.RESPONSE_CODE, PurchaseProcessor.BILLING_RESPONSE_RESULT_OK);
        Log.d(LOG_TAG, String.format("resultCode = %d, responseCode = %d", resultCode, responseCode));
        String purchasePayload = getPurchasePayload();
        if (resultCode == Activity.RESULT_OK && responseCode == PurchaseProcessor.BILLING_RESPONSE_RESULT_OK && !TextUtils.isEmpty(purchasePayload)) {
            String purchaseData = data.getStringExtra(PurchaseProcessor.INAPP_PURCHASE_DATA);
            String dataSignature = data.getStringExtra(PurchaseProcessor.RESPONSE_INAPP_SIGNATURE);
            try {
                JSONObject purchase = new JSONObject(purchaseData);
                String productId = purchase.getString(PurchaseProcessor.RESPONSE_PRODUCT_ID);
                String developerPayload = purchase.getString(PurchaseProcessor.RESPONSE_PAYLOAD);
                if (developerPayload == null)
                    developerPayload = "";
                boolean purchasedSubscription = purchasePayload.startsWith(PurchaseProcessor.PRODUCT_TYPE_SUBSCRIPTION);
                if (purchasePayload.equals(developerPayload)) {
                    if (verifyPurchaseSignature(purchaseData, dataSignature)) {
                        PurchaseCache cache = purchasedSubscription ? cachedSubscriptions : cachedProducts;
                        cache.put(productId, purchaseData, dataSignature);
                        if (eventHandler != null) {
                            eventHandler.onProductPurchased(productId, new TransactionDetails(new PurchaseInfo(purchaseData, dataSignature)));
                        }
                        Log.e(LOG_TAG, "Purchase done");
                    } else {
                        Log.e(LOG_TAG, "Public key signature doesn't match!");
                        if (eventHandler != null)
                            eventHandler.onBillingError(PurchaseProcessor.BILLING_ERROR_INVALID_SIGNATURE, null);
                    }
                } else {
                    Log.e(LOG_TAG, String.format("Payload mismatch: %s != %s", purchasePayload, developerPayload));
                    if (eventHandler != null)
                        eventHandler.onBillingError(PurchaseProcessor.BILLING_ERROR_INVALID_SIGNATURE, null);
                }
            } catch (Exception e) {
                Log.e(LOG_TAG, e.toString());
                if (eventHandler != null)
                    eventHandler.onBillingError(PurchaseProcessor.BILLING_ERROR_OTHER_ERROR, null);
            }
        } else {
            if (eventHandler != null)
                eventHandler.onBillingError(PurchaseProcessor.BILLING_ERROR_OTHER_ERROR, null);
        }
    }

    /**
     * Forwards an onActivityResult to the purchase flow. Returns false when the
     * request code is not ours so the caller can handle it.
     */
    public boolean handleActivityResult(int requestCode, int resultCode, Intent data) {
        if (requestCode != PURCHASE_FLOW_REQUEST_CODE) {
            return false;
        }
        processActivity(resultCode, data);
        return true;
    }

    private static final String KEY_FACTORY_ALGORITHM = "RSA";
    private static final String SIGNATURE_ALGORITHM = "SHA1withRSA";

    /**
     * Generates a PublicKey instance from a string containing the
     * Base64-encoded public key.
     *
     * @param encodedPublicKey Base64-encoded public key
     * @throws IllegalArgumentException if encodedPublicKey is invalid
     */
    public static PublicKey getPublicKey(String encodedPublicKey) {
        try {
            byte[] decodedKey = Base64.decode(encodedPublicKey, Base64.DEFAULT);
            KeyFactory keyFactory = KeyFactory.getInstance(KEY_FACTORY_ALGORITHM);
            return keyFactory.generatePublic(new X509EncodedKeySpec(decodedKey));
        } catch (NoSuchAlgorithmException e) {
            throw new RuntimeException(e);
        } catch (InvalidKeySpecException e) {
            Log.e(LOG_TAG, "Invalid key specification.");
            throw new IllegalArgumentException(e);
        } catch (IllegalArgumentException e) {
            Log.e(LOG_TAG, "Base64 decoding failed.");
            throw e;
        }
    }

    public static final String TERRA_BILLING_KEY = "ACGs1a7KNIlawqtS0rI0Zp16q+7mfNV+wlwou3QAyRAfQhP8aYoeJKACAOm6W8oZBLLTrwGyfl8U26bbUrma8xJ/fKKcO2+ZfepQDXYjEv/3ZnMpt5UKrdiLst4g6QpwZBEIyFrxfZ7saaWl1YLXJx6eVs3SdtV5K/clQxxp8Lj5";

    /**
     * Verifies signedData against the embedded TERRA billing key.
     * Returns true only when the signature is valid.
     *
     * BUGFIX: the previous implementation had its booleans inverted — it returned
     * true on verification failure and on every exception, and false on success.
     * The method is currently unused (see verifyPurchaseSignature), but with the
     * old semantics any future caller would have accepted forged receipts.
     */
    private static boolean confirmPurchaseSignature(String signedData, String signature) {
        PublicKey key = getPublicKey(TERRA_BILLING_KEY);
        Signature sig;
        try {
            sig = Signature.getInstance(SIGNATURE_ALGORITHM);
            sig.initVerify(key);
            sig.update(signedData.getBytes());
            if (!sig.verify(Base64.decode(signature, Base64.DEFAULT))) {
                Log.e(LOG_TAG, "TERRA Signature verification failed.");
                return false;
            }
            return true;
        } catch (NoSuchAlgorithmException e) {
            Log.e(LOG_TAG, "NoSuchAlgorithmException.");
        } catch (InvalidKeyException e) {
            Log.e(LOG_TAG, "Invalid key specification.");
        } catch (SignatureException e) {
            Log.e(LOG_TAG, "Signature exception.");
        } catch (IllegalArgumentException e) {
            Log.e(LOG_TAG, "Base64 decoding failed.");
        }
        // Fail closed: treat any error as an invalid signature.
        return false;
    }

    /**
     * Verifies that the data was signed with the app's billing key. The data is
     * a JSON purchase receipt (signed, not encrypted) as delivered by Google Play.
     *
     * @param signedData the signed JSON string
     * @param signature  the Base64-encoded signature for the data, created with the private key
     * @return true only when the signature verifies against the key from {@link #verifyServiceKey()}
     */
    private static boolean verifyPurchaseSignature(String signedData, String signature) {
        String localKey = verifyServiceKey();
        if (!TextUtils.isEmpty(localKey)) {
            try {
                if (TextUtils.isEmpty(signedData) || TextUtils.isEmpty(localKey) || TextUtils.isEmpty(signature)) {
                    Log.e(LOG_TAG, "Purchase verification failed: missing data.");
                    return false;
                }

                PublicKey key = getPublicKey(localKey);
                Signature sig;
                try {
                    sig = Signature.getInstance(SIGNATURE_ALGORITHM);
                    sig.initVerify(key);
                    sig.update(signedData.getBytes());
                    if (!sig.verify(Base64.decode(signature, Base64.DEFAULT))) {
                        Log.e(LOG_TAG, "Signature verification failed.");
                        return false;
                    }
                    return true;
                } catch (NoSuchAlgorithmException e) {
                    Log.e(LOG_TAG, "NoSuchAlgorithmException.");
                } catch (InvalidKeyException e) {
                    Log.e(LOG_TAG, "Invalid key specification.");
                } catch (SignatureException e) {
                    Log.e(LOG_TAG, "Signature exception.");
                } catch (IllegalArgumentException e) {
                    Log.e(LOG_TAG, "Base64 decoding failed.");
                }
                return false;
            } catch (Exception e) {
                return false;
            }
        }
        return false;
    }

    private boolean isPurchaseHistoryRestored() {
        return loadBoolean(getPreferencesBaseKey() + RESTORE_KEY, false);
    }

    public void setPurchaseHistoryRestored() {
        saveBoolean(getPreferencesBaseKey() + RESTORE_KEY, true);
    }

    private void savePurchasePayload(String value) {
        saveString(getPreferencesBaseKey() + PURCHASE_PAYLOAD_CACHE_KEY, value);
    }

    private String getPurchasePayload() {
        return loadString(getPreferencesBaseKey() + PURCHASE_PAYLOAD_CACHE_KEY, null);
    }
}
package com.stanfy.enroscar.goro; import android.content.Context; import android.os.IBinder; import java.util.concurrent.Callable; import java.util.concurrent.Executor; import static com.stanfy.enroscar.goro.BoundGoro.BoundGoroImpl; /** * Handles tasks in multiple queues. */ public abstract class Goro { /** Default queue name. */ public static final String DEFAULT_QUEUE = "default"; /** * Gives access to Goro instance that is provided by a service. * @param binder Goro service binder * @return Goro instance provided by the service */ public static Goro from(final IBinder binder) { if (binder instanceof GoroService.GoroBinder) { return ((GoroService.GoroBinder) binder).goro(); } throw new IllegalArgumentException("Cannot get Goro from " + binder); } /** * Creates a new Goro instance which uses {@link android.os.AsyncTask#THREAD_POOL_EXECUTOR} * to delegate tasks on Post-Honeycomb devices or create a separate thread pool on earlier * Android versions. * @return instance of Goro */ public static Goro create() { return new GoroImpl(); } /** * Creates a new Goro instance which uses the specified executor to delegate tasks. * @param delegateExecutor executor Goro delegates tasks to * @return instance of Goro */ public static Goro createWithDelegate(final Executor delegateExecutor) { GoroImpl goro = new GoroImpl(); goro.queues.setDelegateExecutor(delegateExecutor); return goro; } /** * Creates a Goro implementation that binds to {@link com.stanfy.enroscar.goro.GoroService} * in order to run scheduled tasks in service context. * <p> * This method is functionally identical to * </p> * <pre> * BoundGoro goro = Goro.bindWith(context, new BoundGoro.OnUnexpectedDisconnection() { * public void onServiceDisconnected(BoundGoro goro) { * goro.bind(); * } * }); * </pre> * @param context context that will bind to the service * @return Goro implementation that binds to {@link GoroService}. 
* @see #bindWith(Context, BoundGoro.OnUnexpectedDisconnection) */ public static BoundGoro bindAndAutoReconnectWith(final Context context) { if (context == null) { throw new IllegalArgumentException("Context cannot be null"); } return new BoundGoroImpl(context, null); } /** * Creates a Goro implementation that binds to {@link GoroService} * in order to run scheduled tasks in service context. * {@code BoundGoro} exposes {@code bind()} and {@code unbind()} methods that you can use to connect to * and disconnect from the worker service in other component lifecycle callbacks * (like {@code onStart}/{@code onStop} in {@code Activity} or {@code onCreate}/{@code onDestory} in {@code Service}). * <p> * The worker service ({@code GoroService}) normally stops when all {@code BoundGoro} instances unbind * and all the pending tasks in {@code Goro} queues are handled. * But it can also be stopped by the system server (due to a user action in Settings app or application update). * In this case {@code BoundGoro} created with this method will notify the supplied handler about the event. * </p> * @param context context that will bind to the service * @param handler callback to invoke if worker service is unexpectedly stopped by the system server * @return Goro implementation that binds to {@link GoroService}. */ public static BoundGoro bindWith(final Context context, final BoundGoro.OnUnexpectedDisconnection handler) { if (context == null) { throw new IllegalArgumentException("Context cannot be null"); } if (handler == null) { throw new IllegalArgumentException("Disconnection handler cannot be null"); } return new BoundGoroImpl(context, handler); } /** * Creates a Goro implementation that binds to a worker service to schedule tasks. * This implementation binds to the backing service when one of {@code Goro} methods is invoked, * then delegates all the tasks to the service and unbinds asap. 
* @param context context that will bind to the service * @return Goro implementation that binds to {@link GoroService} */ public static Goro bindOnDemandWith(final Context context) { if (context == null) { throw new IllegalArgumentException("Context cannot be null"); } return new OnDemandBindingGoro(context); } /** * Adds a task execution listener. Must be called from the main thread. * @param listener listener instance */ public abstract void addTaskListener(final GoroListener listener); /** * Removes a task execution listener. Must be called from the main thread. * @param listener listener instance */ public abstract void removeTaskListener(final GoroListener listener); /** * Add a task to the default queue. * This methods returns a future that allows to control task execution. * @param task task instance * @return task future instance */ public abstract <T> ObservableFuture<T> schedule(final Callable<T> task); /** * Add a task to the specified queue. * This methods returns a future that allows to control task execution. * Queue name may be null, if you want to execute the task beyond any queue. * @param queueName name of a queue to use, may be null * @param task task instance * @return task future instance */ public abstract <T> ObservableFuture<T> schedule(final String queueName, final Callable<T> task); /** * Returns an executor for performing tasks in a specified queue. If queue name is null, * {@link #DEFAULT_QUEUE} is used. * @param queueName queue name * @return executor instance that performs tasks serially in a specified queue */ public abstract Executor getExecutor(final String queueName); /** * Removes all the pending tasks from a specified queue. 
* @param queueName queue name, must not be {@code null} */ public final void clear(final String queueName) { if (queueName == null) { throw new IllegalArgumentException("Queue name must not be null"); } removeTasksInQueue(queueName); } protected abstract void removeTasksInQueue(final String queueName); /** Main implementation. */ static class GoroImpl extends Goro { /** Listeners handler. */ final ListenersHandler listenersHandler = new ListenersHandler(); /** Queues. */ private final Queues queues; GoroImpl() { this(new Queues.Impl()); } GoroImpl(final Queues queues) { this.queues = queues; } @Override public void addTaskListener(final GoroListener listener) { listenersHandler.addTaskListener(listener); } @Override public void removeTaskListener(final GoroListener listener) { listenersHandler.removeTaskListenerOrThrow(listener); } @Override public <T> ObservableFuture<T> schedule(final Callable<T> task) { return schedule(DEFAULT_QUEUE, task); } @Override public <T> ObservableFuture<T> schedule(final String queueName, final Callable<T> task) { if (task == null) { throw new IllegalArgumentException("Task must not be null"); } GoroFuture<T> future = new GoroFuture<>(this, task); listenersHandler.postSchedule(task, queueName); queues.getExecutor(queueName).execute(future); return future; } @Override public Executor getExecutor(final String queueName) { return queues.getExecutor(queueName == null ? DEFAULT_QUEUE : queueName); } @Override protected void removeTasksInQueue(final String queueName) { queues.clear(queueName); } } }
/* * Copyright 2000-2010 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jetbrains.android.run; import com.android.ddmlib.AndroidDebugBridge; import com.android.ddmlib.IDevice; import com.android.sdklib.IAndroidTarget; import com.android.sdklib.SdkConstants; import com.android.sdklib.internal.avd.AvdManager; import com.intellij.execution.configurations.GeneralCommandLine; import com.intellij.openapi.project.Project; import com.intellij.openapi.ui.DialogWrapper; import com.intellij.openapi.ui.Messages; import com.intellij.openapi.util.Computable; import org.jetbrains.android.ddms.AdbManager; import org.jetbrains.android.ddms.AdbNotRespondingException; import org.jetbrains.android.facet.AndroidFacet; import org.jetbrains.android.sdk.AndroidSdk; import org.jetbrains.android.util.AndroidBundle; import org.jetbrains.android.util.AndroidUtils; import org.jetbrains.android.util.BooleanCellRenderer; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import javax.swing.*; import javax.swing.event.ListSelectionEvent; import javax.swing.event.ListSelectionListener; import javax.swing.table.AbstractTableModel; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.awt.event.MouseAdapter; import java.awt.event.MouseEvent; import java.io.File; /** * Created by IntelliJ IDEA. 
* User: Eugene.Kudelevsky * Date: May 9, 2009 * Time: 5:33:45 PM * To change this template use File | Settings | File Templates. */ public class AvdChooser extends DialogWrapper { private JTable myAvdTable; private JPanel myPanel; private JButton myRefreshButton; private JButton myCreateButton; private JButton myRemoveButton; private JButton myStartAvdManagerButton; private final AvdManager myAvdManager; private final AndroidFacet myFacet; private final boolean myCompatibleAvd; private final boolean myMustSelect; private static final String[] COLUMN_TITLES = new String[]{"Name", "Target", "Platform", "API Level", "Valid", "Compatible"}; @Nullable private static String getAndroidToolPath(@NotNull AndroidFacet facet) { AndroidSdk sdk = facet.getConfiguration().getAndroidSdk(); if (sdk == null) return null; String androidCmd = SdkConstants.androidCmdName(); return sdk.getLocation() + File.separator + AndroidUtils.toolPath(androidCmd); } public AvdChooser(@NotNull final Project project, @NotNull final AndroidFacet facet, @NotNull AvdManager avdManager, boolean mustSelect, boolean compatibleAvd) { super(project, true); myMustSelect = mustSelect; setTitle(AndroidBundle.message("avd.dialog.title")); init(); myAvdManager = avdManager; myFacet = facet; myAvdTable.setModel(new MyAvdTableModel(myFacet.getAllAvds())); myAvdTable.setSelectionMode(ListSelectionModel.SINGLE_SELECTION); myCompatibleAvd = compatibleAvd; myRefreshButton.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { updateTable(); } }); myCreateButton.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { createAvd(project, facet); } }); myRemoveButton.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { removeSelectedAvd(); } }); myAvdTable.setDefaultRenderer(Boolean.class, new BooleanCellRenderer()); myAvdTable.addMouseListener(new MouseAdapter() { @Override public void mouseClicked(MouseEvent e) { if 
(e.getClickCount() == 2 && e.getButton() == MouseEvent.BUTTON1 && isOKActionEnabled()) { doOKAction(); } } }); final String androidToolPath = getAndroidToolPath(facet); myStartAvdManagerButton.setEnabled(new File(androidToolPath).exists()); myStartAvdManagerButton.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { GeneralCommandLine commandLine = new GeneralCommandLine(); commandLine.setExePath(androidToolPath); AndroidUtils.runExternalToolInSeparateThread(project, commandLine, null); } }); updateTable(); ListSelectionModel selectionModel = myAvdTable.getSelectionModel(); if (mustSelect) { if (myAvdTable.getModel().getRowCount() > 0) { selectionModel.setSelectionInterval(0, 0); } } selectionModel.addListSelectionListener(new ListSelectionListener() { public void valueChanged(ListSelectionEvent e) { updateOkAction(); } }); updateOkAction(); myAvdTable.requestFocus(); } private void updateOkAction() { AvdManager.AvdInfo avd = getSelectedAvd(); getOKAction().setEnabled(avd != null ? 
avd.getStatus() == AvdManager.AvdInfo.AvdStatus.OK : myMustSelect); } @Override public JComponent getPreferredFocusedComponent() { return myAvdTable; } private void createAvd(Project project, AndroidFacet facet) { CreateAvdDialog dialog = new CreateAvdDialog(project, facet, myAvdManager, false, false); dialog.show(); if (dialog.getExitCode() == OK_EXIT_CODE) { updateTable(); } } private boolean isAvdBusy(@NotNull AvdManager.AvdInfo avd) throws AdbNotRespondingException { final AndroidDebugBridge bridge = myFacet.getDebugBridge(); if (bridge == null) return false; IDevice[] devices = AdbManager.compute(new Computable<IDevice[]>() { public IDevice[] compute() { return bridge.getDevices(); } }, true); for (IDevice device : devices) { String avdName = device.getAvdName(); if (avdName != null && avdName.equals(avd.getName())) { return true; } } return false; } private void removeSelectedAvd() { AvdManager.AvdInfo selectedAvd = getSelectedAvd(); if (selectedAvd != null) { try { if (isAvdBusy(selectedAvd)) { Messages.showErrorDialog(myPanel, AndroidBundle.message("cant.remove.avd.error")); } else { myAvdManager.deleteAvd(selectedAvd, AndroidUtils.getSdkLog(myPanel)); updateTable(); } } catch (AdbNotRespondingException e) { Messages.showErrorDialog(myPanel, e.getMessage()); } } } private void updateTable() { int selected = myAvdTable.getSelectedRow(); myAvdTable.setModel(new MyAvdTableModel(myFacet.getAllAvds())); myAvdTable.getSelectionModel().setSelectionInterval(selected, selected); } protected JComponent createCenterPanel() { return myPanel; } @Nullable public AvdManager.AvdInfo getSelectedAvd() { MyAvdTableModel model = (MyAvdTableModel)myAvdTable.getModel(); int selectedRow = myAvdTable.getSelectedRow(); return selectedRow >= 0 && selectedRow < model.myInfos.length ? 
model.myInfos[selectedRow] : null; } public void setSelectedAvd(@NotNull String avdName) { MyAvdTableModel model = (MyAvdTableModel)myAvdTable.getModel(); for (int i = 0; i < model.myInfos.length; i++) { AvdManager.AvdInfo info = model.myInfos[i]; if (info.getName().equals(avdName)) { myAvdTable.getSelectionModel().setSelectionInterval(i, i); } } } private class MyAvdTableModel extends AbstractTableModel { private final AvdManager.AvdInfo[] myInfos; public MyAvdTableModel(AvdManager.AvdInfo[] infos) { myInfos = infos; } @Override public String getColumnName(int column) { return COLUMN_TITLES[column]; } public int getRowCount() { return myInfos.length; } public int getColumnCount() { return COLUMN_TITLES.length; } @Nullable public Object getValueAt(int rowIndex, int columnIndex) { AvdManager.AvdInfo info = myInfos[rowIndex]; IAndroidTarget target = info.getTarget(); final String unknown = "<unknown>"; switch (columnIndex) { case 0: return info.getName(); case 1: return target != null ? target.getName() : unknown; case 2: return target != null ? target.getVersionName() : unknown; case 3: return target != null ? target.getVersion().getApiString() : unknown; case 4: return info.getStatus() == AvdManager.AvdInfo.AvdStatus.OK; case 5: return myFacet.isCompatibleAvd(info); } return null; } @Override public Class<?> getColumnClass(int columnIndex) { if (columnIndex == 4 || columnIndex == 5) { return Boolean.class; } return String.class; } } @Override protected String getHelpId() { return "reference.android.selectAVD"; } @Override protected void doOKAction() { AvdManager.AvdInfo selectedAvd = getSelectedAvd(); if (myCompatibleAvd && selectedAvd != null && !myFacet.isCompatibleAvd(selectedAvd)) { Messages.showErrorDialog(myPanel, AndroidBundle.message("select.compatible.avd.error")); return; } super.doOKAction(); } }
/**
 * Sapelli data collection platform: http://sapelli.org
 *
 * Copyright 2012-2014 University College London - ExCiteS group
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package uk.ac.ucl.excites.sapelli.collector.control;

import java.io.File;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.Stack;

import org.apache.commons.io.FileUtils;

import uk.ac.ucl.excites.sapelli.collector.db.ProjectStore;
import uk.ac.ucl.excites.sapelli.collector.io.FileStorageException;
import uk.ac.ucl.excites.sapelli.collector.io.FileStorageProvider;
import uk.ac.ucl.excites.sapelli.collector.model.Field;
import uk.ac.ucl.excites.sapelli.collector.model.FieldParameters;
import uk.ac.ucl.excites.sapelli.collector.model.Form;
import uk.ac.ucl.excites.sapelli.collector.model.Form.Next;
import uk.ac.ucl.excites.sapelli.collector.model.Project;
import uk.ac.ucl.excites.sapelli.collector.model.Trigger;
import uk.ac.ucl.excites.sapelli.collector.model.fields.BelongsToField;
import uk.ac.ucl.excites.sapelli.collector.model.fields.ButtonField;
import uk.ac.ucl.excites.sapelli.collector.model.fields.CheckBoxField;
import uk.ac.ucl.excites.sapelli.collector.model.fields.ChoiceField;
import uk.ac.ucl.excites.sapelli.collector.model.fields.EndField;
import uk.ac.ucl.excites.sapelli.collector.model.fields.LabelField;
import uk.ac.ucl.excites.sapelli.collector.model.fields.LinksToField;
import uk.ac.ucl.excites.sapelli.collector.model.fields.LocationField;
import uk.ac.ucl.excites.sapelli.collector.model.fields.MediaField;
import uk.ac.ucl.excites.sapelli.collector.model.fields.MultiListField;
import uk.ac.ucl.excites.sapelli.collector.model.fields.OrientationField;
import uk.ac.ucl.excites.sapelli.collector.model.fields.Page;
import uk.ac.ucl.excites.sapelli.collector.model.fields.TextBoxField;
import uk.ac.ucl.excites.sapelli.collector.ui.CollectorUI;
import uk.ac.ucl.excites.sapelli.shared.io.FileHelpers;
import uk.ac.ucl.excites.sapelli.shared.util.CollectionUtils;
import uk.ac.ucl.excites.sapelli.shared.util.ExceptionHelpers;
import uk.ac.ucl.excites.sapelli.shared.util.Logger;
import uk.ac.ucl.excites.sapelli.storage.db.RecordStore;
import uk.ac.ucl.excites.sapelli.storage.model.Record;
import uk.ac.ucl.excites.sapelli.storage.model.RecordReference;
import uk.ac.ucl.excites.sapelli.storage.model.columns.ForeignKeyColumn;
import uk.ac.ucl.excites.sapelli.storage.queries.constraints.Constraint;
import uk.ac.ucl.excites.sapelli.storage.types.Location;

/**
 * Abstract Controller class: drives navigation through the forms and fields of a
 * collector Project, manages form sessions (history, records, attachments, triggers)
 * and dispatches field-specific behaviour via the FieldVisitor interface.
 *
 * @param <CUI> the concrete CollectorUI type this controller drives
 *
 * @author mstevens, Michalis Vitos, Julia
 */
public abstract class Controller<CUI extends CollectorUI<?, ?>> implements FieldVisitor
{

	// STATICS-------------------------------------------------------
	protected static final String LOG_PREFIX = "Collector_";
	public static final int VIBRATION_DURATION_MS = 600;

	/**
	 * The mode in which a {@link Form} is opened
	 */
	public static enum Mode
	{
		CREATE,
		EDIT,
		//SELECT
	}

	/**
	 * Determines what should happen when the current field is (attempted to be) left.
	 */
	public static enum LeaveRule
	{
		/**
		 * Leaving the current field will only be allowed if validation (& value storage) is successful.
		 */
		CONDITIONAL,

		/**
		 * The current field must be unconditionally left but valid values may be stored.
		 */
		UNCONDITIONAL_WITH_STORAGE,

		/**
		 * The current field must be unconditionally left without any validation or value storage happening.
		 */
		UNCONDITIONAL_NO_STORAGE
	}

	/**
	 * Checks whether the given field is to be shown in the current Mode.
	 * Note that disabled fields may still be shown (e.g. displayed grayed-out).
	 *
	 * @param mode the current form mode
	 * @param field the field to check
	 * @return whether the field should be shown in this mode
	 */
	static public boolean IsFieldToBeShown(Mode mode, Field field)
	{
		switch(mode)
		{
			case CREATE:
				return field.isShowOnCreate();
			case EDIT:
				return field.isShowOnEdit();
			default:
				throw new IllegalStateException("Unknown Mode: " + mode.name());
		}
	}

	/**
	 * Checks whether the given field is currently enabled.
	 * While disabled fields may still be shown, a field that is *not* allowed to be shown is always disabled.
	 *
	 * @param currFormSession session providing the mode and any runtime enabledness override
	 * @param field the field to check
	 * @return whether the field is enabled
	 */
	static public boolean IsFieldEnabled(FormSession currFormSession, Field field)
	{
		return IsFieldEnabled(currFormSession.mode, currFormSession.getRuntimeEnabled(field), field);
	}

	/**
	 * Checks whether the given field is currently enabled.
	 * While disabled fields may still be shown, a field that is *not* allowed to be shown is always disabled.
	 *
	 * @param mode the current form mode
	 * @param field the field to check
	 * @return whether the field is enabled
	 */
	static public boolean IsFieldEnabled(Mode mode, Field field)
	{
		return IsFieldEnabled(mode, null, field);
	}

	/**
	 * Checks whether the given field is currently enabled.
	 * While disabled fields may still be shown, a field that is *not* allowed to be shown is always disabled.
	 *
	 * @param mode the current form mode
	 * @param runtimeEnabled runtime override of the field's static enabledness (null = no override)
	 * @param field the field to check
	 * @return whether the field is enabled
	 */
	static public boolean IsFieldEnabled(Mode mode, Boolean runtimeEnabled, Field field)
	{
		return	// a field that is *not* allowed to be shown is always disabled:
				IsFieldToBeShown(mode, field)
				// "runtime enabledness" (kept in FormSession, but rarely used) has preference over "static enabledness" (kept in the Field object itself, true by default):
				&& (runtimeEnabled != null ? runtimeEnabled : field.isEnabled())
				// when in EDIT mode the field must be editable to be enabled:
				&& (mode != Mode.EDIT || field.isEditable());
	}

	// DYNAMICS------------------------------------------------------
	protected final Project project;
	protected final CUI ui;
	protected final ProjectStore projectStore;
	protected final RecordStore recordStore;
	protected final FileStorageProvider fileStorageProvider;
	// Per-project logger; null when the project has logging disabled (see startProject()):
	protected Logger logger;

	// Stack of previously visited form sessions, used by goToPreviousForm():
	protected Stack<FormSession> formHistory;
	protected FormSession currFormSession;
	protected FormSession prevFormSession;

	// True while a user-initiated "back" request is being handled; turns automatic
	// forward requests into back requests (see advance()):
	protected boolean handlingUserGoBackRequest = false;

	protected volatile boolean blockedUI = false;

	/**
	 * @param project the project being collected
	 * @param ui the UI this controller drives
	 * @param projectStore store for project meta-data
	 * @param recordStore store for collected records
	 * @param fileStorageProvider provides project file/folder locations
	 */
	public Controller(Project project, CUI ui, ProjectStore projectStore, RecordStore recordStore, FileStorageProvider fileStorageProvider)
	{
		this.project = project;
		this.ui = ui;
		this.projectStore = projectStore;
		this.recordStore = recordStore;
		this.fileStorageProvider = fileStorageProvider;

		// Collections:
		formHistory = new Stack<FormSession>();
	}

	/**
	 * Starts (or restarts) the project: sets up logging if enabled, resets all
	 * session state and opens a Create-mode session on the project's start form.
	 */
	public void startProject()
	{
		if(project.isLogging())
		{
			try
			{
				logger = createLogger();

				// Log the DeviceID
				logger.addLine("DeviceID (CRC32)", String.valueOf(getDeviceID()));
				logger.addBlankLine();

				// Log the start of the project
				logger.addLine("PROJECT_START", project.toString());
				logger.addBlankLine();
			}
			catch(FileStorageException fse)
			{
				fse.printStackTrace(System.err);
			}
			catch(IOException ioe)
			{
				ioe.printStackTrace(System.err);
			}
		}

		// Clear/reset:
		prevFormSession = null;
		currFormSession = null;
		formHistory.clear();
		handlingUserGoBackRequest = false;

		// Open a Create-mode session for the startForm:
		openFormSession(FormSession.Create(project.getStartForm(), this));
	}

	/**
	 * Opens a new form session (not a resume of a previous one).
	 */
	protected void openFormSession(FormSession formSession)
	{
		openFormSession(formSession, false);
	}

	/**
	 * Opens the given form session: tears down the current session (triggers, history),
	 * starts/stops location listening as the new form requires, sets up its triggers and
	 * navigates to the appropriate field.
	 *
	 * @param formSession the session to open
	 * @param resumeForm true when re-opening a previously visited session (continue where we left off)
	 */
	protected void openFormSession(FormSession formSession, boolean resumeForm)
	{
		// Deal with current form session:
		if(currFormSession != null)
		{
			disableTriggers(currFormSession.form.getTriggers()); // disable triggers
			prevFormSession = currFormSession; // remember previous formSession (always)
			if(!resumeForm &&								// If we are not "coming back",
				currFormSession.form != formSession.form &&	// AND we are not looping within the same form,
				!currFormSession.form.isSkipOnBack())		// AND the previous form does not have skipOnBack=true,
				formHistory.push(currFormSession);			// THEN: add previous formSession to history
		}
		currFormSession = formSession;
		currFormSession.setCurrentFieldDisplayed(false); //!!!

		// Log start form
		addLogLine("FORM_START", currFormSession.form.getName() + " (index: " + currFormSession.form.getPosition() + ")", "mode: " + currFormSession.mode.name());

		// Location...
		List<LocationField> lfStartWithForm = currFormSession.form.getLocationFields(true);
		if(!lfStartWithForm.isEmpty())
			startLocationListener(lfStartWithForm); // start listening for location updates
		else
			stopLocationListener(); // stop listening for location updates (if we were still listening for another form for example)

		// Setup the triggers
		setupTriggers(currFormSession.form.getTriggers());

		// Go to field...
		if(resumeForm && currFormSession.atField())
			goTo(currFormSession.getCurrent()); // continue where we left off
		else
			goTo(new FieldWithArguments(currFormSession.form.getStartField())); // begin filling out the form at the start field
	}

	/**
	 * Discards the current record (without saving) and restarts the current form from its start field.
	 */
	public void cancelAndRestartForm()
	{
		goTo(new FieldWithArguments(new EndField(currFormSession.form, false, Next.LOOPFORM)), LeaveRule.UNCONDITIONAL_NO_STORAGE); // loop without saving first (forced leaving of current field)
	}

	/**
	 * Discards the current record (without saving) and exits the app.
	 */
	public void cancelAndStop()
	{
		goTo(new FieldWithArguments(new EndField(currFormSession.form, false, Next.EXITAPP)), LeaveRule.UNCONDITIONAL_NO_STORAGE); // exit without saving first (forced leaving of current field)
	}

	/**
	 * Re-opens the most recent form session on the history stack, if any.
	 *
	 * @return true if a previous form was opened, false when the history is empty
	 */
	public boolean goToPreviousForm()
	{
		if(formHistory.empty())
			return false;
		//else:
		openFormSession(formHistory.pop(), true); // re-open previous form
		return true;
	}

	/**
	 * Go forward to next field (either the one below the current one or the one it jumps to)
	 *
	 * @param requestedByUser whether the user (rather than the controller) asked to move forward
	 */
	public void goForward(boolean requestedByUser)
	{
		advance(requestedByUser, true); // jump is allowed
	}

	/**
	 * Advance to the field below the current field, or, if allowJump is true, the one the current field jumps to.
	 *
	 * @param requestedByUser whether the user asked to move forward
	 * @param allowJump whether the current field's jump target may be followed
	 */
	protected void advance(boolean requestedByUser, boolean allowJump)
	{
		if(handlingUserGoBackRequest && !requestedByUser)
		{	// We are currently handling a user *back* request and this is an automatic *forward* request, then we should be back instead of forward!
			goBack(false);
		}
		else
		{	// Normal going forward:
			if(currFormSession.atField())
				goTo(currFormSession.form.getNextFieldAndArguments(getCurrentField(), allowJump));
			else
				openFormSession(currFormSession); // this shouldn't happen really...
		}
	}

	/**
	 * Go back to previous field or form
	 *
	 * @param requestedByUser whether the user (rather than the controller) asked to go back
	 */
	public void goBack(boolean requestedByUser)
	{
		if(requestedByUser)
			// Remember we are handling a user initiated goBack request, this will turn subsequently triggered automatic goForward requests into goBack requests!
			handlingUserGoBackRequest = true; // Do *not* replace this by: handlingGoBackRequest = requestedByUser

		// Try to go to previous field...
		if(currFormSession.canGoBack())
			goTo(currFormSession.getPrevious(true), LeaveRule.UNCONDITIONAL_WITH_STORAGE); // force leaving but allow storage (if valid)
		else
			// Try to go to previous form...
			goToPreviousForm();

		// Reset user go back request flag:
		if(requestedByUser)
			handlingUserGoBackRequest = false;
	}

	/**
	 * Return whether the controller is moving backwards (or forwards), by user request.
	 *
	 * @return true if going back and false otherwise
	 */
	public boolean isGoingBack()
	{
		return handlingUserGoBackRequest;
	}

	/**
	 * @param withinFormOnly only consider the current form's field history
	 * @return whether of not we can go back to a previous field or (if withinFormOnly=false) form
	 */
	public boolean canGoBack(boolean withinFormOnly)
	{
		return (currFormSession != null && currFormSession.canGoBack()) || (!withinFormOnly && !formHistory.empty());
	}

	/**
	 * Re-enter current field
	 *
	 * @param leaveRule determines what should happen when attempting to leave the current field
	 */
	public void goToCurrent(LeaveRule leaveRule)
	{
		goTo(currFormSession.getCurrent(), leaveRule);
	}

	/**
	 * Go to the given field with arguments, leaving the current field only upon successful validation.
	 */
	public void goTo(FieldWithArguments nextFieldAndArguments)
	{
		goTo(nextFieldAndArguments, LeaveRule.CONDITIONAL); // only leave upon successful validation (& value storage)
	}

	/**
	 * Go to the given field with arguments
	 *
	 * @param nextFieldAndArguments the field (with arguments) to go to
	 * @param leaveRule determines what should happen when attempting to leave the current field
	 */
	public synchronized void goTo(FieldWithArguments nextFieldAndArguments, LeaveRule leaveRule)
	{
		// Null check...
		if(nextFieldAndArguments == null || nextFieldAndArguments.field == null)
		{
			addLogLine("NULL_FIELD");
			return;
		}

		// Try to leave the currently displayed field...
		if(currFormSession.atField() && currFormSession.isCurrentFieldDisplayed() && !ui.getCurrentFieldUI().leaveField(currFormSession.record, leaveRule))
		{
			addLogLine("STAY", "Not allowed to leave field " + getCurrentField().id);
			return; // not allowed to leave
		}

		// Next field becomes the (new) current field...
		currFormSession.setCurrent(nextFieldAndArguments); // deals with history as well

		// Temp variable for the new current field (avoids calling getters, and used to check whether another goForward/goTo call happens from the enter() method below):
		Field currField = currFormSession.getCurrentField();

		// Skip the new current field if it is not meant to be shown in the current form mode:
		if(!isFieldToBeShown(currField))
		{
			addLogLine("SKIPPING", currField.id, "Not shown on " + currFormSession.mode.name());
			advance(false, false); // no jump allowed
			return;
		}

		// Entering new current field...
		addLogLine("REACHED", currField.id);
		boolean needsUIUpdate = currField.enter(this, currFormSession.getCurrentFieldArguments(), false); // pass arguments to enter()

		// UI update, if (still) needed:
		if(currFormSession.getCurrentField() == currField)
		{	// If the current field hasn't changed as a result of the enter() call...
			if(needsUIUpdate)
				setFieldInUI(currField); // update UI if needed
			currFormSession.setCurrentFieldDisplayed(needsUIUpdate); // remember whether current field is displayed
		}
		//else: when the current field *has* changed as part of the entering then we are done here
	}

	/**
	 * Can be overridden, e.g. to ensure the right (main/UI) thread is used
	 *
	 * @param newCurrentField the field to display
	 */
	protected void setFieldInUI(Field newCurrentField)
	{
		ui.setField(newCurrentField);
	}

	/**
	 * Checks whether the given field is currently enabled.
	 * While disabled fields may still be shown, a field that is *not* allowed to be shown is always disabled.
	 *
	 * @param field the field to check
	 * @return whether the field is enabled
	 */
	public boolean isFieldEnabled(Field field)
	{
		return IsFieldEnabled(currFormSession, field);
	}

	/**
	 * Checks whether the given field is to be shown in the current Mode.
	 * Note that disabled fields may still be shown (e.g. displayed grayed-out).
	 *
	 * @param field the field to check
	 * @return whether the field should be shown
	 */
	public boolean isFieldToBeShown(Field field)
	{
		return IsFieldToBeShown(currFormSession.mode, field);
	}

	/**
	 * Finalises and stores the current record and keeps its media attachments,
	 * then signals success (vibration/sound) as configured on the form.
	 */
	protected void saveRecordAndAttachments()
	{
		if(!currFormSession.form.isProducesRecords()) //!!!
			return;

		// Finalise the currentRecord:
		currFormSession.form.finish(currFormSession.record); // (re)sets the end-time if necessary

		// Log record:
		addLogLine("RECORD", currFormSession.record.toString());

		// Store currentRecord:
		try
		{
			recordStore.store(currFormSession.record);
		}
		catch(Exception e)
		{
			e.printStackTrace(System.err);
			addLogLine("ERROR", "Upon saving record", ExceptionHelpers.getMessageAndCause(e));
			return;
		}

		// Persist attachments (which are already in the correct folder) by forgetting about them,
		// so they are not deleted when the controller is stopped (e.g. upon activity destroy).
		currFormSession.getMediaAttachments().clear(); // !!!

		// Signal the successful storage of the currentRecord
		// Vibration
		if(currFormSession.form.isVibrateOnSave())
			vibrate(VIBRATION_DURATION_MS);
		// Play sound
		File endSoundFile = fileStorageProvider.getProjectSoundFile(project, currFormSession.form.getSaveSoundRelativePath());
		if(FileHelpers.isReadableFile(endSoundFile))
			playSound(endSoundFile);
	}

	/**
	 * Makes the record null & deletes any media attachments.
	 *
	 * Note:
	 * 	Making the record null is necessary to avoid that unsaved foreign records are used
	 * 	(i.e. referred to with a foreign key value) when returning to a BelongsTo field in
	 * 	a previous form (see {@link #enterBelongsTo(BelongsToField, FieldParameters)}).
	 * 	Doing so is risky however because an NPE will be thrown (likely crashing the app)
	 * 	when some FieldUI or controller method attempts to (illegally!) use the record
	 * 	after this discard operation. Obviously that shouldn't happen but we've had several
	 * 	cases in which it did. However, all (known) cases have been resolved and any new
	 * 	similar cases would be revealed soon by an NPE and/or crash.
	 */
	protected void discardRecordAndAttachments()
	{
		// Discard record:
		currFormSession.record = null; // !!!

		// Delete any attachments:
		for(File attachment : currFormSession.getMediaAttachments())
			FileUtils.deleteQuietly(attachment);
		currFormSession.getMediaAttachments().clear();
	}

	/**
	 * Enters a ChoiceField: shows it only when it has at least one enabled (selectable) child;
	 * otherwise it is skipped automatically.
	 */
	@Override
	public boolean enterChoiceField(ChoiceField cf, FieldParameters arguments, boolean withPage)
	{
		if(withPage)
			return true;
		// else (not with page):
		// Deal with leaves:
		if(cf.isLeaf())
			return false; // this should never happen

		// Add the choice options to the log files
		addLogLine("CHOICE_OPTIONS", cf.getChildren().toString());

		// The UI needs to be updated to show this ChoiceField, but only if there is at least one enabled (i.e. selectable) child:
		for(ChoiceField child : cf.getChildren())
			if(IsFieldEnabled(currFormSession, child))
				return true;
		// This ChoiceField currently has no enabled children, so we should skip it:
		goForward(false);
		return false;
	}

	/**
	 * Enters a MediaField: skipped when the maximum number of attachments has already been reached.
	 */
	@Override
	public boolean enterMediaField(MediaField mf, FieldParameters arguments, boolean withPage)
	{
		if(withPage)
			return true;
		else
		{
			if(mf.isMaxReached(currFormSession.record))
			{	// Maximum number of attachments for this field is reached:
				goForward(false); // skip field
				//TODO this needs to change if we allow to delete previously generated media
				return false;
			}
			return true;
		}
	}

	/**
	 * Enters a LocationField: starts listening for a location fix unless a good enough
	 * current best location could already be stored (in which case the wait screen is skipped).
	 */
	@Override
	public boolean enterLocationField(LocationField lf, FieldParameters arguments, boolean withPage)
	{
		if(withPage && !(lf.getStartWith() == LocationField.StartWith.PAGE))
			return false;

		if(lf.isWaitAtField() || /*try to use currentBestLocation:*/ !lf.storeLocation(currFormSession.record, getCurrentBestLocation()))
		{
			startLocationListener(lf); // start listening for a location
			return true;
		}
		else
		{	// we already have a (good enough) location
			if(!withPage)
				goForward(false); // skip the wait screen
			return false;
		}
	}

	/**
	 * Enters an OrientationField: starts the orientation listener (unless entered as part of a page).
	 */
	@Override
	public boolean enterOrientationField(OrientationField of, FieldParameters arguments, boolean withPage)
	{
		if(!withPage)
			startOrientationListener();
		return true; // update UI (even though the orientation values are typically received instantaneously and the UI might never actually be seen by the user)
	}

	/**
	 * Enters a Page: enters each of its shown child fields (flagged as with-page) and sets up the page's triggers.
	 */
	@Override
	public boolean enterPage(Page page, FieldParameters arguments)
	{
		// Enter child fields (but signal that they are entered as part of entering the page):
		for(Field f : page.getFields())
		{
			if(!isFieldToBeShown(f))
				addLogLine("SKIPPING", f.id, "not shown on " + currFormSession.mode.name());
			else
				f.enter(this, FieldParameters.EMPTY, true); // enter with page (but don't pass on the arguments)
		}

		// Setup the triggers
		setupTriggers(page.getTriggers());

		return true;
	}

	/**
	 * Enters a LinksToField. Not implemented yet (see TODO below).
	 */
	@Override
	public boolean enterLinksTo(LinksToField linksTo, FieldParameters arguments)
	{
		//TODO enterLinksTo
		// Record foreignRecord = getHeldRecord(rel);
		// if(foreignRecord != null)
		//	openFormSession(FormSession.Edit(rel.getRelatedForm(), foreignRecord)); // Edit the "held" record
		// else
		//	openFormSession(FormSession.Create(rel.getRelatedForm(), deviceIDHash)); // Open related from to create a new record
		;
		return false;
	}

	/**
	 * Enters a BelongsToField: resolves (or waits for) the foreign record this record belongs to.
	 * NOTE(review): method truncated at the end of this file chunk -- the remainder is not visible here.
	 */
	@Override
	public boolean enterBelongsTo(BelongsToField belongsTo, FieldParameters arguments)
	{
		ForeignKeyColumn column = belongsTo.getColumn();
		Constraint constraints = belongsTo.getConstraints();
		RecordReference foreignKey = column.retrieveValue(currFormSession.record); // foreignKey may be null

		if(!arguments.getBoolean(BelongsToField.PARAMETER_WAITING_FOR_RELATED_FORM, false))
		{	// We were *not* waiting for a return from the relatedForm
			// Check if we already have a value...
if(foreignKey != null) { // We already have a foreign key value if(arguments.getBoolean(BelongsToField.PARAMETER_EDIT, false)) { // We are in edit mode (the edit argument was true): arguments.put(BelongsToField.PARAMETER_WAITING_FOR_RELATED_FORM, Boolean.TRUE.toString()); // remember we are waiting for relatedForm openFormSession(FormSession.Edit(belongsTo.getRelatedForm(), recordStore.retrieveRecord(foreignKey.getRecordQuery()), this)); // open relatedForm to edit foreign record } else // We are not in edit mode (the edit argument was false, or more likely, missing) goForward(false); // continue to next field } else { // We don't have a foreign key value yet // Note: we ignore the edit argument here because we only allow editing if a value is already set Record foreignRecord = null; // Check is we are allowed to hold on to foreign records: if(belongsTo.isHoldForeignRecord()) { // The Relationship is allowed to hold on to foreign records RecordReference heldForeignKey = projectStore.retrieveHeldForeignKey(belongsTo); foreignRecord = heldForeignKey != null ? 
recordStore.retrieveRecord(heldForeignKey.getRecordQuery()) : null; if(constraints.isValid(foreignRecord)) // passing null will return false { // We have a "held" foreign key, the corresponding foreign record was found and meets the constraints column.storeValue(currFormSession.record, heldForeignKey); // Store foreign key goForward(false); // continue to next field } else { // Either we didn't have a "held" foreign key, OR no corresponding record was found, OR the record didn't meet the constraints projectStore.deleteHeldForeignKey(belongsTo); // clear held foreign key (if there was none nothing will happen) foreignRecord = null; // relatedForm will be opened for creation below } } if(foreignRecord == null) { // We didn't find a valid held foreign record or the relationship is simply *not* allowed to hold on to foreign records arguments.put(BelongsToField.PARAMETER_WAITING_FOR_RELATED_FORM, Boolean.TRUE.toString()); // remember we are waiting for relatedForm openFormSession(FormSession.Create(belongsTo.getRelatedForm(), this)); // open relatedForm to create new record } } } else { // We were waiting to return from the relatedForm... // Clear waitingForRelatedForm parameter: arguments.clear(BelongsToField.PARAMETER_WAITING_FOR_RELATED_FORM); // Check if we really came back from the relatedForm: if(prevFormSession != null && prevFormSession.form == belongsTo.getRelatedForm()) { // ... yes we did. 
Record foreignRecord = prevFormSession.record; if(constraints.isValid(foreignRecord)) // passing null will return false { // The relatedForm produced/edited a non-null record which meets the constraints foreignKey = new RecordReference(foreignRecord); column.storeValue(currFormSession.record, foreignKey); // Store/update foreign key if(belongsTo.isHoldForeignRecord()) projectStore.storeHeldForeignKey(belongsTo, foreignKey); // Store/update "held" foreign key if allowed goForward(true); // continue to next field } else { // Either the relatedForm did not save its record (i.e. it is now null), OR it doesn't meet the constraints if(foreignKey != null || belongsTo.isOptional()) // Either we already have a (previously set) foreign key value, OR we don't need one because the field is optional goForward(true); // continue to next field (keeping the currently stored foreign key if there is one, or keeping it blank if there is none) else // We do not already have a foreign key value & the field is not optional openFormSession(FormSession.Create(belongsTo.getRelatedForm(), this)); // re-open relatedForm to create new record } } else { // we were waiting to return from relatedForm but the previous form is another one: this should never happen(?) // TODO show error & restartForm? } } // TODO "reset starttime upon leave"? 
(would need to happen at every goForward() call) return false; } @Override public boolean enterTextBoxField(TextBoxField tbf, FieldParameters arguments, boolean withPage) { return true; } @Override public boolean enterLabelField(LabelField lblf, FieldParameters arguments, boolean withPage) { return true; } @Override public boolean enterCheckboxField(CheckBoxField cbf, FieldParameters arguments, boolean withPage) { return true; } @Override public boolean enterButtonField(ButtonField buttonField, FieldParameters arguments, boolean withPage) { return true; } @Override public boolean enterMultiListField(MultiListField mlf, FieldParameters arguments, boolean withPage) { return true; } @Override public boolean enterEndField(EndField ef, FieldParameters arguments) { // Logging: addLogLine("FORM_END", ef.id, currFormSession.form.getName(), Long.toString((getElapsedMillis() - currFormSession.startTime) / 1000) + " seconds"); // Save or discard: if(ef.isSave() && currFormSession.form.isProducesRecords()) saveRecordAndAttachments(); else discardRecordAndAttachments(); // Insert blank line in log: addBlankLogLine(); // Go to "next": switch(ef.getNext()) { case LOOPFORM: openFormSession(FormSession.Create(currFormSession.form, this)); break; case LOOPPROJ: startProject(); // formHistory & currFormSession will be cleared break; case PREVFORM: if(!goToPreviousForm()) // try to re-open previous form { // there is no previous form (this shouldn't really happen...): showError("Invalid state: no previous form to return to!", false); //TODO multilang startProject(); // restart project instead } break; case NEXTFORM: Form nextForm = project.getNextForm(currFormSession.form); if(nextForm != null) openFormSession(FormSession.Create(nextForm, this)); else { // there is no next form: showError("Invalid state: there is no next form to go to from here!", false); //TODO multilang startProject(); // restart project instead } break; case EXITAPP: exit(true); // exit controller & application 
break; } // No UI update needed: return false; } /** * Set-up the given triggers. * * @param triggers */ private void setupTriggers(List<Trigger> triggers) { for(Trigger trigger : triggers) setupTrigger(trigger); } /** * Disable the given triggers. * * @param triggers */ public void disableTriggers(List<Trigger> triggers) { for(Trigger trigger : triggers) disableTrigger(trigger); } /** * Set-up the given trigger * * @param trigger */ protected void setupTrigger(Trigger trigger) { // Key press trigger: if(!trigger.getKeys().isEmpty()) { setupKeyPressTrigger(trigger); addLogLine("TRIGGER", "Set-up key press trigger, firing on pressing of " + CollectionUtils.allToString(trigger.getKeys(), false)); } // Fixed timer trigger: if(trigger.getFixedTimer() != Trigger.NO_TIMEOUT) { if(logger != null) logger.addLine("TRIGGER", "Set-up fixed timer trigger, firing in " + trigger.getFixedTimer() + " seconds"); setupTimerTrigger(trigger); } } protected abstract void setupKeyPressTrigger(Trigger trigger); protected abstract void setupTimerTrigger(Trigger trigger); /** * Disable the given trigger * * @param trigger */ protected void disableTrigger(Trigger trigger) { // Key press trigger: if(!trigger.getKeys().isEmpty()) { disableKeyPressTrigger(trigger); addLogLine("TRIGGER", "Disabled key press trigger, firing on pressing of " + CollectionUtils.allToString(trigger.getKeys(), false)); } // Fixed timer trigger: if(trigger.getFixedTimer() != Trigger.NO_TIMEOUT) { if(logger != null) logger.addLine("TRIGGER", "Disabled fixed timer trigger"); disableTimerTrigger(trigger); } } protected abstract void disableKeyPressTrigger(Trigger trigger); protected abstract void disableTimerTrigger(Trigger trigger); /** * Execute trigger * * @param trigger */ public void fireTrigger(Trigger trigger) { if(trigger.getJump() == null) return; addLogLine("TRIGGER", "Fired, jumping to: " + trigger.getJump().id); goTo(new FieldWithArguments(trigger.getJump(), trigger.getNextFieldArguments())); } /** * Stops 
all use/activities of the controller but does not exit the containing application. * I.e. the controller can still be restarted! */ public void discard() { // Logging: addLogLine("FORM_END", "CONTROLLER_DISCARD", currFormSession.form.getName(), Long.toString((getElapsedMillis() - currFormSession.startTime) / 1000) + " seconds"); // Save nothing: discardRecordAndAttachments(); // Exit controller (but not the application): exit(false); } /** * Exit controller & optionally the containing application * * @param exitApp */ protected void exit(boolean exitApp) { // Cancel (timer) triggers: if(currFormSession.form != null) disableTriggers(currFormSession.form.getTriggers()); // Stop sensors: stopLocationListener(); stopOrientationListener(); // Close log file: if(logger != null) { logger.addFinalLine("EXIT_COLLECTOR", project.getName(), currFormSession.form.id); // closes the logger & underlying file(writer) logger = null; } if(exitApp) exitApp(); } /** * @return the project */ public Project getProject() { return project; } /** * @return the currentRecord */ public Record getCurrentRecord() { return currFormSession.record; } /** * @return the current Form */ public Form getCurrentForm() { return currFormSession.form; } /** * @return the mode of the currently open form */ public Mode getCurrentMode() { return currFormSession.mode; } /** * @return the current Field */ public Field getCurrentField() { return currFormSession.getCurrentField(); } /** * @return the current FieldArguments */ public FieldParameters getCurrentFieldArguments() { return currFormSession.getCurrentFieldArguments(); } public void addLogLine(String... 
fields) { if(logger != null) logger.addLine(fields); } public void addBlankLogLine() { if(logger != null) logger.addBlankLine(); } public FileStorageProvider getFileStorageProvider() { return fileStorageProvider; } protected void startLocationListener(LocationField locField) { startLocationListener(Arrays.asList(locField)); } protected abstract void startOrientationListener(); protected abstract void stopOrientationListener(); protected abstract void startLocationListener(List<LocationField> locFields); protected abstract void stopLocationListener(); public abstract Location getCurrentBestLocation(); public void addMediaAttachment(File mediaAttachment) { currFormSession.addMediaAttachment(mediaAttachment); } protected abstract void vibrate(int durationMS); protected abstract void playSound(File soundFile); protected abstract void showError(String errorMsg, boolean exit); protected abstract void exitApp(); protected abstract long getDeviceID(); /** * Returns the number of milliseconds since system boot.<br/> * Should use a realtime monotonic clock, i.e. independent of time(zone) changes, deep sleep, CPU power saving, etc. * * @return number of milliseconds since system boot */ protected abstract long getElapsedMillis(); /** * @return the blockedUI */ public synchronized boolean isUIBlocked() { return blockedUI; } /** * Block UI */ public synchronized void blockUI() { this.blockedUI = true; } /** * Unblock UI */ public synchronized void unblockUI() { this.blockedUI = false; } protected Logger createLogger() throws FileStorageException, IOException { return new Logger(fileStorageProvider.getProjectLogsFolder(project, true).getAbsolutePath(), LOG_PREFIX, true); } }
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.4
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2011.12.14 at 03:30:44 PM CET
//

package ch.epfl.bbp.uima.xml.archivearticle3;

import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.JAXBElement;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElementRef;
import javax.xml.bind.annotation.XmlElementRefs;
import javax.xml.bind.annotation.XmlMixed;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlSchemaType;
import javax.xml.bind.annotation.XmlType;


/**
 * <p>JAXB binding for the XML element {@code <fax>} (anonymous complex type).
 *
 * <p>The schema allows <em>mixed</em> content: free text interleaved with an
 * unbounded choice of inline elements (email, ext-link, uri, formatting
 * elements such as bold/italic/monospace/overline/underline, cross-references,
 * inline graphics and formulas, MathML {@code math}, etc. — see the
 * {@code @XmlElementRefs} list on the {@code content} field for the complete
 * set), plus an optional {@code content-type} attribute of XML Schema type
 * {@code anySimpleType}.
 *
 * <p>NOTE: generated code — do not edit by hand; regenerate from the schema.
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "", propOrder = {
    "content"
})
@XmlRootElement(name = "fax")
public class Fax {

    // Mixed content: String chunks for text, element classes for markup.
    @XmlElementRefs({
        @XmlElementRef(name = "sans-serif", type = SansSerif.class),
        @XmlElementRef(name = "monospace", type = Monospace.class),
        @XmlElementRef(name = "ext-link", type = ExtLink.class),
        @XmlElementRef(name = "milestone-start", type = MilestoneStart.class),
        @XmlElementRef(name = "x", type = X.class),
        @XmlElementRef(name = "xref", type = Xref.class),
        @XmlElementRef(name = "roman", type = Roman.class),
        @XmlElementRef(name = "sup", type = Sup.class),
        @XmlElementRef(name = "italic", type = Italic.class),
        @XmlElementRef(name = "sc", type = Sc.class),
        @XmlElementRef(name = "underline-end", type = UnderlineEnd.class),
        @XmlElementRef(name = "related-object", type = RelatedObject.class),
        @XmlElementRef(name = "bold", type = Bold.class),
        @XmlElementRef(name = "private-char", type = PrivateChar.class),
        @XmlElementRef(name = "alternatives", type = Alternatives.class),
        @XmlElementRef(name = "inline-formula", type = InlineFormula.class),
        @XmlElementRef(name = "target", type = Target.class),
        @XmlElementRef(name = "overline-start", type = OverlineStart.class),
        @XmlElementRef(name = "related-article", type = RelatedArticle.class),
        @XmlElementRef(name = "styled-content", type = StyledContent.class),
        @XmlElementRef(name = "fn", type = Fn.class),
        @XmlElementRef(name = "strike", type = Strike.class),
        @XmlElementRef(name = "underline-start", type = UnderlineStart.class),
        @XmlElementRef(name = "sub", type = Sub.class),
        @XmlElementRef(name = "overline", type = Overline.class),
        @XmlElementRef(name = "milestone-end", type = MilestoneEnd.class),
        @XmlElementRef(name = "named-content", type = NamedContent.class),
        @XmlElementRef(name = "inline-graphic", type = InlineGraphic.class),
        @XmlElementRef(name = "tex-math", type = TexMath.class),
        @XmlElementRef(name = "hr", type = Hr.class),
        @XmlElementRef(name = "chem-struct", type = ChemStruct.class),
        @XmlElementRef(name = "overline-end", type = OverlineEnd.class),
        @XmlElementRef(name = "inline-supplementary-material", type = InlineSupplementaryMaterial.class),
        @XmlElementRef(name = "abbrev", type = Abbrev.class),
        @XmlElementRef(name = "email", type = Email.class),
        @XmlElementRef(name = "math", namespace = "http://www.w3.org/1998/Math/MathML", type = JAXBElement.class),
        @XmlElementRef(name = "underline", type = Underline.class),
        @XmlElementRef(name = "uri", type = Uri.class)
    })
    @XmlMixed
    protected List<Object> content;
    // Maps the optional content-type XML attribute.
    @XmlAttribute(name = "content-type")
    @XmlSchemaType(name = "anySimpleType")
    protected String contentType;

    /**
     * Gets the value of the content property.
     *
     * <p>
     * This accessor method returns a reference to the live list,
     * not a snapshot. Therefore any modification you make to the
     * returned list will be present inside the JAXB object.
     * This is why there is not a <CODE>set</CODE> method for the content property.
     *
     * <p>
     * For example, to add a new item, do as follows:
     * <pre>
     *    getContent().add(newItem);
     * </pre>
     *
     * <p>
     * Objects allowed in the list are instances of the inline element classes
     * enumerated in the {@code @XmlElementRefs} annotation on this property
     * (e.g. {@link Bold }, {@link Italic }, {@link Xref }, {@link ExtLink }, ...),
     * plus {@link String } for the mixed text content and
     * {@link JAXBElement }{@code <}{@link MathType }{@code >} for embedded MathML.
     */
    public List<Object> getContent() {
        if (content == null) {
            content = new ArrayList<Object>();
        }
        return this.content;
    }

    /**
     * Gets the value of the contentType property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getContentType() {
        return contentType;
    }

    /**
     * Sets the value of the contentType property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setContentType(String value) {
        this.contentType = value;
    }

}
// -*- mode:java; encoding:utf-8 -*-
// vim:set fileencoding=utf-8:
// @homepage@

package example;

import java.awt.*;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.awt.geom.AffineTransform;
import java.awt.geom.Area;
import java.awt.geom.Ellipse2D;
import java.awt.geom.Point2D;
import java.awt.geom.Rectangle2D;
import java.awt.geom.RectangularShape;
import java.awt.geom.RoundRectangle2D;
import java.awt.image.BufferedImage;
import java.awt.image.ImageObserver;
import java.io.IOException;
import java.io.InputStream;
import java.util.Optional;
import javax.imageio.ImageIO;
import javax.swing.*;

/**
 * Demo panel showing a single "polaroid"-framed image that can be dragged
 * (via the small inner circle) and rotated (via the surrounding ring).
 */
public final class MainPanel extends JPanel {
  private final transient DraggableImageMouseListener imageHandler;

  private MainPanel() {
    super();
    imageHandler = new DraggableImageMouseListener(new ImageIcon(loadImage()));
    addMouseListener(imageHandler);
    addMouseMotionListener(imageHandler);
    setPreferredSize(new Dimension(320, 240));
  }

  /**
   * Loads the demo image from the classpath; any lookup or read failure
   * silently falls back to a generated "missing image" placeholder.
   */
  private static Image loadImage() {
    String path = "example/test.png";
    ClassLoader cl = Thread.currentThread().getContextClassLoader();
    return Optional.ofNullable(cl.getResource(path)).map(u -> {
      try (InputStream s = u.openStream()) {
        return ImageIO.read(s);
      } catch (IOException ex) {
        return makeMissingImage();
      }
    }).orElseGet(MainPanel::makeMissingImage);
  }

  /** Renders a {@link MissingIcon} into a fresh ARGB image. */
  private static Image makeMissingImage() {
    Icon placeholder = new MissingIcon();
    BufferedImage canvas = new BufferedImage(
        placeholder.getIconWidth(), placeholder.getIconHeight(), BufferedImage.TYPE_INT_ARGB);
    Graphics2D g2 = canvas.createGraphics();
    placeholder.paintIcon(null, g2, 0, 0);
    g2.dispose();
    return canvas;
  }

  @Override protected void paintComponent(Graphics g) {
    // super.paintComponent(g);
    int w = getWidth();
    int h = getHeight();
    // Paint a cyclic gray gradient backdrop, then let the listener paint the image:
    Graphics2D backdrop = (Graphics2D) g.create();
    backdrop.setPaint(new GradientPaint(50f, 0f, Color.GRAY, w, h, Color.DARK_GRAY, true));
    backdrop.fillRect(0, 0, w, h);
    backdrop.dispose();
    imageHandler.paint(g, this);
  }

  public static void main(String[] args) {
    EventQueue.invokeLater(MainPanel::createAndShowGui);
  }

  private static void createAndShowGui() {
    try {
      UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName());
    } catch (ClassNotFoundException | InstantiationException
        | IllegalAccessException | UnsupportedLookAndFeelException ex) {
      ex.printStackTrace();
      Toolkit.getDefaultToolkit().beep();
    }
    JFrame frame = new JFrame("@title@");
    frame.setDefaultCloseOperation(WindowConstants.EXIT_ON_CLOSE);
    frame.getContentPane().add(new MainPanel());
    frame.pack();
    frame.setLocationRelativeTo(null);
    frame.setVisible(true);
  }
}

/**
 * Mouse handler + painter for the draggable/rotatable polaroid image.
 * The inner circle is the move handle; the ring between inner and outer
 * circle is the rotate handle.
 */
class DraggableImageMouseListener extends MouseAdapter {
  private static final BasicStroke BORDER_STROKE = new BasicStroke(4f);
  private static final Color BORDER_COLOR = Color.WHITE;
  private static final Color HOVER_COLOR = new Color(0x64_64_FF_C8, true);
  private static final int IR = 40;      // inner circle diameter
  private static final int OR = IR * 3;  // outer circle diameter
  private final Shape border;
  private final Shape polaroid;
  private final RectangularShape inner = new Ellipse2D.Double(0d, 0d, IR, IR);
  private final RectangularShape outer = new Ellipse2D.Double(0d, 0d, OR, OR);
  private final Point2D startPt = new Point2D.Double();           // drag start point
  private final Point2D centerPt = new Point2D.Double(100d, 100d); // center of Image
  private final Dimension imageSz;
  private final Image image;
  private double radian = 45d * (Math.PI / 180d);
  private double startRadian; // rotation offset captured at drag start
  private boolean moverHover;
  private boolean rotatorHover;

  protected DraggableImageMouseListener(ImageIcon ii) {
    super();
    image = ii.getImage();
    imageSz = new Dimension(ii.getIconWidth(), ii.getIconHeight());
    border = new RoundRectangle2D.Double(0d, 0d, imageSz.width, imageSz.height, 10d, 10d);
    polaroid = new Rectangle2D.Double(-2d, -2d, imageSz.width + 4d, imageSz.height + 20d);
    setCirclesLocation(centerPt);
  }

  /** Re-centers both handle circles on the given point. */
  private void setCirclesLocation(Point2D center) {
    double x0 = center.getX();
    double y0 = center.getY();
    inner.setFrameFromCenter(x0, y0, x0 + IR / 2d, y0 - IR / 2d);
    outer.setFrameFromCenter(x0, y0, x0 + OR / 2d, y0 - OR / 2d);
  }

  /** Paints frame, image, hover feedback and border, rotated about the image center. */
  public void paint(Graphics g, ImageObserver observer) {
    Graphics2D g2 = (Graphics2D) g.create();
    g2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
    double halfW = imageSz.width / 2d;
    double halfH = imageSz.height / 2d;
    // Translate so the image center lands on centerPt, then rotate around that center:
    AffineTransform at = AffineTransform.getTranslateInstance(
        centerPt.getX() - halfW, centerPt.getY() - halfH);
    at.rotate(radian, halfW, halfH);
    g2.setPaint(BORDER_COLOR);
    g2.setStroke(BORDER_STROKE);
    Shape frame = at.createTransformedShape(polaroid);
    g2.fill(frame);
    g2.draw(frame);
    g2.drawImage(image, at, observer);
    // Hover feedback: translucent ring while rotating, translucent disc while moving.
    if (rotatorHover) {
      Area ring = new Area(outer);
      ring.subtract(new Area(inner));
      g2.setPaint(HOVER_COLOR);
      g2.fill(ring);
    } else if (moverHover) {
      g2.setPaint(HOVER_COLOR);
      g2.fill(inner);
    }
    g2.setPaint(BORDER_COLOR);
    g2.setStroke(BORDER_STROKE);
    g2.draw(at.createTransformedShape(border));
    g2.dispose();
  }

  @Override public void mouseMoved(MouseEvent e) {
    boolean insideInner = inner.contains(e.getX(), e.getY());
    boolean insideOuter = outer.contains(e.getX(), e.getY());
    moverHover = insideInner;
    rotatorHover = insideOuter && !insideInner;
    e.getComponent().repaint();
  }

  @Override public void mouseReleased(MouseEvent e) {
    rotatorHover = false;
    moverHover = false;
    e.getComponent().repaint();
  }

  @Override public void mousePressed(MouseEvent e) {
    Point pressed = e.getPoint();
    if (outer.contains(e.getX(), e.getY()) && !inner.contains(pressed)) {
      // Start rotating: remember the angular offset between current rotation and cursor.
      rotatorHover = true;
      startRadian = radian - Math.atan2(e.getY() - centerPt.getY(), e.getX() - centerPt.getX());
      e.getComponent().repaint();
    } else if (inner.contains(pressed)) {
      // Start moving: remember where the drag began.
      moverHover = true;
      startPt.setLocation(pressed);
      e.getComponent().repaint();
    }
  }

  @Override public void mouseDragged(MouseEvent e) {
    if (rotatorHover) {
      double dy = e.getY() - centerPt.getY();
      double dx = e.getX() - centerPt.getX();
      radian = startRadian + Math.atan2(dy, dx);
      e.getComponent().repaint();
    } else if (moverHover) {
      centerPt.setLocation(
          centerPt.getX() + e.getX() - startPt.getX(),
          centerPt.getY() + e.getY() - startPt.getY());
      setCirclesLocation(centerPt);
      startPt.setLocation(e.getPoint());
      e.getComponent().repaint();
    }
  }
}

/**
 * 320x240 placeholder icon: white rectangle with a red "X",
 * used when the demo image cannot be loaded.
 */
class MissingIcon implements Icon {
  @Override public void paintIcon(Component c, Graphics g, int x, int y) {
    Graphics2D g2 = (Graphics2D) g.create();
    g2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
    int w = getIconWidth();
    int h = getIconHeight();
    int gap = w / 5;
    g2.setColor(Color.WHITE);
    g2.fillRect(x, y, w, h);
    g2.setColor(Color.RED);
    g2.setStroke(new BasicStroke(w / 8f));
    g2.drawLine(x + gap, y + gap, x + w - gap, y + h - gap);
    g2.drawLine(x + gap, y + h - gap, x + w - gap, y + gap);
    g2.dispose();
  }

  @Override public int getIconWidth() {
    return 320;
  }

  @Override public int getIconHeight() {
    return 240;
  }
}
/* Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.flowable.dmn.engine.impl; import java.util.HashMap; import java.util.List; import java.util.Map; import org.apache.commons.lang3.StringUtils; import org.flowable.common.engine.api.FlowableException; import org.flowable.common.engine.impl.el.ExpressionManager; import org.flowable.dmn.api.DecisionExecutionAuditContainer; import org.flowable.dmn.api.ExecuteDecisionContext; import org.flowable.dmn.engine.RuleEngineExecutor; import org.flowable.dmn.engine.impl.el.ELExecutionContext; import org.flowable.dmn.engine.impl.el.ELExecutionContextBuilder; import org.flowable.dmn.engine.impl.el.ELExpressionExecutor; import org.flowable.dmn.engine.impl.el.ExecutionVariableFactory; import org.flowable.dmn.engine.impl.hitpolicy.AbstractHitPolicy; import org.flowable.dmn.engine.impl.hitpolicy.ComposeDecisionResultBehavior; import org.flowable.dmn.engine.impl.hitpolicy.ComposeRuleResultBehavior; import org.flowable.dmn.engine.impl.hitpolicy.ContinueEvaluatingBehavior; import org.flowable.dmn.engine.impl.hitpolicy.EvaluateRuleValidityBehavior; import org.flowable.dmn.model.Decision; import org.flowable.dmn.model.DecisionRule; import org.flowable.dmn.model.DecisionTable; import org.flowable.dmn.model.HitPolicy; import org.flowable.dmn.model.LiteralExpression; import org.flowable.dmn.model.RuleInputClauseContainer; import org.flowable.dmn.model.RuleOutputClauseContainer; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import 
com.fasterxml.jackson.databind.ObjectMapper; /** * @author Yvo Swillens */ public class RuleEngineExecutorImpl implements RuleEngineExecutor { private static final Logger LOGGER = LoggerFactory.getLogger(RuleEngineExecutorImpl.class); protected Map<String, AbstractHitPolicy> hitPolicyBehaviors; protected ExpressionManager expressionManager; protected ObjectMapper objectMapper; public RuleEngineExecutorImpl(Map<String, AbstractHitPolicy> hitPolicyBehaviors, ExpressionManager expressionManager, ObjectMapper objectMapper) { this.hitPolicyBehaviors = hitPolicyBehaviors; this.expressionManager = expressionManager; this.objectMapper = objectMapper; } /** * Executes the given decision and creates the outcome results * * @param decision the DMN decision * @param executeDecisionInfo * @return updated execution variables map */ @Override public DecisionExecutionAuditContainer execute(Decision decision, ExecuteDecisionContext executeDecisionInfo) { if (decision == null) { throw new IllegalArgumentException("no decision provided"); } if (decision.getExpression() == null || !(decision.getExpression() instanceof DecisionTable)) { throw new IllegalArgumentException("no decision table present in decision"); } DecisionTable currentDecisionTable = (DecisionTable) decision.getExpression(); // create execution context and audit trail ELExecutionContext executionContext = ELExecutionContextBuilder.build(decision, executeDecisionInfo); try { sanityCheckDecisionTable(currentDecisionTable); // evaluate decision table evaluateDecisionTable(currentDecisionTable, executionContext); } catch (FlowableException fe) { LOGGER.error("decision table execution sanity check failed", fe); executionContext.getAuditContainer().setFailed(); executionContext.getAuditContainer().setExceptionMessage(getExceptionMessage(fe)); } finally { // end audit trail executionContext.getAuditContainer().stopAudit(); } return executionContext.getAuditContainer(); } protected void evaluateDecisionTable(DecisionTable 
decisionTable, ELExecutionContext executionContext) { LOGGER.debug("Start table evaluation: {}", decisionTable.getId()); if (decisionTable == null || decisionTable.getRules().isEmpty()) { throw new IllegalArgumentException("no rules present in table"); } if (executionContext == null) { throw new FlowableException("no execution context available"); } try { // evaluate rule conditions Map<Integer, List<RuleOutputClauseContainer>> validRuleOutputEntries = new HashMap<>(); for (DecisionRule rule : decisionTable.getRules()) { boolean ruleResult = executeRule(rule, executionContext); if (ruleResult) { // evaluate decision table hit policy validity if (getHitPolicyBehavior(decisionTable.getHitPolicy()) instanceof EvaluateRuleValidityBehavior) { ((EvaluateRuleValidityBehavior) getHitPolicyBehavior(decisionTable.getHitPolicy())).evaluateRuleValidity(rule.getRuleNumber(), executionContext); } // add valid rule output(s) validRuleOutputEntries.put(rule.getRuleNumber(), rule.getOutputEntries()); } // should continue evaluating if (getHitPolicyBehavior(decisionTable.getHitPolicy()) instanceof ContinueEvaluatingBehavior) { if (getHitPolicyBehavior(decisionTable.getHitPolicy()).shouldContinueEvaluating(ruleResult) == false) { LOGGER.debug("Stopping execution; hit policy {} specific behaviour", decisionTable.getHitPolicy()); break; } } } // compose rule conclusions for (Map.Entry<Integer, List<RuleOutputClauseContainer>> entry : validRuleOutputEntries.entrySet()) { executeOutputEntryAction(entry.getKey(), entry.getValue(), decisionTable.getHitPolicy(), executionContext); } // post rule conclusion actions if (getHitPolicyBehavior(decisionTable.getHitPolicy()) instanceof ComposeDecisionResultBehavior) { getHitPolicyBehavior(decisionTable.getHitPolicy()).composeDecisionResults(executionContext); } } catch (FlowableException ade) { LOGGER.error("decision table execution failed", ade); executionContext.getRuleResults().clear(); executionContext.getAuditContainer().setFailed(); 
executionContext.getAuditContainer().setExceptionMessage(getExceptionMessage(ade)); } LOGGER.debug("End table evaluation: {}", decisionTable.getId()); } protected boolean executeRule(DecisionRule rule, ELExecutionContext executionContext) { if (rule == null) { throw new FlowableException("rule cannot be null"); } LOGGER.debug("Start rule {} evaluation", rule.getRuleNumber()); // add audit entry executionContext.getAuditContainer().addRuleEntry(rule); boolean conditionResult = false; // go through conditions for (RuleInputClauseContainer conditionContainer : rule.getInputEntries()) { // resetting value String inputEntryId = conditionContainer.getInputEntry().getId(); conditionResult = false; try { // if condition is empty condition or has dash symbol result is TRUE String inputEntryText = conditionContainer.getInputEntry().getText(); if (StringUtils.isEmpty(inputEntryText) || "-".equals(inputEntryText)) { conditionResult = true; } else { conditionResult = executeInputExpressionEvaluation(conditionContainer, executionContext); } // add audit entry executionContext.getAuditContainer().addInputEntry(rule.getRuleNumber(), inputEntryId, conditionResult); LOGGER.debug("input entry {} ( {} {} ): {}", inputEntryId, conditionContainer.getInputClause().getInputExpression().getText(), inputEntryText, conditionResult); } catch (FlowableException ade) { // add failed audit entry and rethrow executionContext.getAuditContainer().addInputEntry(rule.getRuleNumber(), inputEntryId, getExceptionMessage(ade), null); throw ade; } catch (Exception e) { // add failed audit entry and rethrow executionContext.getAuditContainer().addInputEntry(rule.getRuleNumber(), inputEntryId, getExceptionMessage(e), null); throw new FlowableException(getExceptionMessage(e), e); } // exit evaluation loop if a condition is evaluated false if (!conditionResult) { break; } } if (conditionResult) { // mark rule valid executionContext.getAuditContainer().markRuleValid(rule.getRuleNumber()); } // mark rule end 
executionContext.getAuditContainer().markRuleEnd(rule.getRuleNumber()); LOGGER.debug("End rule {} evaluation", rule.getRuleNumber()); return conditionResult; } protected Boolean executeInputExpressionEvaluation(RuleInputClauseContainer ruleContainer, ELExecutionContext executionContext) { return ELExpressionExecutor.executeInputExpression(ruleContainer.getInputClause(), ruleContainer.getInputEntry(), expressionManager, executionContext); } protected void executeOutputEntryAction(int ruleNumber, List<RuleOutputClauseContainer> ruleOutputContainers, HitPolicy hitPolicy, ELExecutionContext executionContext) { LOGGER.debug("Start conclusion processing"); for (RuleOutputClauseContainer clauseContainer : ruleOutputContainers) { composeOutputEntryResult(ruleNumber, clauseContainer, hitPolicy, executionContext); } LOGGER.debug("End conclusion processing"); } protected void composeOutputEntryResult(int ruleNumber, RuleOutputClauseContainer ruleClauseContainer, HitPolicy hitPolicy, ELExecutionContext executionContext) { LOGGER.debug("Start evaluation conclusion {} of valid rule {}", ruleClauseContainer.getOutputClause().getOutputNumber(), ruleNumber); String outputVariableId = ruleClauseContainer.getOutputClause().getName(); String outputVariableType = ruleClauseContainer.getOutputClause().getTypeRef(); LiteralExpression outputEntryExpression = ruleClauseContainer.getOutputEntry(); if (StringUtils.isNotEmpty(outputEntryExpression.getText())) { Object executionVariable = null; try { Object resultValue = ELExpressionExecutor.executeOutputExpression(ruleClauseContainer.getOutputClause(), outputEntryExpression, expressionManager, executionContext); executionVariable = ExecutionVariableFactory.getExecutionVariable(outputVariableType, resultValue); // update execution context executionContext.getStackVariables().put(outputVariableId, executionVariable); // create result if (getHitPolicyBehavior(hitPolicy) instanceof ComposeRuleResultBehavior) { ((ComposeRuleResultBehavior) 
getHitPolicyBehavior(hitPolicy)).composeRuleResult(ruleNumber, outputVariableId, executionVariable, executionContext); } // add audit entry executionContext.getAuditContainer().addOutputEntry(ruleNumber, outputEntryExpression.getId(), executionVariable); executionContext.getAuditContainer().addDecisionResultType(outputVariableId, outputVariableType); if (executionVariable != null) { LOGGER.debug("Created conclusion result: {} of type: {} with value {}", outputVariableId, resultValue.getClass(), resultValue); } else { LOGGER.warn("Could not create conclusion result"); } } catch (FlowableException ade) { // clear result variables executionContext.getRuleResults().clear(); // add failed audit entry and rethrow executionContext.getAuditContainer().addOutputEntry(ruleNumber, outputEntryExpression.getId(), getExceptionMessage(ade), executionVariable); throw ade; } catch (Exception e) { // clear result variables executionContext.getRuleResults().clear(); // add failed audit entry and rethrow executionContext.getAuditContainer().addOutputEntry(ruleNumber, outputEntryExpression.getId(), getExceptionMessage(e), executionVariable); throw new FlowableException(getExceptionMessage(e), e); } } else { LOGGER.debug("Expression is empty"); // add empty audit entry executionContext.getAuditContainer().addOutputEntry(ruleNumber, outputEntryExpression.getId(), null); } LOGGER.debug("End evaluation conclusion {} of valid rule {}", ruleClauseContainer.getOutputClause().getOutputNumber(), ruleNumber); } protected String getExceptionMessage(Exception exception) { String exceptionMessage; if (exception.getCause() != null && exception.getCause().getMessage() != null) { exceptionMessage = exception.getCause().getMessage(); } else { exceptionMessage = exception.getMessage(); } return exceptionMessage; } protected AbstractHitPolicy getHitPolicyBehavior(HitPolicy hitPolicy) { AbstractHitPolicy hitPolicyBehavior = hitPolicyBehaviors.get(hitPolicy.getValue()); if (hitPolicyBehavior == null) { 
String hitPolicyBehaviorNotFoundMessage = String.format("HitPolicy behavior: %s not configured", hitPolicy.getValue()); LOGGER.error(hitPolicyBehaviorNotFoundMessage); throw new FlowableException(hitPolicyBehaviorNotFoundMessage); } return hitPolicyBehavior; } protected void sanityCheckDecisionTable(DecisionTable decisionTable) { if (decisionTable.getHitPolicy() == HitPolicy.COLLECT && decisionTable.getAggregation() != null && decisionTable.getOutputs() != null) { if (decisionTable.getOutputs().size() > 1) { throw new FlowableException(String.format("HitPolicy: %s has aggregation: %s and multiple outputs. This is not supported", decisionTable.getHitPolicy(), decisionTable.getAggregation())); } if (!"number".equals(decisionTable.getOutputs().get(0).getTypeRef())) { throw new FlowableException(String.format("HitPolicy: %s has aggregation: %s needs output type number", decisionTable.getHitPolicy(), decisionTable.getAggregation())); } } } @Override public Map<String, AbstractHitPolicy> getHitPolicyBehaviors() { return hitPolicyBehaviors; } @Override public void setHitPolicyBehaviors(Map<String, AbstractHitPolicy> hitPolicyBehaviors) { this.hitPolicyBehaviors = hitPolicyBehaviors; } @Override public ExpressionManager getExpressionManager() { return expressionManager; } @Override public void setExpressionManager(ExpressionManager expressionManager) { this.expressionManager = expressionManager; } @Override public ObjectMapper getObjectMapper() { return objectMapper; } @Override public void setObjectMapper(ObjectMapper objectMapper) { this.objectMapper = objectMapper; } }
/*
 * Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package com.amazonaws.services.codedeploy.model;

import java.io.Serializable;

/**
 * <p>
 * Represents the output of a list deployments operation.
 * </p>
 */
public class ListDeploymentsResult implements Serializable, Cloneable {

    /**
     * The deployment IDs returned by the call. Lazily created; access through
     * {@link #getDeployments()}.
     */
    private com.amazonaws.internal.SdkInternalList<String> deployments;

    /**
     * Pagination token returned when the result set is large; pass it to a
     * subsequent list deployments call to fetch the next page.
     */
    private String nextToken;

    /**
     * Returns the list of deployment IDs, creating an empty internal list on
     * first access so this accessor never returns {@code null}.
     *
     * @return A list of deployment IDs.
     */
    public java.util.List<String> getDeployments() {
        if (deployments == null) {
            deployments = new com.amazonaws.internal.SdkInternalList<String>();
        }
        return deployments;
    }

    /**
     * Replaces the stored deployment IDs with a copy of the given collection;
     * a {@code null} argument clears the list.
     *
     * @param deployments
     *        A list of deployment IDs.
     */
    public void setDeployments(java.util.Collection<String> deployments) {
        this.deployments = (deployments == null)
                ? null
                : new com.amazonaws.internal.SdkInternalList<String>(deployments);
    }

    /**
     * Appends the given deployment IDs to the existing list (if any). Use
     * {@link #setDeployments(java.util.Collection)} or
     * {@link #withDeployments(java.util.Collection)} to replace the existing
     * values instead.
     *
     * @param deployments
     *        A list of deployment IDs.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public ListDeploymentsResult withDeployments(String... deployments) {
        if (this.deployments == null) {
            setDeployments(new com.amazonaws.internal.SdkInternalList<String>(deployments.length));
        }
        java.util.Collections.addAll(this.deployments, deployments);
        return this;
    }

    /**
     * Replaces the deployment IDs with the given collection and returns this
     * object for chaining.
     *
     * @param deployments
     *        A list of deployment IDs.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public ListDeploymentsResult withDeployments(java.util.Collection<String> deployments) {
        setDeployments(deployments);
        return this;
    }

    /**
     * Sets the pagination token used to retrieve the next set of deployments.
     *
     * @param nextToken
     *        If the amount of information that is returned is significantly
     *        large, an identifier will also be returned, which can be used in
     *        a subsequent list deployments call to return the next set of
     *        deployments in the list.
     */
    public void setNextToken(String nextToken) {
        this.nextToken = nextToken;
    }

    /**
     * Returns the pagination token, or {@code null} when there are no further
     * pages.
     *
     * @return If the amount of information that is returned is significantly
     *         large, an identifier will also be returned, which can be used
     *         in a subsequent list deployments call to return the next set of
     *         deployments in the list.
     */
    public String getNextToken() {
        return this.nextToken;
    }

    /**
     * Sets the pagination token and returns this object for chaining.
     *
     * @param nextToken
     *        If the amount of information that is returned is significantly
     *        large, an identifier will also be returned, which can be used in
     *        a subsequent list deployments call to return the next set of
     *        deployments in the list.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public ListDeploymentsResult withNextToken(String nextToken) {
        setNextToken(nextToken);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (getDeployments() != null) {
            sb.append("Deployments: ").append(getDeployments()).append(",");
        }
        if (getNextToken() != null) {
            sb.append("NextToken: ").append(getNextToken());
        }
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // also rejects null
        if (!(obj instanceof ListDeploymentsResult)) {
            return false;
        }

        ListDeploymentsResult other = (ListDeploymentsResult) obj;

        // exactly one of the two lists is null -> not equal
        if ((other.getDeployments() == null) != (this.getDeployments() == null)) {
            return false;
        }
        if (other.getDeployments() != null
                && !other.getDeployments().equals(this.getDeployments())) {
            return false;
        }
        if ((other.getNextToken() == null) != (this.getNextToken() == null)) {
            return false;
        }
        if (other.getNextToken() != null
                && !other.getNextToken().equals(this.getNextToken())) {
            return false;
        }
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + ((getDeployments() == null) ? 0 : getDeployments().hashCode());
        result = prime * result + ((getNextToken() == null) ? 0 : getNextToken().hashCode());
        return result;
    }

    @Override
    public ListDeploymentsResult clone() {
        try {
            return (ListDeploymentsResult) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException(
                    "Got a CloneNotSupportedException from Object.clone() "
                            + "even though we're Cloneable!", e);
        }
    }
}
/*
 * Copyright (c) 1997, 2013, Oracle and/or its affiliates. All rights reserved.
 * ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
 *
 *
 *
 *
 *
 *
 *
 *
 *
 *
 *
 *
 *
 *
 *
 *
 *
 *
 */

package java.awt.datatransfer;

import java.awt.Toolkit;
import java.lang.ref.SoftReference;
import java.io.BufferedReader;
import java.io.File;
import java.io.InputStreamReader;
import java.io.IOException;
import java.net.URL;
import java.net.MalformedURLException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;

import sun.awt.AppContext;
import sun.awt.datatransfer.DataTransferer;

/**
 * The SystemFlavorMap is a configurable map between "natives" (Strings), which
 * correspond to platform-specific data formats, and "flavors" (DataFlavors),
 * which correspond to platform-independent MIME types. This mapping is used
 * by the data transfer subsystem to transfer data between Java and native
 * applications, and between Java applications in separate VMs.
 * <p>
 *
 * @since 1.2
 */
public final class SystemFlavorMap implements FlavorMap, FlavorTable {

    /**
     * Constant prefix used to tag Java types converted to native platform
     * type.
     */
    private static String JavaMIME = "JAVA_DATAFLAVOR:";

    // Key under which the per-AppContext FlavorMap singleton is stored.
    private static final Object FLAVOR_MAP_KEY = new Object();

    /**
     * Copied from java.util.Properties.
     */
    private static final String keyValueSeparators = "=: \t\r\n\f";
    private static final String strictKeyValueSeparators = "=:";
    private static final String whiteSpaceChars = " \t\r\n\f";

    /**
     * The list of valid, decoded text flavor representation classes, in order
     * from best to worst.
     */
    private static final String[] UNICODE_TEXT_CLASSES = {
        "java.io.Reader", "java.lang.String", "java.nio.CharBuffer", "\"[C\""
    };

    /**
     * The list of valid, encoded text flavor representation classes, in order
     * from best to worst.
     */
    private static final String[] ENCODED_TEXT_CLASSES = {
        "java.io.InputStream", "java.nio.ByteBuffer", "\"[B\""
    };

    /**
     * A String representing text/plain MIME type.
     */
    private static final String TEXT_PLAIN_BASE_TYPE = "text/plain";

    /**
     * A String representing text/html MIME type.
     */
    private static final String HTML_TEXT_BASE_TYPE = "text/html";

    /**
     * Maps native Strings to Lists of DataFlavors (or base type Strings for
     * text DataFlavors).
     * Do not use the field directly, use getNativeToFlavor() instead.
     */
    private final Map<String, LinkedHashSet<DataFlavor>> nativeToFlavor = new HashMap<>();

    /**
     * Accessor to nativeToFlavor map. Since we use lazy initialization we must
     * use this accessor instead of direct access to the field which may not be
     * initialized yet. This method will initialize the field if needed.
     *
     * NOTE(review): unlike getFlavorToNative()/getTextTypeToNative() this
     * accessor is not declared synchronized — presumably callers already hold
     * the lock on this; confirm before changing.
     *
     * @return nativeToFlavor
     */
    private Map<String, LinkedHashSet<DataFlavor>> getNativeToFlavor() {
        if (!isMapInitialized) {
            initSystemFlavorMap();
        }
        return nativeToFlavor;
    }

    /**
     * Maps DataFlavors (or base type Strings for text DataFlavors) to Lists of
     * native Strings.
     * Do not use the field directly, use getFlavorToNative() instead.
     */
    private final Map<DataFlavor, LinkedHashSet<String>> flavorToNative = new HashMap<>();

    /**
     * Accessor to flavorToNative map. Since we use lazy initialization we must
     * use this accessor instead of direct access to the field which may not be
     * initialized yet. This method will initialize the field if needed.
     *
     * @return flavorToNative
     */
    private synchronized Map<DataFlavor, LinkedHashSet<String>> getFlavorToNative() {
        if (!isMapInitialized) {
            initSystemFlavorMap();
        }
        return flavorToNative;
    }

    /**
     * Maps a text DataFlavor primary mime-type to the native. Used only to store
     * standard mappings registered in the flavormap.properties
     * Do not use this field directly, use getTextTypeToNative() instead.
     */
    private Map<String, LinkedHashSet<String>> textTypeToNative = new HashMap<>();

    /**
     * Shows if the object has been initialized.
     */
    private boolean isMapInitialized = false;

    /**
     * An accessor to textTypeToNative map. Since we use lazy initialization we
     * must use this accessor instead of direct access to the field which may not
     * be initialized yet. This method will initialize the field if needed.
     *
     * @return textTypeToNative
     */
    private synchronized Map<String, LinkedHashSet<String>> getTextTypeToNative() {
        if (!isMapInitialized) {
            initSystemFlavorMap();
            // From this point the map should not be modified
            textTypeToNative = Collections.unmodifiableMap(textTypeToNative);
        }
        return textTypeToNative;
    }

    /**
     * Caches the result of getNativesForFlavor(). Maps DataFlavors to
     * SoftReferences which reference LinkedHashSet of String natives.
     */
    private final SoftCache<DataFlavor, String> nativesForFlavorCache = new SoftCache<>();

    /**
     * Caches the result getFlavorsForNative(). Maps String natives to
     * SoftReferences which reference LinkedHashSet of DataFlavors.
     */
    private final SoftCache<String, DataFlavor> flavorsForNativeCache = new SoftCache<>();

    /**
     * Dynamic mapping generation used for text mappings should not be applied
     * to the DataFlavors and String natives for which the mappings have been
     * explicitly specified with setFlavorsForNative() or
     * setNativesForFlavor(). This keeps all such keys.
     */
    private Set<Object> disabledMappingGenerationKeys = new HashSet<>();

    /**
     * Returns the default FlavorMap for this thread's ClassLoader.
     */
    public static FlavorMap getDefaultFlavorMap() {
        AppContext context = AppContext.getAppContext();
        FlavorMap fm = (FlavorMap) context.get(FLAVOR_MAP_KEY);
        if (fm == null) {
            fm = new SystemFlavorMap();
            context.put(FLAVOR_MAP_KEY, fm);
        }
        return fm;
    }

    // Instances are obtained only through getDefaultFlavorMap().
    private SystemFlavorMap() {
    }

    /**
     * Initializes a SystemFlavorMap by reading flavormap.properties and
     * AWT.DnD.flavorMapFileURL.
     * For thread-safety must be called under lock on this.
     */
    private void initSystemFlavorMap() {
        if (isMapInitialized) {
            return;
        }
        // Set the flag up front; the loading below populates the maps.
        isMapInitialized = true;

        // Load the default mapping file shipped with the JRE.
        BufferedReader flavormapDotProperties =
            java.security.AccessController.doPrivileged(
                new java.security.PrivilegedAction<BufferedReader>() {
                    public BufferedReader run() {
                        String fileName =
                            System.getProperty("java.home") +
                            File.separator +
                            "lib" +
                            File.separator +
                            "flavormap.properties";
                        try {
                            return new BufferedReader
                                (new InputStreamReader
                                    (new File(fileName).toURI().toURL().openStream(), "ISO-8859-1"));
                        } catch (MalformedURLException e) {
                            System.err.println("MalformedURLException:" + e + " while loading default flavormap.properties file:" + fileName);
                        } catch (IOException e) {
                            System.err.println("IOException:" + e + " while loading default flavormap.properties file:" + fileName);
                        }
                        return null;
                    }
                });

        // Optional user-specified overriding mapping file (Toolkit property).
        String url =
            java.security.AccessController.doPrivileged(
                new java.security.PrivilegedAction<String>() {
                    public String run() {
                        return Toolkit.getProperty("AWT.DnD.flavorMapFileURL", null);
                    }
                });

        if (flavormapDotProperties != null) {
            try {
                parseAndStoreReader(flavormapDotProperties);
            } catch (IOException e) {
                System.err.println("IOException:" + e + " while parsing default flavormap.properties file");
            }
        }

        BufferedReader flavormapURL = null;
        if (url != null) {
            try {
                flavormapURL = new BufferedReader(new InputStreamReader(new URL(url).openStream(), "ISO-8859-1"));
            } catch (MalformedURLException e) {
                System.err.println("MalformedURLException:" + e + " while reading AWT.DnD.flavorMapFileURL:" + url);
            } catch (IOException e) {
                System.err.println("IOException:" + e + " while reading AWT.DnD.flavorMapFileURL:" + url);
            } catch (SecurityException e) {
                // ignored
            }
        }

        if (flavormapURL != null) {
            try {
                parseAndStoreReader(flavormapURL);
            } catch (IOException e) {
                System.err.println("IOException:" + e + " while parsing AWT.DnD.flavorMapFileURL");
            }
        }
    }
    /**
     * Copied code from java.util.Properties. Parsing the data ourselves is the
     * only way to handle duplicate keys and values.
     */
    private void parseAndStoreReader(BufferedReader in) throws IOException {
        while (true) {
            // Get next line
            String line = in.readLine();
            if (line == null) {
                return;
            }

            if (line.length() > 0) {
                // Continue lines that end in slashes if they are not comments
                char firstChar = line.charAt(0);
                if (firstChar != '#' && firstChar != '!') {
                    while (continueLine(line)) {
                        String nextLine = in.readLine();
                        if (nextLine == null) {
                            nextLine = "";
                        }
                        // Drop the trailing backslash before joining.
                        String loppedLine =
                            line.substring(0, line.length() - 1);
                        // Advance beyond whitespace on new line
                        int startIndex = 0;
                        for(; startIndex < nextLine.length(); startIndex++) {
                            if (whiteSpaceChars.
                                    indexOf(nextLine.charAt(startIndex)) == -1)
                            {
                                break;
                            }
                        }
                        nextLine = nextLine.substring(startIndex,
                                                      nextLine.length());
                        line = loppedLine+nextLine;
                    }

                    // Find start of key
                    int len = line.length();
                    int keyStart = 0;
                    for(; keyStart < len; keyStart++) {
                        if(whiteSpaceChars.
                               indexOf(line.charAt(keyStart)) == -1) {
                            break;
                        }
                    }

                    // Blank lines are ignored
                    if (keyStart == len) {
                        continue;
                    }

                    // Find separation between key and value
                    int separatorIndex = keyStart;
                    for(; separatorIndex < len; separatorIndex++) {
                        char currentChar = line.charAt(separatorIndex);
                        if (currentChar == '\\') {
                            // Skip the escaped character following a backslash.
                            separatorIndex++;
                        } else if (keyValueSeparators.
                                       indexOf(currentChar) != -1) {
                            break;
                        }
                    }

                    // Skip over whitespace after key if any
                    int valueIndex = separatorIndex;
                    for (; valueIndex < len; valueIndex++) {
                        if (whiteSpaceChars.
                                indexOf(line.charAt(valueIndex)) == -1) {
                            break;
                        }
                    }

                    // Skip over one non whitespace key value separators if any
                    if (valueIndex < len) {
                        if (strictKeyValueSeparators.
                                indexOf(line.charAt(valueIndex)) != -1) {
                            valueIndex++;
                        }
                    }

                    // Skip over white space after other separators if any
                    while (valueIndex < len) {
                        if (whiteSpaceChars.
                                indexOf(line.charAt(valueIndex)) == -1) {
                            break;
                        }
                        valueIndex++;
                    }

                    String key = line.substring(keyStart, separatorIndex);
                    String value = (separatorIndex < len)
                        ? line.substring(valueIndex, len) : "";

                    // Convert then store key and value
                    key = loadConvert(key);
                    value = loadConvert(value);

                    try {
                        MimeType mime = new MimeType(value);
                        if ("text".equals(mime.getPrimaryType())) {
                            String charset = mime.getParameter("charset");
                            if (DataTransferer.doesSubtypeSupportCharset
                                    (mime.getSubType(), charset))
                            {
                                // We need to store the charset and eoln
                                // parameters, if any, so that the
                                // DataTransferer will have this information
                                // for conversion into the native format.
                                DataTransferer transferer =
                                    DataTransferer.getInstance();
                                if (transferer != null) {
                                    transferer.registerTextFlavorProperties
                                        (key, charset,
                                         mime.getParameter("eoln"),
                                         mime.getParameter("terminators"));
                                }
                            }

                            // But don't store any of these parameters in the
                            // DataFlavor itself for any text natives (even
                            // non-charset ones). The SystemFlavorMap will
                            // synthesize the appropriate mappings later.
                            mime.removeParameter("charset");
                            mime.removeParameter("class");
                            mime.removeParameter("eoln");
                            mime.removeParameter("terminators");
                            value = mime.toString();
                        }
                    } catch (MimeTypeParseException e) {
                        e.printStackTrace();
                        continue;
                    }

                    DataFlavor flavor;
                    try {
                        flavor = new DataFlavor(value);
                    } catch (Exception e) {
                        // Retry with a null human-presentable name before
                        // giving up on this entry.
                        try {
                            flavor = new DataFlavor(value, null);
                        } catch (Exception ee) {
                            ee.printStackTrace();
                            continue;
                        }
                    }

                    final LinkedHashSet<DataFlavor> dfs = new LinkedHashSet<>();
                    dfs.add(flavor);

                    if ("text".equals(flavor.getPrimaryType())) {
                        dfs.addAll(convertMimeTypeToDataFlavors(value));
                        store(flavor.mimeType.getBaseType(), key, getTextTypeToNative());
                    }

                    for (DataFlavor df : dfs) {
                        store(df, key, getFlavorToNative());
                        store(key, df, getNativeToFlavor());
                    }
                }
            }
        }
    }

    /**
     * Copied from java.util.Properties.
     */
    private boolean continueLine (String line) {
        // A line continues when it ends with an odd number of backslashes.
        int slashCount = 0;
        int index = line.length() - 1;
        while((index >= 0) && (line.charAt(index--) == '\\')) {
            slashCount++;
        }
        return (slashCount % 2 == 1);
    }
    /**
     * Copied from java.util.Properties.
*/ private String loadConvert(String theString) { char aChar; int len = theString.length(); StringBuilder outBuffer = new StringBuilder(len); for (int x = 0; x < len; ) { aChar = theString.charAt(x++); if (aChar == '\\') { aChar = theString.charAt(x++); if (aChar == 'u') { // Read the xxxx int value = 0; for (int i = 0; i < 4; i++) { aChar = theString.charAt(x++); switch (aChar) { case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': { value = (value << 4) + aChar - '0'; break; } case 'a': case 'b': case 'c': case 'd': case 'e': case 'f': { value = (value << 4) + 10 + aChar - 'a'; break; } case 'A': case 'B': case 'C': case 'D': case 'E': case 'F': { value = (value << 4) + 10 + aChar - 'A'; break; } default: { throw new IllegalArgumentException( "Malformed \\uxxxx encoding."); } } } outBuffer.append((char)value); } else { if (aChar == 't') { aChar = '\t'; } else if (aChar == 'r') { aChar = '\r'; } else if (aChar == 'n') { aChar = '\n'; } else if (aChar == 'f') { aChar = '\f'; } outBuffer.append(aChar); } } else { outBuffer.append(aChar); } } return outBuffer.toString(); } /** * Stores the listed object under the specified hash key in map. Unlike a * standard map, the listed object will not replace any object already at * the appropriate Map location, but rather will be appended to a List * stored in that location. */ private <H, L> void store(H hashed, L listed, Map<H, LinkedHashSet<L>> map) { LinkedHashSet<L> list = map.get(hashed); if (list == null) { list = new LinkedHashSet<>(1); map.put(hashed, list); } if (!list.contains(listed)) { list.add(listed); } } /** * Semantically equivalent to 'nativeToFlavor.get(nat)'. This method * handles the case where 'nat' is not found in 'nativeToFlavor'. In that * case, a new DataFlavor is synthesized, stored, and returned, if and * only if the specified native is encoded as a Java MIME type. 
*/ private LinkedHashSet<DataFlavor> nativeToFlavorLookup(String nat) { LinkedHashSet<DataFlavor> flavors = getNativeToFlavor().get(nat); if (nat != null && !disabledMappingGenerationKeys.contains(nat)) { DataTransferer transferer = DataTransferer.getInstance(); if (transferer != null) { LinkedHashSet<DataFlavor> platformFlavors = transferer.getPlatformMappingsForNative(nat); if (!platformFlavors.isEmpty()) { if (flavors != null) { // Prepending the platform-specific mappings ensures // that the flavors added with // addFlavorForUnencodedNative() are at the end of // list. platformFlavors.addAll(flavors); } flavors = platformFlavors; } } } if (flavors == null && isJavaMIMEType(nat)) { String decoded = decodeJavaMIMEType(nat); DataFlavor flavor = null; try { flavor = new DataFlavor(decoded); } catch (Exception e) { System.err.println("Exception \"" + e.getClass().getName() + ": " + e.getMessage() + "\"while constructing DataFlavor for: " + decoded); } if (flavor != null) { flavors = new LinkedHashSet<>(1); getNativeToFlavor().put(nat, flavors); flavors.add(flavor); flavorsForNativeCache.remove(nat); LinkedHashSet<String> natives = getFlavorToNative().get(flavor); if (natives == null) { natives = new LinkedHashSet<>(1); getFlavorToNative().put(flavor, natives); } natives.add(nat); nativesForFlavorCache.remove(flavor); } } return (flavors != null) ? flavors : new LinkedHashSet<>(0); } /** * Semantically equivalent to 'flavorToNative.get(flav)'. This method * handles the case where 'flav' is not found in 'flavorToNative' depending * on the value of passes 'synthesize' parameter. If 'synthesize' is * SYNTHESIZE_IF_NOT_FOUND a native is synthesized, stored, and returned by * encoding the DataFlavor's MIME type. Otherwise an empty List is returned * and 'flavorToNative' remains unaffected. 
*/ private LinkedHashSet<String> flavorToNativeLookup(final DataFlavor flav, final boolean synthesize) { LinkedHashSet<String> natives = getFlavorToNative().get(flav); if (flav != null && !disabledMappingGenerationKeys.contains(flav)) { DataTransferer transferer = DataTransferer.getInstance(); if (transferer != null) { LinkedHashSet<String> platformNatives = transferer.getPlatformMappingsForFlavor(flav); if (!platformNatives.isEmpty()) { if (natives != null) { // Prepend the platform-specific mappings to ensure // that the natives added with // addUnencodedNativeForFlavor() are at the end of // list. platformNatives.addAll(natives); } natives = platformNatives; } } } if (natives == null) { if (synthesize) { String encoded = encodeDataFlavor(flav); natives = new LinkedHashSet<>(1); getFlavorToNative().put(flav, natives); natives.add(encoded); LinkedHashSet<DataFlavor> flavors = getNativeToFlavor().get(encoded); if (flavors == null) { flavors = new LinkedHashSet<>(1); getNativeToFlavor().put(encoded, flavors); } flavors.add(flav); nativesForFlavorCache.remove(flav); flavorsForNativeCache.remove(encoded); } else { natives = new LinkedHashSet<>(0); } } return new LinkedHashSet<>(natives); } /** * Returns a <code>List</code> of <code>String</code> natives to which the * specified <code>DataFlavor</code> can be translated by the data transfer * subsystem. The <code>List</code> will be sorted from best native to * worst. That is, the first native will best reflect data in the specified * flavor to the underlying native platform. * <p> * If the specified <code>DataFlavor</code> is previously unknown to the * data transfer subsystem and the data transfer subsystem is unable to * translate this <code>DataFlavor</code> to any existing native, then * invoking this method will establish a * mapping in both directions between the specified <code>DataFlavor</code> * and an encoded version of its MIME type as its native. 
* * @param flav the <code>DataFlavor</code> whose corresponding natives * should be returned. If <code>null</code> is specified, all * natives currently known to the data transfer subsystem are * returned in a non-deterministic order. * @return a <code>java.util.List</code> of <code>java.lang.String</code> * objects which are platform-specific representations of platform- * specific data formats * * @see #encodeDataFlavor * @since 1.4 */ @Override public synchronized List<String> getNativesForFlavor(DataFlavor flav) { LinkedHashSet<String> retval = nativesForFlavorCache.check(flav); if (retval != null) { return new ArrayList<>(retval); } if (flav == null) { retval = new LinkedHashSet<>(getNativeToFlavor().keySet()); } else if (disabledMappingGenerationKeys.contains(flav)) { // In this case we shouldn't synthesize a native for this flavor, // since its mappings were explicitly specified. retval = flavorToNativeLookup(flav, false); } else if (DataTransferer.isFlavorCharsetTextType(flav)) { retval = new LinkedHashSet<>(0); // For text/* flavors, flavor-to-native mappings specified in // flavormap.properties are stored per flavor's base type. if ("text".equals(flav.getPrimaryType())) { LinkedHashSet<String> textTypeNatives = getTextTypeToNative().get(flav.mimeType.getBaseType()); if (textTypeNatives != null) { retval.addAll(textTypeNatives); } } // Also include text/plain natives, but don't duplicate Strings LinkedHashSet<String> textTypeNatives = getTextTypeToNative().get(TEXT_PLAIN_BASE_TYPE); if (textTypeNatives != null) { retval.addAll(textTypeNatives); } if (retval.isEmpty()) { retval = flavorToNativeLookup(flav, true); } else { // In this branch it is guaranteed that natives explicitly // listed for flav's MIME type were added with // addUnencodedNativeForFlavor(), so they have lower priority. 
retval.addAll(flavorToNativeLookup(flav, false)); } } else if (DataTransferer.isFlavorNoncharsetTextType(flav)) { retval = getTextTypeToNative().get(flav.mimeType.getBaseType()); if (retval == null || retval.isEmpty()) { retval = flavorToNativeLookup(flav, true); } else { // In this branch it is guaranteed that natives explicitly // listed for flav's MIME type were added with // addUnencodedNativeForFlavor(), so they have lower priority. retval.addAll(flavorToNativeLookup(flav, false)); } } else { retval = flavorToNativeLookup(flav, true); } nativesForFlavorCache.put(flav, retval); // Create a copy, because client code can modify the returned list. return new ArrayList<>(retval); } /** * Returns a <code>List</code> of <code>DataFlavor</code>s to which the * specified <code>String</code> native can be translated by the data * transfer subsystem. The <code>List</code> will be sorted from best * <code>DataFlavor</code> to worst. That is, the first * <code>DataFlavor</code> will best reflect data in the specified * native to a Java application. * <p> * If the specified native is previously unknown to the data transfer * subsystem, and that native has been properly encoded, then invoking this * method will establish a mapping in both directions between the specified * native and a <code>DataFlavor</code> whose MIME type is a decoded * version of the native. * <p> * If the specified native is not a properly encoded native and the * mappings for this native have not been altered with * <code>setFlavorsForNative</code>, then the contents of the * <code>List</code> is platform dependent, but <code>null</code> * cannot be returned. * * @param nat the native whose corresponding <code>DataFlavor</code>s * should be returned. If <code>null</code> is specified, all * <code>DataFlavor</code>s currently known to the data transfer * subsystem are returned in a non-deterministic order. 
     * @return a <code>java.util.List</code> of <code>DataFlavor</code>
     *         objects into which platform-specific data in the specified,
     *         platform-specific native can be translated
     *
     * @see #encodeJavaMIMEType
     * @since 1.4
     */
    @Override
    public synchronized List<DataFlavor> getFlavorsForNative(String nat) {
        LinkedHashSet<DataFlavor> returnValue = flavorsForNativeCache.check(nat);
        if (returnValue != null) {
            return new ArrayList<>(returnValue);
        } else {
            returnValue = new LinkedHashSet<>();
        }

        if (nat == null) {
            // Aggregate the flavors of every known native.
            for (String n : getNativesForFlavor(null)) {
                returnValue.addAll(getFlavorsForNative(n));
            }
        } else {
            final LinkedHashSet<DataFlavor> flavors = nativeToFlavorLookup(nat);
            if (disabledMappingGenerationKeys.contains(nat)) {
                return new ArrayList<>(flavors);
            }

            // NOTE(review): second lookup looks redundant with the one above
            // since nativeToFlavorLookup() already synthesizes; preserved
            // as-is because the first call's side effects may matter.
            final LinkedHashSet<DataFlavor> flavorsWithSynthesized =
                    nativeToFlavorLookup(nat);

            for (DataFlavor df : flavorsWithSynthesized) {
                returnValue.add(df);
                if ("text".equals(df.getPrimaryType())) {
                    // Expand each text flavor into the full set of charset/
                    // class variants for its base MIME type.
                    String baseType = df.mimeType.getBaseType();
                    returnValue.addAll(convertMimeTypeToDataFlavors(baseType));
                }
            }
        }
        flavorsForNativeCache.put(nat, returnValue);
        return new ArrayList<>(returnValue);
    }

    /**
     * Expands a base MIME type into the set of DataFlavor variants the data
     * transfer subsystem supports for it (charset/class permutations for
     * charset-capable text types, representation-class variants otherwise).
     */
    private static Set<DataFlavor> convertMimeTypeToDataFlavors(
        final String baseType) {

        final Set<DataFlavor> returnValue = new LinkedHashSet<>();

        String subType = null;

        try {
            final MimeType mimeType = new MimeType(baseType);
            subType = mimeType.getSubType();
        } catch (MimeTypeParseException mtpe) {
            // Cannot happen, since we checked all mappings
            // on load from flavormap.properties.
        }

        if (DataTransferer.doesSubtypeSupportCharset(subType, null)) {
            if (TEXT_PLAIN_BASE_TYPE.equals(baseType)) {
                returnValue.add(DataFlavor.stringFlavor);
            }

            for (String unicodeClassName : UNICODE_TEXT_CLASSES) {
                final String mimeType = baseType + ";charset=Unicode;class=" +
                                        unicodeClassName;

                final LinkedHashSet<String> mimeTypes =
                    handleHtmlMimeTypes(baseType, mimeType);
                for (String mt : mimeTypes) {
                    DataFlavor toAdd = null;
                    try {
                        toAdd = new DataFlavor(mt);
                    } catch (ClassNotFoundException cannotHappen) {
                    }
                    returnValue.add(toAdd);
                }
            }

            for (String charset : DataTransferer.standardEncodings()) {

                for (String encodedTextClass : ENCODED_TEXT_CLASSES) {
                    final String mimeType =
                            baseType + ";charset=" + charset +
                            ";class=" + encodedTextClass;

                    final LinkedHashSet<String> mimeTypes =
                        handleHtmlMimeTypes(baseType, mimeType);

                    for (String mt : mimeTypes) {

                        DataFlavor df = null;

                        try {
                            df = new DataFlavor(mt);
                            // Check for equality to plainTextFlavor so
                            // that we can ensure that the exact charset of
                            // plainTextFlavor, not the canonical charset
                            // or another equivalent charset with a
                            // different name, is used.
                            if (df.equals(DataFlavor.plainTextFlavor)) {
                                df = DataFlavor.plainTextFlavor;
                            }
                        } catch (ClassNotFoundException cannotHappen) {
                        }

                        returnValue.add(df);
                    }
                }
            }

            if (TEXT_PLAIN_BASE_TYPE.equals(baseType)) {
                returnValue.add(DataFlavor.plainTextFlavor);
            }
        } else {
            // Non-charset text natives should be treated as
            // opaque, 8-bit data in any of its various
            // representations.
            for (String encodedTextClassName : ENCODED_TEXT_CLASSES) {
                DataFlavor toAdd = null;
                try {
                    toAdd = new DataFlavor(baseType +
                         ";class=" + encodedTextClassName);
                } catch (ClassNotFoundException cannotHappen) {
                }
                returnValue.add(toAdd);
            }
        }
        return returnValue;
    }

    // Document variants synthesized for text/html natives.
    private static final String [] htmlDocumntTypes =
            new String [] {"all", "selection", "fragment"};

    /**
     * For text/html base types, expands the given MIME type into one entry
     * per HTML document variant; for any other base type, returns the MIME
     * type unchanged.
     */
    private static LinkedHashSet<String> handleHtmlMimeTypes(String baseType,
                                                             String mimeType) {

        LinkedHashSet<String> returnValues = new LinkedHashSet<>();

        if (HTML_TEXT_BASE_TYPE.equals(baseType)) {
            for (String documentType : htmlDocumntTypes) {
                returnValues.add(mimeType + ";document=" + documentType);
            }
        } else {
            returnValues.add(mimeType);
        }

        return returnValues;
    }

    /**
     * Returns a <code>Map</code> of the specified <code>DataFlavor</code>s to
     * their most preferred <code>String</code> native. Each native value will
     * be the same as the first native in the List returned by
     * <code>getNativesForFlavor</code> for the specified flavor.
     * <p>
     * If a specified <code>DataFlavor</code> is previously unknown to the
     * data transfer subsystem, then invoking this method will establish a
     * mapping in both directions between the specified <code>DataFlavor</code>
     * and an encoded version of its MIME type as its native.
     *
     * @param flavors an array of <code>DataFlavor</code>s which will be the
     *        key set of the returned <code>Map</code>. If <code>null</code> is
     *        specified, a mapping of all <code>DataFlavor</code>s known to the
     *        data transfer subsystem to their most preferred
     *        <code>String</code> natives will be returned.
* @return a <code>java.util.Map</code> of <code>DataFlavor</code>s to * <code>String</code> natives * * @see #getNativesForFlavor * @see #encodeDataFlavor */ @Override public synchronized Map<DataFlavor,String> getNativesForFlavors(DataFlavor[] flavors) { // Use getNativesForFlavor to generate extra natives for text flavors // and stringFlavor if (flavors == null) { List<DataFlavor> flavor_list = getFlavorsForNative(null); flavors = new DataFlavor[flavor_list.size()]; flavor_list.toArray(flavors); } Map<DataFlavor, String> retval = new HashMap<>(flavors.length, 1.0f); for (DataFlavor flavor : flavors) { List<String> natives = getNativesForFlavor(flavor); String nat = (natives.isEmpty()) ? null : natives.get(0); retval.put(flavor, nat); } return retval; } /** * Returns a <code>Map</code> of the specified <code>String</code> natives * to their most preferred <code>DataFlavor</code>. Each * <code>DataFlavor</code> value will be the same as the first * <code>DataFlavor</code> in the List returned by * <code>getFlavorsForNative</code> for the specified native. * <p> * If a specified native is previously unknown to the data transfer * subsystem, and that native has been properly encoded, then invoking this * method will establish a mapping in both directions between the specified * native and a <code>DataFlavor</code> whose MIME type is a decoded * version of the native. * * @param natives an array of <code>String</code>s which will be the * key set of the returned <code>Map</code>. If <code>null</code> is * specified, a mapping of all supported <code>String</code> natives * to their most preferred <code>DataFlavor</code>s will be * returned. 
* @return a <code>java.util.Map</code> of <code>String</code> natives to * <code>DataFlavor</code>s * * @see #getFlavorsForNative * @see #encodeJavaMIMEType */ @Override public synchronized Map<String,DataFlavor> getFlavorsForNatives(String[] natives) { // Use getFlavorsForNative to generate extra flavors for text natives if (natives == null) { List<String> nativesList = getNativesForFlavor(null); natives = new String[nativesList.size()]; nativesList.toArray(natives); } Map<String, DataFlavor> retval = new HashMap<>(natives.length, 1.0f); for (String aNative : natives) { List<DataFlavor> flavors = getFlavorsForNative(aNative); DataFlavor flav = (flavors.isEmpty())? null : flavors.get(0); retval.put(aNative, flav); } return retval; } /** * Adds a mapping from the specified <code>DataFlavor</code> (and all * <code>DataFlavor</code>s equal to the specified <code>DataFlavor</code>) * to the specified <code>String</code> native. * Unlike <code>getNativesForFlavor</code>, the mapping will only be * established in one direction, and the native will not be encoded. To * establish a two-way mapping, call * <code>addFlavorForUnencodedNative</code> as well. The new mapping will * be of lower priority than any existing mapping. * This method has no effect if a mapping from the specified or equal * <code>DataFlavor</code> to the specified <code>String</code> native * already exists. 
     *
     * @param flav the <code>DataFlavor</code> key for the mapping
     * @param nat the <code>String</code> native value for the mapping
     * @throws NullPointerException if flav or nat is <code>null</code>
     *
     * @see #addFlavorForUnencodedNative
     * @since 1.4
     */
    public synchronized void addUnencodedNativeForFlavor(DataFlavor flav,
                                                         String nat) {
        Objects.requireNonNull(nat, "Null native not permitted");
        Objects.requireNonNull(flav, "Null flavor not permitted");

        LinkedHashSet<String> natives = getFlavorToNative().get(flav);
        if (natives == null) {
            natives = new LinkedHashSet<>(1);
            getFlavorToNative().put(flav, natives);
        }
        // Appended (not prepended), so the new native has the lowest priority.
        natives.add(nat);
        // The cached native list for this flavor is now stale.
        nativesForFlavorCache.remove(flav);
    }

    /**
     * Discards the current mappings for the specified <code>DataFlavor</code>
     * and all <code>DataFlavor</code>s equal to the specified
     * <code>DataFlavor</code>, and creates new mappings to the
     * specified <code>String</code> natives.
     * Unlike <code>getNativesForFlavor</code>, the mappings will only be
     * established in one direction, and the natives will not be encoded. To
     * establish two-way mappings, call <code>setFlavorsForNative</code>
     * as well. The first native in the array will represent the highest
     * priority mapping. Subsequent natives will represent mappings of
     * decreasing priority.
     * <p>
     * If the array contains several elements that reference equal
     * <code>String</code> natives, this method will establish new mappings
     * for the first of those elements and ignore the rest of them.
     * <p>
     * It is recommended that client code not reset mappings established by the
     * data transfer subsystem. This method should only be used for
     * application-level mappings.
     *
     * @param flav the <code>DataFlavor</code> key for the mappings
     * @param natives the <code>String</code> native values for the mappings
     * @throws NullPointerException if flav or natives is <code>null</code>
     *         or if natives contains <code>null</code> elements
     *
     * @see #setFlavorsForNative
     * @since 1.4
     */
    public synchronized void setNativesForFlavor(DataFlavor flav,
                                                 String[] natives) {
        Objects.requireNonNull(natives, "Null natives not permitted");
        Objects.requireNonNull(flav, "Null flavors not permitted");

        getFlavorToNative().remove(flav);
        for (String aNative : natives) {
            addUnencodedNativeForFlavor(flav, aNative);
        }
        // Suppress platform-mapping/synthesis generation for this flavor
        // from now on: its mappings are explicitly specified.
        disabledMappingGenerationKeys.add(flav);
        nativesForFlavorCache.remove(flav);
    }

    /**
     * Adds a mapping from a single <code>String</code> native to a single
     * <code>DataFlavor</code>. Unlike <code>getFlavorsForNative</code>, the
     * mapping will only be established in one direction, and the native will
     * not be encoded. To establish a two-way mapping, call
     * <code>addUnencodedNativeForFlavor</code> as well. The new mapping will
     * be of lower priority than any existing mapping.
     * This method has no effect if a mapping from the specified
     * <code>String</code> native to the specified or equal
     * <code>DataFlavor</code> already exists.
     *
     * @param nat the <code>String</code> native key for the mapping
     * @param flav the <code>DataFlavor</code> value for the mapping
     * @throws NullPointerException if nat or flav is <code>null</code>
     *
     * @see #addUnencodedNativeForFlavor
     * @since 1.4
     */
    public synchronized void addFlavorForUnencodedNative(String nat,
                                                         DataFlavor flav) {
        Objects.requireNonNull(nat, "Null native not permitted");
        Objects.requireNonNull(flav, "Null flavor not permitted");

        LinkedHashSet<DataFlavor> flavors = getNativeToFlavor().get(nat);
        if (flavors == null) {
            flavors = new LinkedHashSet<>(1);
            getNativeToFlavor().put(nat, flavors);
        }
        // Appended (not prepended), so the new flavor has the lowest priority.
        flavors.add(flav);
        // The cached flavor list for this native is now stale.
        flavorsForNativeCache.remove(nat);
    }

    /**
     * Discards the current mappings for the specified <code>String</code>
     * native, and creates new mappings to the specified
     * <code>DataFlavor</code>s. Unlike <code>getFlavorsForNative</code>, the
     * mappings will only be established in one direction, and the natives need
     * not be encoded. To establish two-way mappings, call
     * <code>setNativesForFlavor</code> as well. The first
     * <code>DataFlavor</code> in the array will represent the highest priority
     * mapping. Subsequent <code>DataFlavor</code>s will represent mappings of
     * decreasing priority.
     * <p>
     * If the array contains several elements that reference equal
     * <code>DataFlavor</code>s, this method will establish new mappings
     * for the first of those elements and ignore the rest of them.
     * <p>
     * It is recommended that client code not reset mappings established by the
     * data transfer subsystem. This method should only be used for
     * application-level mappings.
* * @param nat the <code>String</code> native key for the mappings * @param flavors the <code>DataFlavor</code> values for the mappings * @throws NullPointerException if nat or flavors is <code>null</code> * or if flavors contains <code>null</code> elements * * @see #setNativesForFlavor * @since 1.4 */ public synchronized void setFlavorsForNative(String nat, DataFlavor[] flavors) { Objects.requireNonNull(nat, "Null native not permitted"); Objects.requireNonNull(flavors, "Null flavors not permitted"); getNativeToFlavor().remove(nat); for (DataFlavor flavor : flavors) { addFlavorForUnencodedNative(nat, flavor); } disabledMappingGenerationKeys.add(nat); flavorsForNativeCache.remove(nat); } /** * Encodes a MIME type for use as a <code>String</code> native. The format * of an encoded representation of a MIME type is implementation-dependent. * The only restrictions are: * <ul> * <li>The encoded representation is <code>null</code> if and only if the * MIME type <code>String</code> is <code>null</code>.</li> * <li>The encoded representations for two non-<code>null</code> MIME type * <code>String</code>s are equal if and only if these <code>String</code>s * are equal according to <code>String.equals(Object)</code>.</li> * </ul> * <p> * The reference implementation of this method returns the specified MIME * type <code>String</code> prefixed with <code>JAVA_DATAFLAVOR:</code>. * * @param mimeType the MIME type to encode * @return the encoded <code>String</code>, or <code>null</code> if * mimeType is <code>null</code> */ public static String encodeJavaMIMEType(String mimeType) { return (mimeType != null) ? JavaMIME + mimeType : null; } /** * Encodes a <code>DataFlavor</code> for use as a <code>String</code> * native. The format of an encoded <code>DataFlavor</code> is * implementation-dependent. 
The only restrictions are: * <ul> * <li>The encoded representation is <code>null</code> if and only if the * specified <code>DataFlavor</code> is <code>null</code> or its MIME type * <code>String</code> is <code>null</code>.</li> * <li>The encoded representations for two non-<code>null</code> * <code>DataFlavor</code>s with non-<code>null</code> MIME type * <code>String</code>s are equal if and only if the MIME type * <code>String</code>s of these <code>DataFlavor</code>s are equal * according to <code>String.equals(Object)</code>.</li> * </ul> * <p> * The reference implementation of this method returns the MIME type * <code>String</code> of the specified <code>DataFlavor</code> prefixed * with <code>JAVA_DATAFLAVOR:</code>. * * @param flav the <code>DataFlavor</code> to encode * @return the encoded <code>String</code>, or <code>null</code> if * flav is <code>null</code> or has a <code>null</code> MIME type */ public static String encodeDataFlavor(DataFlavor flav) { return (flav != null) ? SystemFlavorMap.encodeJavaMIMEType(flav.getMimeType()) : null; } /** * Returns whether the specified <code>String</code> is an encoded Java * MIME type. * * @param str the <code>String</code> to test * @return <code>true</code> if the <code>String</code> is encoded; * <code>false</code> otherwise */ public static boolean isJavaMIMEType(String str) { return (str != null && str.startsWith(JavaMIME, 0)); } /** * Decodes a <code>String</code> native for use as a Java MIME type. * * @param nat the <code>String</code> to decode * @return the decoded Java MIME type, or <code>null</code> if nat is not * an encoded <code>String</code> native */ public static String decodeJavaMIMEType(String nat) { return (isJavaMIMEType(nat)) ? nat.substring(JavaMIME.length(), nat.length()).trim() : null; } /** * Decodes a <code>String</code> native for use as a * <code>DataFlavor</code>. 
* * @param nat the <code>String</code> to decode * @return the decoded <code>DataFlavor</code>, or <code>null</code> if * nat is not an encoded <code>String</code> native */ public static DataFlavor decodeDataFlavor(String nat) throws ClassNotFoundException { String retval_str = SystemFlavorMap.decodeJavaMIMEType(nat); return (retval_str != null) ? new DataFlavor(retval_str) : null; } private static final class SoftCache<K, V> { Map<K, SoftReference<LinkedHashSet<V>>> cache; public void put(K key, LinkedHashSet<V> value) { if (cache == null) { cache = new HashMap<>(1); } cache.put(key, new SoftReference<>(value)); } public void remove(K key) { if (cache == null) return; cache.remove(null); cache.remove(key); } public LinkedHashSet<V> check(K key) { if (cache == null) return null; SoftReference<LinkedHashSet<V>> ref = cache.get(key); if (ref != null) { return ref.get(); } return null; } } }
package de.fhpotsdam.unfolding.mapdisplay;

import java.util.ArrayList;
import java.util.Comparator;
import java.util.Hashtable;
import java.util.List;
import java.util.Vector;

import org.apache.log4j.Logger;

import processing.core.PGraphics;
import processing.core.PVector;

import de.fhpotsdam.unfolding.UnfoldingMap;
import de.fhpotsdam.unfolding.core.Coordinate;
import de.fhpotsdam.unfolding.geo.Location;
import de.fhpotsdam.unfolding.marker.Marker;
import de.fhpotsdam.unfolding.marker.MarkerManager;
import de.fhpotsdam.unfolding.providers.AbstractMapProvider;
import de.fhpotsdam.unfolding.tiles.TileLoader;
import de.fhpotsdam.unfolding.tiles.TileLoaderListener;
import de.fhpotsdam.unfolding.utils.ScreenPosition;

/**
 * Handles tiles management and display, and map location and screen position conversions.
 * 
 * Use {@link UnfoldingMap} in your application. This is the internal class, and should be used only if you know what
 * you are doing.
 * 
 */
public abstract class AbstractMapDisplay implements TileLoaderListener {

	/** Width of a single map tile in pixels. */
	public static final int TILE_WIDTH = 256;
	/** Height of a single map tile in pixels. */
	public static final int TILE_HEIGHT = 256;

	// If less than this memory threshold is reached, oldest tile images will be deleted.
	private static final long MEMORY_THRESHOLD_BYTES = 300000;
	// Number of tile images to delete.
	private static final int MEMORY_THRESHOLD_IMAGES = 25;

	public static Logger log = Logger.getLogger(AbstractMapDisplay.class);

	// Dimension of this map display
	protected float width;
	protected float height;

	// Map values: Inner position and scale (used for tiles)
	public double innerOffsetX = -TILE_WIDTH / 2; // half the world width, at zoom 0
	public double innerOffsetY = -TILE_HEIGHT / 2; // half the world height, at zoom 0
	public float innerScale = 1;
	public float innerAngle;

	// MapDisplay values: Outer position, rotation, and scale
	/** Horizontal offset of this MapDisplay (in screen coordinates). */
	public float offsetX;
	/** Vertical offset of this MapDisplay (in screen coordinates). */
	public float offsetY;
	/** Rotation of this MapDisplay. */
	public float angle;
	/** Scale of this MapDisplay. Not used yet! */
	public float scale = 1.0f;
	/** Center of outer transformations in screen pixel. */
	protected PVector transformationCenter;

	// MapDisplay values: Inner stuff
	protected PVector innerTransformationCenter;

	// List of MarkerManager with one default MarkerManager
	protected List<MarkerManager<Marker>> markerManagerList;

	// Tiles
	public int max_pending = 4;
	// public int max_images_to_keep = 256;
	public int max_images_to_keep = 1024;
	public int grid_padding = 1; // set to 0 for debugging purposes

	/** Check whether all currently visible tiles have been loaded. */
	protected boolean allTilesLoaded = false;

	protected AbstractMapProvider provider;

	/** Pending threads to load tiles for coordinate. */
	protected Hashtable<Coordinate, Runnable> pending = new Hashtable<Coordinate, Runnable>();
	/** Loaded tiles for coordinate. */
	protected Hashtable<Coordinate, Object> images = new Hashtable<Coordinate, Object>();
	/** Queue of coordinates to create threads and load tiles.
*/ protected Vector<Coordinate> queue = new Vector<Coordinate>(); protected Vector<Object> recent_images = new Vector<Object>(); protected ZoomComparator zoomComparator = new ZoomComparator(); protected QueueSorter queueSorter = new QueueSorter(); private MarkerManager<Marker> defaultMarkerManager = null; protected AbstractMapDisplay(AbstractMapProvider provider, float width, float height) { this.provider = provider; this.width = width; this.height = height; transformationCenter = new PVector(width / 2, height / 2); innerTransformationCenter = new PVector(width / 2, height / 2); innerScale = (float) Math.ceil(Math.min(height / (float) TILE_WIDTH, width / (float) TILE_HEIGHT)); markerManagerList = new ArrayList<MarkerManager<Marker>>(); } public void resize(float width, float height) { this.width = width; this.height = height; } public AbstractMapProvider getMapProvider() { return this.provider; } public void setMapProvider(AbstractMapProvider provider) { if (this.provider.getClass() != provider.getClass()) { this.provider = provider; images.clear(); queue.clear(); pending.clear(); } } public abstract PGraphics getInnerPG(); public abstract PGraphics getOuterPG(); public PGraphics getMask() { return null; } public abstract void draw(); /** * Sets background color of map. * * @param color * Color for the background canvas. Can be semi-transparent. If null it is not used. */ public abstract void setBackgroundColor(Integer color); // MarkerManagement ----------------------------------------------- /** * You need to set the map of the given MarkerManager before using. 
	 */
	public void addMarkerManager(MarkerManager<Marker> markerManager) {
		// Replace default MarkerManager, if only default exists and has no entries
		if (markerManagerList.size() == 1) {
			MarkerManager<?> mm = markerManagerList.get(0);
			if (mm.getMarkers().size() == 0 && mm.equals(this.defaultMarkerManager)) {
				markerManagerList.remove(0);
				this.defaultMarkerManager = null;
			}
		}
		markerManagerList.add(markerManager);
	}

	// NOTE(review): throws IndexOutOfBoundsException if no manager exists —
	// confirm callers guarantee a non-empty list.
	public MarkerManager<Marker> getLastMarkerManager() {
		return markerManagerList.get(markerManagerList.size() - 1);
	}

	// NOTE(review): may return null (see getMarkerManager(int)).
	public MarkerManager<Marker> getDefaultMarkerManager() {
		return getMarkerManager(0);
	}

	@Deprecated
	public MarkerManager<Marker> getMarkerManager() {
		return getDefaultMarkerManager();
	}

	/**
	 * Returns the marker manager at the given position, or null if the index
	 * is out of range.
	 */
	public MarkerManager<Marker> getMarkerManager(int index) {
		if (markerManagerList.size() > index) {
			return markerManagerList.get(index);
		} else {
			return null;
		}
	}

	/**
	 * Adds a marker to the default marker manager.
	 * 
	 * If you have more than one marker manager, use {@link MarkerManager#addMarker(Marker)} instead.
	 * 
	 * @param marker
	 *            The marker to add.
	 */
	public void addMarker(Marker marker) {
		getDefaultMarkerManager().addMarker(marker);
	}

	/**
	 * Adds multiple markers to the default marker manager.
	 * 
	 * If you have more than one marker manager, use {@link MarkerManager#addMarkers(List)} instead.
	 * 
	 * @param markers
	 *            The markers to add.
	 */
	public void addMarkers(List<Marker> markers) {
		getDefaultMarkerManager().addMarkers(markers);
	}

	// TRANSFORMATION --------------------------------------------------

	public float getWidth() {
		return width;
	}

	public float getHeight() {
		return height;
	}

	/**
	 * Updates the matrix to transform the map with the current transformation center.
	 */
	public abstract void calculateMatrix();

	public abstract void calculateInnerMatrix();

	/**
	 * Calculates offset and rotation for screen canvas position, to be used with the internal transformation matrix.
	 * 
	 * @param x
	 *            Cartesian x coordinate.
	 * @param y
	 *            Cartesian y coordinate.
* @param inverse * Indicates back and forward matrix calculation. Inverse is used for point2location, non-inverse for * location2point. * @return An 1d-2elements-array with x and y. */ protected abstract float[] getTransformedPosition(float x, float y, boolean inverse); public abstract float[] getObjectFromInnerObjectPosition(float x, float y); public abstract float[] getInnerObjectFromObjectPosition(float x, float y); public abstract float[] getScreenFromInnerObjectPosition(float x, float y); @Deprecated public abstract float[] getInnerObjectFromScreenPosition(float x, float y); public abstract float[] getInnerObject(ScreenPosition screenPosition); public abstract float[] getScreenFromObjectPosition(float x, float y); public abstract float[] getObjectFromScreenPosition(float x, float y); public abstract Location getLocationFromInnerObjectPosition(float x, float y); @Deprecated public abstract Location getLocationFromScreenPosition(float x, float y); public abstract Location getLocation(float x, float y); public abstract Location getLocation(ScreenPosition screenPosition); public abstract Location getLocationFromObjectPosition(float x, float y); public abstract float[] getInnerObjectFromLocation(Location location); @Deprecated public abstract float[] getScreenPositionFromLocation(Location location); public abstract ScreenPosition getScreenPosition(Location location); public abstract ScreenPosition getScreenPositionFloat(Location location); public abstract float[] getObjectFromLocation(Location location); public PVector getTransformationCenter() { return transformationCenter; } public PVector getInnerTransformationCenter() { return innerTransformationCenter; } /** * Set outer transformation center. * * @param transformationCenter * Point in screen coordinates. */ public void setTransformationCenter(PVector transformationCenter) { // NB Offset subtraction due to special handling (i.e. 
not included in matrix) this.transformationCenter.x = transformationCenter.x - offsetX; this.transformationCenter.y = transformationCenter.y - offsetY; } /** * Set inner transformation center. * * @param innerTransformationCenter * Point in screen coordinates. */ public void setInnerTransformationCenter(PVector innerTransformationCenter) { float[] xy = getObjectFromScreenPosition(innerTransformationCenter.x, innerTransformationCenter.y); this.innerTransformationCenter.x = xy[0] - (float) innerOffsetX; this.innerTransformationCenter.y = xy[1] - (float) innerOffsetY; } // TILES -------------------------------------------------------- public void processQueue() { while (pending.size() < max_pending && queue.size() > 0) { Coordinate coord = (Coordinate) queue.remove(0); TileLoader tileLoader = createTileLoader(coord); pending.put(coord, tileLoader); new Thread(tileLoader).start(); } } protected abstract TileLoader createTileLoader(Coordinate coord); public void grabTile(Coordinate coord) { if (!pending.containsKey(coord) && !queue.contains(coord) && !images.containsKey(coord)) queue.add(coord); } // TODO images & pending thread safe? public void tileLoaded(Coordinate coord, Object image) { if (pending.containsKey(coord) && coord != null && image != null) { images.put(coord, image); pending.remove(coord); } else { // Re-adds to queue queue.add(coord); pending.remove(coord); } if (pending.size() == 0 && queue.size() == 0) { allTilesLoaded = true; tilesLoaded(); } else { allTilesLoaded = false; } } /** * Check whether all currently visible tiles have been loaded. * * @return True if all tiles have been loaded, false otherwise. */ public boolean allTilesLoaded() { return allTilesLoaded; } /** * Will be called if all tiles have been loaded. Subclasses can implement this method to notify a method in the * client app. 
*/ public abstract void tilesLoaded(); // LOAD SORTING public class QueueSorter implements Comparator<Coordinate> { Coordinate center; public void setCenter(Coordinate center) { this.center = center; } public int compare(Coordinate c1, Coordinate c2) { if (c1.zoom == center.zoom) { if (c2.zoom == center.zoom) { // only compare squared distances, saves cpu float d1 = (float) Math.pow(c1.column - center.column, 2) + (float) Math.pow(c1.row - center.row, 2); float d2 = (float) Math.pow(c2.column - center.column, 2) + (float) Math.pow(c2.row - center.row, 2); return d1 < d2 ? -1 : d1 > d2 ? 1 : 0; } else { return -1; } } else if (c2.zoom == center.zoom) { return 1; } else { float d1 = Math.abs(c1.zoom - center.zoom); float d2 = Math.abs(c2.zoom - center.zoom); return d1 < d2 ? -1 : d1 > d2 ? 1 : 0; } } } public class ZoomComparator implements Comparator<Coordinate> { public int compare(Coordinate c1, Coordinate c2) { return c1.zoom < c2.zoom ? -1 : c1.zoom > c2.zoom ? 1 : 0; } } /** * Cleans oldest images if too many images exist, or if memory is too full. * * Tiles are added to the recency-based list to allow removing oldest ones from images-array. * * REVISIT Check java.lang.ref.SoftReference for better solution. */ protected void cleanupImageBuffer() { if (recent_images.size() > max_images_to_keep) { log.info("Cleaning image buffer due to MAX_IMAGE reached."); recent_images.subList(0, recent_images.size() - max_images_to_keep).clear(); images.values().retainAll(recent_images); } else if (Runtime.getRuntime().freeMemory() < MEMORY_THRESHOLD_BYTES) { log.info("Cleaning image buffer due to MEMORY_THRESHOLD reached."); int imagesToDelete = recent_images.size() > MEMORY_THRESHOLD_IMAGES ? 
MEMORY_THRESHOLD_IMAGES : recent_images.size(); recent_images.subList(0, imagesToDelete).clear(); images.values().retainAll(recent_images); } } protected void cleanupImageBuffer(boolean force) { if (force) { images.clear(); } else { cleanupImageBuffer(); } } /** * Set the map provider, dynamically. The currently selected area, as well as all events etc will stay. * * Note that the image buffer will be cleaned, i.e. all tiles need to be loaded anew. * * @param provider * The provider to use. */ public void setProvider(AbstractMapProvider provider) { this.provider = provider; cleanupImageBuffer(true); } protected void createDefaultMarkerManager(UnfoldingMap map) { if (this.defaultMarkerManager == null) { this.defaultMarkerManager = new MarkerManager<Marker>(); this.defaultMarkerManager.setMap(map); markerManagerList.add(defaultMarkerManager); } } public List<MarkerManager<Marker>> getMarkerManagerList() { return markerManagerList; } public void removeMarkerManager(MarkerManager<Marker> markerManager) { markerManagerList.remove(markerManager); } public void removeMarkerManager(int i) { markerManagerList.remove(i); } }
/* * Copyright 2000-2009 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.codeInsight.completion; import com.intellij.codeInsight.completion.impl.CompletionServiceImpl; import com.intellij.codeInsight.completion.impl.CompletionSorterImpl; import com.intellij.codeInsight.lookup.*; import com.intellij.codeInsight.lookup.impl.EmptyLookupItem; import com.intellij.codeInsight.lookup.impl.LookupImpl; import com.intellij.codeInsight.template.impl.LiveTemplateLookupElement; import com.intellij.featureStatistics.FeatureUsageTracker; import com.intellij.featureStatistics.FeatureUsageTrackerImpl; import com.intellij.ide.ui.UISettings; import com.intellij.openapi.Disposable; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.editor.Document; import com.intellij.openapi.editor.RangeMarker; import com.intellij.openapi.editor.event.DocumentAdapter; import com.intellij.openapi.editor.event.DocumentEvent; import com.intellij.openapi.util.Condition; import com.intellij.openapi.util.Disposer; import com.intellij.openapi.util.Key; import com.intellij.openapi.util.Pair; import com.intellij.openapi.util.text.StringUtil; import com.intellij.psi.statistics.StatisticsInfo; import com.intellij.util.Alarm; import com.intellij.util.ProcessingContext; import com.intellij.util.containers.ContainerUtil; import com.intellij.util.containers.MultiMap; import org.jetbrains.annotations.NotNull; 
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import java.util.*;

/**
 * Arranges completion lookup items: groups them by their {@link CompletionSorterImpl},
 * classifies them by relevance (or alphabetically, per UI settings), chooses the item
 * to preselect, and tracks completion usage statistics.
 */
public class CompletionLookupArranger extends LookupArranger {
  private static final Logger LOG = Logger.getInstance("#com.intellij.codeInsight.completion.CompletionLookupArranger");
  // Pending statistics update for the last completed item; applied or cancelled later.
  @Nullable private static StatisticsUpdate ourPendingUpdate;
  private static final Alarm ourStatsAlarm = new Alarm(ApplicationManager.getApplication());
  // Per-element key holding "itemText\0###tailText###typeText" used to re-identify an
  // element across arrangements.
  private static final Key<String> PRESENTATION_INVARIANT = Key.create("PRESENTATION_INVARIANT");
  // Case-insensitive ordering by the presentation invariant; assumes addElement() has
  // stored the invariant for every compared element.
  private static final Comparator<LookupElement> BY_PRESENTATION_COMPARATOR = new Comparator<LookupElement>() {
    @Override
    public int compare(LookupElement o1, LookupElement o2) {
      String invariant = PRESENTATION_INVARIANT.get(o1);
      assert invariant != null;
      return invariant.compareToIgnoreCase(PRESENTATION_INVARIANT.get(o2));
    }
  };
  private static final int MAX_PREFERRED_COUNT = 5;
  public static final Key<Boolean> PURE_RELEVANCE = Key.create("PURE_RELEVANCE");
  public static final Key<Integer> PREFIX_CHANGES = Key.create("PREFIX_CHANGES");
  private static final UISettings ourUISettings = UISettings.getInstance();
  // Items pinned at the top of the list while the lookup is shown (see freezeTopItems).
  private final List<LookupElement> myFrozenItems = new ArrayList<LookupElement>();

  static {
    // Cancel any pending statistics update when the application is disposed.
    Disposer.register(ApplicationManager.getApplication(), new Disposable() {
      @Override
      public void dispose() {
        cancelLastCompletionStatisticsUpdate();
      }
    });
  }

  private final CompletionLocation myLocation;
  private final CompletionParameters myParameters;
  private final CompletionProgressIndicator myProcess;
  // One classifier chain per sorter, in insertion order.
  @SuppressWarnings({"MismatchedQueryAndUpdateOfCollection"})
  private final Map<CompletionSorterImpl, Classifier<LookupElement>> myClassifiers =
    new LinkedHashMap<CompletionSorterImpl, Classifier<LookupElement>>();
  // Number of prefix changes since creation; exposed to classifiers via PREFIX_CHANGES.
  private int myPrefixChanges;

  public CompletionLookupArranger(final CompletionParameters parameters, CompletionProgressIndicator process) {
    myParameters = parameters;
    myProcess = process;
    myLocation = new CompletionLocation(parameters);
  }

  // Buckets the elements by their sorter, preserving insertion order of sorters.
  private MultiMap<CompletionSorterImpl, LookupElement> groupItemsBySorter(List<LookupElement> source) {
    MultiMap<CompletionSorterImpl, LookupElement> inputBySorter = new MultiMap<CompletionSorterImpl, LookupElement>() {
      @Override
      protected Map<CompletionSorterImpl, Collection<LookupElement>> createMap() {
        return ContainerUtil.newLinkedHashMap();
      }
    };
    for (LookupElement element : source) {
      inputBySorter.putValue(obtainSorter(element), element);
    }
    return inputBySorter;
  }

  @NotNull
  private CompletionSorterImpl obtainSorter(LookupElement element) {
    return myProcess.getSorter(element);
  }

  /**
   * Builds a human-readable relevance description per item (used for diagnostics):
   * a sorter prefix when several sorters are in play, plus whatever each classifier
   * appends via describeItems().
   */
  @Override
  public Map<LookupElement, StringBuilder> getRelevanceStrings() {
    final LinkedHashMap<LookupElement,StringBuilder> map = new LinkedHashMap<LookupElement, StringBuilder>();
    for (LookupElement item : myItems) {
      map.put(item, new StringBuilder());
    }
    final MultiMap<CompletionSorterImpl, LookupElement> inputBySorter =
      groupItemsBySorter(new ArrayList<LookupElement>(map.keySet()));

    // Only label items with their sorter when more than one sorter is involved.
    if (inputBySorter.size() > 1) {
      for (LookupElement element : map.keySet()) {
        map.get(element).append(obtainSorter(element)).append(": ");
      }
    }

    for (CompletionSorterImpl sorter : inputBySorter.keySet()) {
      final LinkedHashMap<LookupElement, StringBuilder> subMap = new LinkedHashMap<LookupElement, StringBuilder>();
      for (LookupElement element : inputBySorter.get(sorter)) {
        subMap.put(element, map.get(element));
      }
      Classifier<LookupElement> classifier = myClassifiers.get(sorter);
      if (classifier != null) {
        classifier.describeItems(subMap, createContext(false));
      }
    }

    return map;
  }

  /**
   * Registers a new element: stores its presentation invariant, routes it to the
   * classifier for its sorter (building the classifier on first use), then delegates
   * to the superclass.
   */
  @Override
  public void addElement(Lookup lookup, LookupElement element, LookupElementPresentation presentation) {
    StatisticsWeigher.clearBaseStatisticsInfo(element);

    final String invariant = presentation.getItemText() + "\0###" + getTailTextOrSpace(presentation) + "###" + presentation.getTypeText();
    element.putUserData(PRESENTATION_INVARIANT, invariant);

    CompletionSorterImpl sorter = obtainSorter(element);
    Classifier<LookupElement> classifier = myClassifiers.get(sorter);
    if (classifier == null) {
      // Alphabetic ordering is the tail of every classifier chain.
      myClassifiers.put(sorter, classifier = sorter.buildClassifier(new AlphaClassifier(lookup)));
    }
    classifier.addElement(element);

    super.addElement(lookup, element, presentation);
  }

  // Empty tail text is mapped to a single space so the invariant's separators stay unambiguous.
  @NotNull
  private static String getTailTextOrSpace(LookupElementPresentation presentation) {
    String tailText = presentation.getTailText();
    return tailText == null || tailText.isEmpty() ? " " : tailText;
  }

  // Alphabetical ordering: prefix-start matches first, then middle matches, each group
  // sorted case-insensitively by presentation invariant.
  private static List<LookupElement> sortByPresentation(Iterable<LookupElement> source, Lookup lookup) {
    ArrayList<LookupElement> startMatches = ContainerUtil.newArrayList();
    ArrayList<LookupElement> middleMatches = ContainerUtil.newArrayList();
    for (LookupElement element : source) {
      (CompletionServiceImpl.isStartMatch(element, lookup) ? startMatches : middleMatches).add(element);
    }
    ContainerUtil.sort(startMatches, BY_PRESENTATION_COMPARATOR);
    ContainerUtil.sort(middleMatches, BY_PRESENTATION_COMPARATOR);
    startMatches.addAll(middleMatches);
    return startMatches;
  }

  private static boolean isAlphaSorted() {
    return ourUISettings.SORT_LOOKUP_ELEMENTS_LEXICOGRAPHICALLY;
  }

  /**
   * Produces the visible list model and the index to preselect. Pads the model with
   * dummy "loading..." entries so its size matches the number of matching items.
   */
  @Override
  public Pair<List<LookupElement>, Integer> arrangeItems(@NotNull Lookup lookup, boolean onExplicitAction) {
    List<LookupElement> items = getMatchingItems();
    MultiMap<CompletionSorterImpl, LookupElement> itemsBySorter = groupItemsBySorter(items);

    LookupElement relevantSelection = findMostRelevantItem(itemsBySorter);
    List<LookupElement> listModel = isAlphaSorted() ?
                                    sortByPresentation(items, lookup) :
                                    fillModelByRelevance((LookupImpl)lookup, items, itemsBySorter, relevantSelection);

    int toSelect = getItemToSelect((LookupImpl)lookup, listModel, onExplicitAction, relevantSelection);
    LOG.assertTrue(toSelect >= 0);
    addDummyItems(items.size() - listModel.size(), listModel);

    return new Pair<List<LookupElement>, Integer>(listModel, toSelect);
  }

  // Appends `count` shared placeholder entries (no-op when count <= 0).
  private static void addDummyItems(int count, List<LookupElement> listModel) {
    EmptyLookupItem dummy = new EmptyLookupItem("loading...", true);
    for (int i = count; i > 0; i--) {
      listModel.add(dummy);
    }
  }

  /**
   * Relevance-ordered model: prefix items, frozen items, the first MAX_PREFERRED_COUNT
   * by relevance, the current selection, then enough further items to fill the visible
   * area. A sudden live template is demoted from the very top.
   */
  private List<LookupElement> fillModelByRelevance(LookupImpl lookup,
                                                   List<LookupElement> items,
                                                   MultiMap<CompletionSorterImpl, LookupElement> inputBySorter,
                                                   @Nullable LookupElement relevantSelection) {
    Iterator<LookupElement> byRelevance = sortByRelevance(inputBySorter).iterator();

    final LinkedHashSet<LookupElement> model = new LinkedHashSet<LookupElement>();

    addPrefixItems(model);
    addFrozenItems(items, model);
    addSomeItems(model, byRelevance, new Condition<LookupElement>() {
      @Override
      public boolean value(LookupElement lastAdded) {
        return model.size() >= MAX_PREFERRED_COUNT;
      }
    });
    addCurrentlySelectedItemToTop(lookup, items, model);

    freezeTopItems(lookup, model);

    ensureItemAdded(items, model, byRelevance, lookup.getCurrentItem());
    ensureItemAdded(items, model, byRelevance, relevantSelection);
    ensureEverythingVisibleAdded(lookup, model, byRelevance);

    ArrayList<LookupElement> result = new ArrayList<LookupElement>(model);
    if (result.size() > 1) {
      LookupElement first = result.get(0);
      // Don't let a "sudden" live template occupy the first slot when a prefix item exists.
      if (isLiveTemplate(first) && isPrefixItem(lookup, first, true)) {
        ContainerUtil.swapElements(result, 0, 1);
      }
    }

    return result;
  }

  // Keeps pulling items by relevance until the visible area (plus three pages of slack)
  // is covered; in unit-test mode everything is added.
  private static void ensureEverythingVisibleAdded(LookupImpl lookup, final LinkedHashSet<LookupElement> model, Iterator<LookupElement> byRelevance) {
    JList list = lookup.getList();
    final boolean testMode = ApplicationManager.getApplication().isUnitTestMode();
    final int limit = Math.max(list.getLastVisibleIndex(), model.size()) + ourUISettings.MAX_LOOKUP_LIST_HEIGHT * 3;
    addSomeItems(model, byRelevance, new Condition<LookupElement>() {
      @Override
      public boolean value(LookupElement lastAdded) {
        return !testMode && model.size() >= limit;
      }
    });
  }

  // Pulls items by relevance until the given item lands in the model (identity match),
  // provided the item belongs to the current matching set.
  private static void ensureItemAdded(List<LookupElement> items, LinkedHashSet<LookupElement> model,
                                      Iterator<LookupElement> byRelevance, @Nullable final LookupElement item) {
    if (item != null && ContainerUtil.indexOfIdentity(items, item) >= 0 && !model.contains(item)) {
      addSomeItems(model, byRelevance, new Condition<LookupElement>() {
        @Override
        public boolean value(LookupElement lastAdded) {
          return lastAdded == item;
        }
      });
    }
  }

  // Remembers the current top items so they stay in place on the next arrangement,
  // but only once the lookup is actually visible.
  private void freezeTopItems(LookupImpl lookup, LinkedHashSet<LookupElement> model) {
    myFrozenItems.clear();
    if (lookup.isShown()) {
      myFrozenItems.addAll(model);
    }
  }

  private void addFrozenItems(List<LookupElement> items, LinkedHashSet<LookupElement> model) {
    // Drop frozen items that no longer match the prefix.
    myFrozenItems.retainAll(items);
    model.addAll(myFrozenItems);
  }

  // Exact-prefix items first, then other prefix items, each relevance-sorted.
  private void addPrefixItems(LinkedHashSet<LookupElement> model) {
    ContainerUtil.addAll(model, sortByRelevance(groupItemsBySorter(getPrefixItems(true))));
    ContainerUtil.addAll(model, sortByRelevance(groupItemsBySorter(getPrefixItems(false))));
  }

  // Keeps the currently selected item in the model unless the user has touched the selection.
  private static void addCurrentlySelectedItemToTop(Lookup lookup, List<LookupElement> items, LinkedHashSet<LookupElement> model) {
    if (!lookup.isSelectionTouched()) {
      LookupElement lastSelection = lookup.getCurrentItem();
      if (ContainerUtil.indexOfIdentity(items, lastSelection) >= 0) {
        model.add(lastSelection);
      }
    }
  }

  // Adds items from the iterator until stopWhen fires (the stopping item IS added).
  private static void addSomeItems(LinkedHashSet<LookupElement> model, Iterator<LookupElement> iterator, Condition<LookupElement> stopWhen) {
    while (iterator.hasNext()) {
      LookupElement item = iterator.next();
      model.add(item);
      if (stopWhen.value(item)) {
        break;
      }
    }
  }

  // Concatenates each sorter's classified (relevance-ordered) items, sorter by sorter.
  private Iterable<LookupElement> sortByRelevance(MultiMap<CompletionSorterImpl, LookupElement> inputBySorter) {
    final List<Iterable<LookupElement>> byClassifier = ContainerUtil.newArrayList();
    for (CompletionSorterImpl sorter : myClassifiers.keySet()) {
      ProcessingContext context = createContext(false);
      byClassifier.add(myClassifiers.get(sorter).classify(inputBySorter.get(sorter), context));
    }
    //noinspection unchecked
    return ContainerUtil.concat(byClassifier.toArray(new Iterable[byClassifier.size()]));
  }

  // Context passed to classifiers; carries the prefix-change count and, optionally,
  // the PURE_RELEVANCE flag.
  private ProcessingContext createContext(boolean pureRelevance) {
    ProcessingContext context = new ProcessingContext();
    context.put(PREFIX_CHANGES, myPrefixChanges);
    if (pureRelevance) {
      context.put(PURE_RELEVANCE, Boolean.TRUE);
    }
    return context;
  }

  @Override
  public LookupArranger createEmptyCopy() {
    return new CompletionLookupArranger(myParameters, myProcess);
  }

  /**
   * Picks the index to preselect: sticks to the previous selection when the user touched
   * it (by identity, loading placeholder, or presentation invariant), otherwise prefers
   * an exact-prefix/selected-text match, falling back to the most relevant item (or 0).
   */
  private static int getItemToSelect(LookupImpl lookup, List<LookupElement> items, boolean onExplicitAction, @Nullable LookupElement mostRelevant) {
    if (items.isEmpty() || lookup.getFocusDegree() == LookupImpl.FocusDegree.UNFOCUSED) {
      return 0;
    }

    if (lookup.isSelectionTouched() || !onExplicitAction) {
      final LookupElement lastSelection = lookup.getCurrentItem();
      int old = ContainerUtil.indexOfIdentity(items, lastSelection);
      if (old >= 0) {
        return old;
      }

      // Keep the selection on a still-loading placeholder at the same index.
      Object selectedValue = lookup.getList().getSelectedValue();
      if (selectedValue instanceof EmptyLookupItem && ((EmptyLookupItem)selectedValue).isLoading()) {
        int index = lookup.getList().getSelectedIndex();
        if (index >= 0 && index < items.size()) {
          return index;
        }
      }

      // Fall back to matching the previous selection by presentation invariant.
      for (int i = 0; i < items.size(); i++) {
        String invariant = PRESENTATION_INVARIANT.get(items.get(i));
        if (invariant != null && invariant.equals(PRESENTATION_INVARIANT.get(lastSelection))) {
          return i;
        }
      }
    }

    String selectedText = lookup.getEditor().getSelectionModel().getSelectedText();
    for (int i = 0; i < items.size(); i++) {
      LookupElement item = items.get(i);
      if (isAlphaSorted() && isPrefixItem(lookup, item, true) && !isLiveTemplate(item) ||
          item.getLookupString().equals(selectedText)) {
        return i;
      }
    }

    return Math.max(0, ContainerUtil.indexOfIdentity(items, mostRelevant));
  }

  // Returns the first classified element (across sorters) not vetoed by any
  // CompletionPreselectSkipper extension, or null.
  @Nullable
  private LookupElement findMostRelevantItem(MultiMap<CompletionSorterImpl, LookupElement> itemsBySorter) {
    final CompletionPreselectSkipper[] skippers = CompletionPreselectSkipper.EP_NAME.getExtensions();
    for (CompletionSorterImpl sorter : myClassifiers.keySet()) {
      ProcessingContext context = createContext(true);
      for (LookupElement element : myClassifiers.get(sorter).classify(itemsBySorter.get(sorter), context)) {
        if (!shouldSkip(skippers, element)) {
          return element;
        }
      }
    }

    return null;
  }

  private static boolean isLiveTemplate(LookupElement element) {
    return element instanceof LiveTemplateLookupElement && ((LiveTemplateLookupElement)element).sudden;
  }

  /**
   * Creates the pending statistics update for the chosen item and registers it as
   * ourPendingUpdate; the Disposable clears the static field on disposal.
   */
  public static StatisticsUpdate collectStatisticChanges(LookupElement item, final Lookup lookup) {
    applyLastCompletionStatisticsUpdate();

    final StatisticsInfo base = StatisticsWeigher.getBaseStatisticsInfo(item, null);
    if (base == StatisticsInfo.EMPTY) {
      return new StatisticsUpdate(StatisticsInfo.EMPTY);
    }

    StatisticsUpdate update = new StatisticsUpdate(StatisticsWeigher.composeStatsWithPrefix(base, lookup.itemPattern(item), true));
    ourPendingUpdate = update;
    Disposer.register(update, new Disposable() {
      @Override
      public void dispose() {
        //noinspection AssignmentToStaticFieldFromInstanceMethod
        ourPendingUpdate = null;
      }
    });

    return update;
  }

  /**
   * Defers committing the statistics update: cancels it if the user edits inside the
   * inserted range, applies it after a 20s alarm otherwise. All listeners/markers are
   * cleaned up when the update is disposed.
   */
  public static void trackStatistics(InsertionContext context, final StatisticsUpdate update) {
    if (ourPendingUpdate != update) {
      return;
    }

    final Document document = context.getDocument();
    int startOffset = context.getStartOffset();
    int tailOffset = context.getEditor().getCaretModel().getOffset();
    if (startOffset < 0 || tailOffset <= startOffset) {
      return;
    }

    final RangeMarker marker = document.createRangeMarker(startOffset, tailOffset);
    final DocumentAdapter listener = new DocumentAdapter() {
      @Override
      public void beforeDocumentChange(DocumentEvent e) {
        // && binds tighter than ||: cancel when the marker died, or when the edit
        // falls strictly inside the inserted range.
        if (!marker.isValid() || e.getOffset() > marker.getStartOffset() && e.getOffset() < marker.getEndOffset()) {
          cancelLastCompletionStatisticsUpdate();
        }
      }
    };

    ourStatsAlarm.addRequest(new Runnable() {
      @Override
      public void run() {
        if (ourPendingUpdate == update) {
          applyLastCompletionStatisticsUpdate();
        }
      }
    }, 20 * 1000);

    document.addDocumentListener(listener);
    Disposer.register(update, new Disposable() {
      @Override
      public void dispose() {
        document.removeDocumentListener(listener);
        marker.dispose();
        ourStatsAlarm.cancelAllRequests();
      }
    });
  }

  // Discards the pending update; disposal clears ourPendingUpdate via the registered Disposable.
  public static void cancelLastCompletionStatisticsUpdate() {
    if (ourPendingUpdate != null) {
      Disposer.dispose(ourPendingUpdate);
      assert ourPendingUpdate == null;
    }
  }

  // Commits the pending update (use-count + spared-chars tracking), then disposes it.
  public static void applyLastCompletionStatisticsUpdate() {
    StatisticsUpdate update = ourPendingUpdate;
    if (update != null) {
      update.performUpdate();
      Disposer.dispose(update);
      assert ourPendingUpdate == null;
    }
  }

  private boolean shouldSkip(CompletionPreselectSkipper[] skippers, LookupElement element) {
    for (final CompletionPreselectSkipper skipper : skippers) {
      if (skipper.skipElement(element, myLocation)) {
        return true;
      }
    }
    return false;
  }

  @Override
  public void prefixChanged(Lookup lookup) {
    myPrefixChanges++;
    // Frozen positions are invalidated by a prefix change.
    myFrozenItems.clear();
    super.prefixChanged(lookup);
  }

  /**
   * Deferred statistics commit for one completed item: bumps the use count and reports
   * how many characters completion spared the user.
   */
  static class StatisticsUpdate implements Disposable {
    private final StatisticsInfo myInfo;
    private int mySpared;

    public StatisticsUpdate(StatisticsInfo info) {
      myInfo = info;
    }

    void performUpdate() {
      myInfo.incUseCount();
      ((FeatureUsageTrackerImpl)FeatureUsageTracker.getInstance()).getCompletionStatistics().registerInvocation(mySpared);
    }

    @Override
    public void dispose() {
    }

    // Spared chars = inserted text minus whitespace minus what the user already typed;
    // a non-special completion char that appears in the text counts against it.
    public void addSparedChars(CompletionProgressIndicator indicator, LookupElement item, InsertionContext context, char completionChar) {
      String textInserted;
      if (context.getStartOffset() >= 0 && context.getTailOffset() >= context.getStartOffset()) {
        textInserted = context.getDocument().getText().substring(context.getStartOffset(), context.getTailOffset());
      } else {
        textInserted = item.getLookupString();
      }
      String withoutSpaces = StringUtil.replace(textInserted, new String[]{" ", "\t", "\n"}, new String[]{"", "", ""});
      int spared = withoutSpaces.length() - indicator.getLookup().itemPattern(item).length();
      if (!LookupEvent.isSpecialCompletionChar(completionChar) && withoutSpaces.contains(String.valueOf(completionChar))) {
        spared--;
      }
      if (spared > 0) {
        mySpared += spared;
      }
    }
  }

  // Terminal classifier: falls back to alphabetical (presentation) ordering.
  private static class AlphaClassifier extends Classifier<LookupElement> {
    private final Lookup myLookup;

    private AlphaClassifier(Lookup lookup) {
      myLookup = lookup;
    }

    @Override
    public void addElement(LookupElement element) {
    }

    @Override
    public Iterable<LookupElement> classify(Iterable<LookupElement> source, ProcessingContext context) {
      return sortByPresentation(source, myLookup);
    }

    @Override
    public void describeItems(LinkedHashMap<LookupElement, StringBuilder> map, ProcessingContext context) {
    }
  }
}
// // This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.8-b130911.1802 // See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a> // Any modifications to this file will be lost upon recompilation of the source schema. // Generated on: 2015.12.11 at 11:33:59 AM GMT // package org.opencb.biodata.formats.variant.clinvar.v24jaxb; import java.util.ArrayList; import java.util.List; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlAttribute; import javax.xml.bind.annotation.XmlElement; import javax.xml.bind.annotation.XmlSchemaType; import javax.xml.bind.annotation.XmlSeeAlso; import javax.xml.bind.annotation.XmlType; /** * Details of a method used to generate variant calls or predict/report * functional consequence. The name of the platform should represent a sequencer or an * array, e.g. sequencing or array , e.g. capillary, 454, Helicos, Solexa, SOLiD. This * structure should also be used if the method is 'Curation'. * * <p>Java class for MethodType complex type. * * <p>The following schema fragment specifies the expected content contained within this class. 
* * <pre> * &lt;complexType name="MethodType"> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;sequence> * &lt;element name="NamePlatform" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/> * &lt;element name="TypePlatform" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/> * &lt;element name="Purpose" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/> * &lt;element name="ResultType" minOccurs="0"> * &lt;simpleType> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}string"> * &lt;enumeration value="number of occurrences"/> * &lt;enumeration value="p value"/> * &lt;enumeration value="odds ratio"/> * &lt;enumeration value="variation call"/> * &lt;/restriction> * &lt;/simpleType> * &lt;/element> * &lt;element name="MinReported" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/> * &lt;element name="MaxReported" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/> * &lt;element name="ReferenceStandard" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/> * &lt;element name="Citation" type="{}CitationType" maxOccurs="unbounded" minOccurs="0"/> * &lt;element name="XRef" type="{}XrefType" maxOccurs="unbounded" minOccurs="0"/> * &lt;element name="Description" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/> * &lt;element name="Software" type="{}SoftwareSet" maxOccurs="unbounded" minOccurs="0"/> * &lt;element name="SourceType" minOccurs="0"> * &lt;simpleType> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}string"> * &lt;enumeration value="submitter-generated"/> * &lt;enumeration value="data mining"/> * &lt;enumeration value="data review"/> * &lt;/restriction> * &lt;/simpleType> * &lt;/element> * &lt;element name="MethodType" type="{}Methodtypelist"/> * &lt;element name="MethodAttribute" maxOccurs="unbounded" minOccurs="0"> * &lt;complexType> * &lt;complexContent> * &lt;restriction 
base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;sequence> * &lt;element name="Attribute"> * &lt;complexType> * &lt;simpleContent> * &lt;extension base="&lt;>AttributeType"> * &lt;attribute name="Type" use="required"> * &lt;simpleType> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}string"> * &lt;enumeration value="Location"/> * &lt;enumeration value="ControlsAppropriate"/> * &lt;enumeration value="MethodAppropriate"/> * &lt;enumeration value="TestName"/> * &lt;enumeration value="StructVarMethodType"/> * &lt;/restriction> * &lt;/simpleType> * &lt;/attribute> * &lt;/extension> * &lt;/simpleContent> * &lt;/complexType> * &lt;/element> * &lt;/sequence> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * &lt;/element> * &lt;element name="MethodResult" type="{}AttributeType" maxOccurs="unbounded" minOccurs="0"/> * &lt;/sequence> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * </pre> * * */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "MethodType", propOrder = { "namePlatform", "typePlatform", "purpose", "resultType", "minReported", "maxReported", "referenceStandard", "citation", "xRef", "description", "software", "sourceType", "methodType", "methodAttribute", "methodResult" }) @XmlSeeAlso({ org.opencb.biodata.formats.variant.clinvar.v24jaxb.ObservationSet.Method.class }) public class MethodType { @XmlElement(name = "NamePlatform") protected String namePlatform; @XmlElement(name = "TypePlatform") protected String typePlatform; @XmlElement(name = "Purpose") protected String purpose; @XmlElement(name = "ResultType") protected String resultType; @XmlElement(name = "MinReported") protected String minReported; @XmlElement(name = "MaxReported") protected String maxReported; @XmlElement(name = "ReferenceStandard") protected String referenceStandard; @XmlElement(name = "Citation") protected List<CitationType> citation; @XmlElement(name = "XRef") protected List<XrefType> xRef; @XmlElement(name = "Description") protected 
String description;

// NOTE(review): this class appears to be JAXB/XJC schema-generated code
// (field-accessed @XmlElement properties, lazily-initialised "live" list
// getters). Prefer regenerating from the schema over hand-editing — TODO
// confirm provenance before modifying.

@XmlElement(name = "Software")
protected List<SoftwareSet> software;
@XmlElement(name = "SourceType")
protected String sourceType;
@XmlElement(name = "MethodType", required = true)
@XmlSchemaType(name = "string")
protected Methodtypelist methodType;
@XmlElement(name = "MethodAttribute")
protected List<MethodType.MethodAttribute> methodAttribute;
@XmlElement(name = "MethodResult")
protected List<AttributeType> methodResult;

/**
 * Gets the value of the namePlatform property.
 *
 * @return possible object is {@link String }
 */
public String getNamePlatform() {
    return namePlatform;
}

/**
 * Sets the value of the namePlatform property.
 *
 * @param value allowed object is {@link String }
 */
public void setNamePlatform(String value) {
    this.namePlatform = value;
}

/**
 * Gets the value of the typePlatform property.
 *
 * @return possible object is {@link String }
 */
public String getTypePlatform() {
    return typePlatform;
}

/**
 * Sets the value of the typePlatform property.
 *
 * @param value allowed object is {@link String }
 */
public void setTypePlatform(String value) {
    this.typePlatform = value;
}

/**
 * Gets the value of the purpose property.
 *
 * @return possible object is {@link String }
 */
public String getPurpose() {
    return purpose;
}

/**
 * Sets the value of the purpose property.
 *
 * @param value allowed object is {@link String }
 */
public void setPurpose(String value) {
    this.purpose = value;
}

/**
 * Gets the value of the resultType property.
 *
 * @return possible object is {@link String }
 */
public String getResultType() {
    return resultType;
}

/**
 * Sets the value of the resultType property.
 *
 * @param value allowed object is {@link String }
 */
public void setResultType(String value) {
    this.resultType = value;
}

/**
 * Gets the value of the minReported property.
 *
 * @return possible object is {@link String }
 */
public String getMinReported() {
    return minReported;
}

/**
 * Sets the value of the minReported property.
 *
 * @param value allowed object is {@link String }
 */
public void setMinReported(String value) {
    this.minReported = value;
}

/**
 * Gets the value of the maxReported property.
 *
 * @return possible object is {@link String }
 */
public String getMaxReported() {
    return maxReported;
}

/**
 * Sets the value of the maxReported property.
 *
 * @param value allowed object is {@link String }
 */
public void setMaxReported(String value) {
    this.maxReported = value;
}

/**
 * Gets the value of the referenceStandard property.
 *
 * @return possible object is {@link String }
 */
public String getReferenceStandard() {
    return referenceStandard;
}

/**
 * Sets the value of the referenceStandard property.
 *
 * @param value allowed object is {@link String }
 */
public void setReferenceStandard(String value) {
    this.referenceStandard = value;
}

/**
 * Gets the value of the citation property.
 *
 * <p>JAXB convention: returns the <em>live</em> backing list, so changes made
 * to the returned list are reflected in this object — which is why there is
 * no corresponding {@code set} method. Never {@code null}; the list is
 * created lazily on first access.
 *
 * @return the live list of {@link CitationType } elements
 */
public List<CitationType> getCitation() {
    if (citation == null) {
        citation = new ArrayList<CitationType>();
    }
    return this.citation;
}

/**
 * Gets the value of the xRef property.
 *
 * <p>JAXB convention: returns the <em>live</em> backing list (see
 * {@link #getCitation()}). Never {@code null}; created lazily.
 *
 * @return the live list of {@link XrefType } elements
 */
public List<XrefType> getXRef() {
    if (xRef == null) {
        xRef = new ArrayList<XrefType>();
    }
    return this.xRef;
}

/**
 * Gets the value of the description property.
 *
 * @return possible object is {@link String }
 */
public String getDescription() {
    return description;
}

/**
 * Sets the value of the description property.
 *
 * @param value allowed object is {@link String }
 */
public void setDescription(String value) {
    this.description = value;
}

/**
 * Gets the value of the software property.
 *
 * <p>JAXB convention: returns the <em>live</em> backing list (see
 * {@link #getCitation()}). Never {@code null}; created lazily.
 *
 * @return the live list of {@link SoftwareSet } elements
 */
public List<SoftwareSet> getSoftware() {
    if (software == null) {
        software = new ArrayList<SoftwareSet>();
    }
    return this.software;
}

/**
 * Gets the value of the sourceType property.
 *
 * @return possible object is {@link String }
 */
public String getSourceType() {
    return sourceType;
}

/**
 * Sets the value of the sourceType property.
 *
 * @param value allowed object is {@link String }
 */
public void setSourceType(String value) {
    this.sourceType = value;
}

/**
 * Gets the value of the methodType property. Required element
 * ({@code @XmlElement(required = true)}).
 *
 * @return possible object is {@link Methodtypelist }
 */
public Methodtypelist getMethodType() {
    return methodType;
}

/**
 * Sets the value of the methodType property.
 *
 * @param value allowed object is {@link Methodtypelist }
 */
public void setMethodType(Methodtypelist value) {
    this.methodType = value;
}

/**
 * Gets the value of the methodAttribute property.
 *
 * <p>JAXB convention: returns the <em>live</em> backing list (see
 * {@link #getCitation()}). Never {@code null}; created lazily.
 *
 * @return the live list of {@link MethodType.MethodAttribute } elements
 */
public List<MethodType.MethodAttribute> getMethodAttribute() {
    if (methodAttribute == null) {
        methodAttribute = new ArrayList<MethodType.MethodAttribute>();
    }
    return this.methodAttribute;
}

/**
 * Gets the value of the methodResult property.
 *
 * <p>JAXB convention: returns the <em>live</em> backing list (see
 * {@link #getCitation()}). Never {@code null}; created lazily.
 *
 * @return the live list of {@link AttributeType } elements
 */
public List<AttributeType> getMethodResult() {
    if (methodResult == null) {
        methodResult = new ArrayList<AttributeType>();
    }
    return this.methodResult;
}

/**
 * Schema-anonymous complex type wrapping a single required, typed
 * {@code Attribute} element.
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "", propOrder = {
    "attribute"
})
public static class MethodAttribute {

    @XmlElement(name = "Attribute", required = true)
    protected MethodType.MethodAttribute.Attribute attribute;

    /**
     * Gets the value of the attribute property.
     *
     * @return possible object is {@link MethodType.MethodAttribute.Attribute }
     */
    public MethodType.MethodAttribute.Attribute getAttribute() {
        return attribute;
    }

    /**
     * Sets the value of the attribute property.
     *
     * @param value allowed object is {@link MethodType.MethodAttribute.Attribute }
     */
    public void setAttribute(MethodType.MethodAttribute.Attribute value) {
        this.attribute = value;
    }

    /**
     * An {@link AttributeType } extended with a required {@code Type} XML
     * attribute. The schema restricts {@code Type} to the enumeration:
     * {@code Location}, {@code ControlsAppropriate}, {@code MethodAppropriate},
     * {@code TestName}, {@code StructVarMethodType}.
     * <p>Note: the restriction is schema-level only — this class does not
     * validate the value at runtime.
     */
    @XmlAccessorType(XmlAccessType.FIELD)
    @XmlType(name = "")
    public static class Attribute
        extends AttributeType
    {

        @XmlAttribute(name = "Type", required = true)
        protected String type;

        /**
         * Gets the value of the type property.
         *
         * @return possible object is {@link String }
         */
        public String getType() {
            return type;
        }

        /**
         * Sets the value of the type property.
         *
         * @param value allowed object is {@link String }
         */
        public void setType(String value) {
            this.type = value;
        }

    }

}

} // closes the enclosing type, whose opening declaration precedes this chunk
/* * JBoss, Home of Professional Open Source. * Copyright 2014 Red Hat, Inc., and individual contributors * as indicated by the @author tags. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.undertow.websockets.jsr; import io.undertow.servlet.api.ClassIntrospecter; import io.undertow.servlet.api.InstanceFactory; import io.undertow.servlet.api.InstanceHandle; import io.undertow.servlet.api.ThreadSetupAction; import io.undertow.servlet.spec.ServletContextImpl; import io.undertow.servlet.util.ConstructorInstanceFactory; import io.undertow.servlet.util.ImmediateInstanceHandle; import io.undertow.util.PathTemplate; import io.undertow.websockets.WebSocketExtension; import io.undertow.websockets.client.WebSocketClient; import io.undertow.websockets.client.WebSocketClientNegotiation; import io.undertow.websockets.core.WebSocketChannel; import io.undertow.websockets.jsr.annotated.AnnotatedEndpointFactory; import org.xnio.IoFuture; import org.xnio.IoUtils; import org.xnio.Pool; import org.xnio.XnioWorker; import org.xnio.http.UpgradeFailedException; import org.xnio.ssl.XnioSsl; import javax.servlet.DispatcherType; import javax.websocket.ClientEndpoint; import javax.websocket.ClientEndpointConfig; import javax.websocket.CloseReason; import javax.websocket.DeploymentException; import javax.websocket.Endpoint; import javax.websocket.Extension; import javax.websocket.HandshakeResponse; import javax.websocket.Session; import javax.websocket.server.ServerContainer; import 
javax.websocket.server.ServerEndpoint;
import javax.websocket.server.ServerEndpointConfig;
import java.io.Closeable;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.URI;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.ServiceLoader;
import java.util.Set;
import java.util.TreeMap;
import java.util.TreeSet;
import java.util.concurrent.Executor;
import java.util.concurrent.TimeUnit;

/**
 * {@link ServerContainer} implementation which allows to deploy endpoints for a server.
 *
 * Also acts as the client-side {@code WebSocketContainer}: the various
 * {@code connectToServer} overloads establish outbound connections via XNIO.
 *
 * @author <a href="mailto:nmaurer@redhat.com">Norman Maurer</a>
 */
public class ServerWebSocketContainer implements ServerContainer, Closeable {

    /**
     * User-property key (on {@link ClientEndpointConfig#getUserProperties()}) holding the
     * connect timeout as a {@link Number} of seconds.
     */
    public static final String TIMEOUT = "io.undertow.websocket.CONNECT_TIMEOUT";
    /** Connect timeout (seconds) used when the {@link #TIMEOUT} property is absent. */
    public static final int DEFAULT_WEB_SOCKET_TIMEOUT_SECONDS = 10;

    private final ClassIntrospecter classIntrospecter;

    // Cache of @ClientEndpoint configurations, keyed by the annotated class.
    // NOTE(review): read without synchronization in getClientEndpoint() while
    // written under synchronized(this) — see the note on that method.
    private final Map<Class<?>, ConfiguredClientEndpoint> clientEndpoints = new HashMap<>();
    private final List<ConfiguredServerEndpoint> configuredServerEndpoints = new ArrayList<>();

    /**
     * set of all deployed server endpoint paths. Due to the comparison function we can detect
     * overlaps
     */
    private final TreeSet<PathTemplate> seenPaths = new TreeSet<>();

    private final XnioWorker xnioWorker;
    private final Pool<ByteBuffer> bufferPool;
    private final ThreadSetupAction threadSetupAction;
    private final boolean dispatchToWorker;
    private final InetSocketAddress clientBindAddress;
    private final WebSocketReconnectHandler webSocketReconnectHandler;

    private volatile long defaultAsyncSendTimeout;
    private volatile long defaultMaxSessionIdleTimeout;
    private volatile int defaultMaxBinaryMessageBufferSize;
    private volatile int defaultMaxTextMessageBufferSize;
    // Once true, addEndpoint() refuses further registrations.
    private volatile boolean deploymentComplete = false;

    // Servlet context awaiting the websocket filter mapping; consumed (and
    // nulled) by handleAddingFilterMapping() when the first endpoint arrives.
    private ServletContextImpl contextToAddFilter = null;

    // SSL providers discovered via ServiceLoader; immutable after construction.
    private final List<WebsocketClientSslProvider> clientSslProviders;

    // NOTE(review): the clientMode parameter is accepted but never used — confirm
    // whether it is kept only for binary compatibility.
    public ServerWebSocketContainer(final ClassIntrospecter classIntrospecter, final XnioWorker xnioWorker, Pool<ByteBuffer> bufferPool, ThreadSetupAction threadSetupAction, boolean dispatchToWorker, boolean clientMode) {
        this(classIntrospecter, ServerWebSocketContainer.class.getClassLoader(), xnioWorker, bufferPool, threadSetupAction, dispatchToWorker, null, null);
    }

    public ServerWebSocketContainer(final ClassIntrospecter classIntrospecter, final ClassLoader classLoader, XnioWorker xnioWorker, Pool<ByteBuffer> bufferPool, ThreadSetupAction threadSetupAction, boolean dispatchToWorker) {
        this(classIntrospecter, classLoader, xnioWorker, bufferPool, threadSetupAction, dispatchToWorker, null, null);
    }

    public ServerWebSocketContainer(final ClassIntrospecter classIntrospecter, final ClassLoader classLoader, XnioWorker xnioWorker, Pool<ByteBuffer> bufferPool, ThreadSetupAction threadSetupAction, boolean dispatchToWorker, InetSocketAddress clientBindAddress, WebSocketReconnectHandler reconnectHandler) {
        this.classIntrospecter = classIntrospecter;
        this.bufferPool = bufferPool;
        this.xnioWorker = xnioWorker;
        this.threadSetupAction = threadSetupAction;
        this.dispatchToWorker = dispatchToWorker;
        this.clientBindAddress = clientBindAddress;
        // Discover SSL providers through the ServiceLoader SPI on the given loader.
        List<WebsocketClientSslProvider> clientSslProviders = new ArrayList<>();
        for (WebsocketClientSslProvider provider : ServiceLoader.load(WebsocketClientSslProvider.class, classLoader)) {
            clientSslProviders.add(provider);
        }
        this.clientSslProviders = Collections.unmodifiableList(clientSslProviders);
        this.webSocketReconnectHandler = reconnectHandler;
    }

    @Override
    public long getDefaultAsyncSendTimeout() {
        return defaultAsyncSendTimeout;
    }

    @Override
    public void setAsyncSendTimeout(long defaultAsyncSendTimeout) {
        this.defaultAsyncSendTimeout = defaultAsyncSendTimeout;
    }

    /**
     * Connects an already-instantiated annotated endpoint using a caller-supplied
     * connection builder (no SSL-provider lookup is performed here).
     *
     * @throws DeploymentException if the instance's class is not a valid @ClientEndpoint
     */
    public Session connectToServer(final Object annotatedEndpointInstance, WebSocketClient.ConnectionBuilder connectionBuilder) throws DeploymentException, IOException {
        ConfiguredClientEndpoint config = getClientEndpoint(annotatedEndpointInstance.getClass(), false);
        if (config == null) {
            throw JsrWebSocketMessages.MESSAGES.notAValidClientEndpointType(annotatedEndpointInstance.getClass());
        }
        Endpoint instance = config.getFactory().createInstance(new ImmediateInstanceHandle<Object>(annotatedEndpointInstance));
        return connectToServerInternal(instance, config, connectionBuilder);
    }

    @Override
    public Session connectToServer(final Object annotatedEndpointInstance, final URI path) throws DeploymentException, IOException {
        ConfiguredClientEndpoint config = getClientEndpoint(annotatedEndpointInstance.getClass(), false);
        if (config == null) {
            throw JsrWebSocketMessages.MESSAGES.notAValidClientEndpointType(annotatedEndpointInstance.getClass());
        }
        Endpoint instance = config.getFactory().createInstance(new ImmediateInstanceHandle<Object>(annotatedEndpointInstance));
        // First SSL provider that returns a non-null XnioSsl wins.
        XnioSsl ssl = null;
        for (WebsocketClientSslProvider provider : clientSslProviders) {
            ssl = provider.getSsl(xnioWorker, annotatedEndpointInstance, path);
            if (ssl != null) {
                break;
            }
        }
        return connectToServerInternal(instance, ssl, config, path);
    }

    /**
     * Connects an annotated endpoint class using a caller-supplied connection builder;
     * the endpoint instance is created via the container's {@link ClassIntrospecter}.
     */
    public Session connectToServer(Class<?> aClass, WebSocketClient.ConnectionBuilder connectionBuilder) throws DeploymentException, IOException {
        ConfiguredClientEndpoint config = getClientEndpoint(aClass, true);
        if (config == null) {
            throw JsrWebSocketMessages.MESSAGES.notAValidClientEndpointType(aClass);
        }
        try {
            AnnotatedEndpointFactory factory = config.getFactory();
            InstanceHandle<?> instance = config.getInstanceFactory().createInstance();
            return connectToServerInternal(factory.createInstance(instance), config, connectionBuilder);
        } catch (InstantiationException e) {
            throw new RuntimeException(e);
        }
    }

    @Override
    public Session connectToServer(Class<?> aClass, URI uri) throws DeploymentException, IOException {
        ConfiguredClientEndpoint config = getClientEndpoint(aClass, true);
        if (config == null) {
            throw JsrWebSocketMessages.MESSAGES.notAValidClientEndpointType(aClass);
        }
        try {
            AnnotatedEndpointFactory factory = config.getFactory();
            InstanceHandle<?> instance = config.getInstanceFactory().createInstance();
            // First SSL provider that returns a non-null XnioSsl wins.
            XnioSsl ssl = null;
            for (WebsocketClientSslProvider provider : clientSslProviders) {
                ssl = provider.getSsl(xnioWorker, aClass, uri);
                if (ssl != null) {
                    break;
                }
            }
            return connectToServerInternal(factory.createInstance(instance), ssl, config, uri);
        } catch (InstantiationException e) {
            throw new RuntimeException(e);
        }
    }

    @Override
    public Session connectToServer(final Endpoint endpointInstance, final ClientEndpointConfig config, final URI path) throws DeploymentException, IOException {
        // A null config is replaced by an empty default configuration.
        ClientEndpointConfig cec = config != null ? config : ClientEndpointConfig.Builder.create().build();
        XnioSsl ssl = null;
        for (WebsocketClientSslProvider provider : clientSslProviders) {
            ssl = provider.getSsl(xnioWorker, endpointInstance, cec, path);
            if (ssl != null) {
                break;
            }
        }
        //in theory we should not be able to connect until the deployment is complete, but the definition of when a deployment is complete is a bit nebulous.
        WebSocketClientNegotiation clientNegotiation = new ClientNegotiation(cec.getPreferredSubprotocols(), toExtensionList(cec.getExtensions()), cec);
        WebSocketClient.ConnectionBuilder connectionBuilder = WebSocketClient.connectionBuilder(xnioWorker, bufferPool, path)
                .setSsl(ssl)
                .setBindAddress(clientBindAddress)
                .setClientNegotiation(clientNegotiation);
        return connectToServer(endpointInstance, config, connectionBuilder);
    }

    /**
     * Connects a programmatic {@link Endpoint} using a caller-supplied connection builder.
     *
     * Blocks up to the configured timeout ({@link #TIMEOUT} user property, seconds;
     * default {@link #DEFAULT_WEB_SOCKET_TIMEOUT_SECONDS}).
     *
     * NOTE(review): unlike the private builder-based connectToServerInternal(), this
     * method does not null-check connectionBuilder.getClientNegotiation() before use —
     * a builder without negotiation set would NPE here. Confirm whether callers always
     * set it.
     *
     * @throws DeploymentException on handshake/upgrade failure
     * @throws IOException on connect timeout or I/O failure
     */
    public Session connectToServer(final Endpoint endpointInstance, final ClientEndpointConfig config, WebSocketClient.ConnectionBuilder connectionBuilder) throws DeploymentException, IOException {
        ClientEndpointConfig cec = config != null ? config : ClientEndpointConfig.Builder.create().build();
        WebSocketClientNegotiation clientNegotiation = connectionBuilder.getClientNegotiation();
        IoFuture<WebSocketChannel> session = connectionBuilder.connect();
        Number timeout = (Number) cec.getUserProperties().get(TIMEOUT);
        // Still WAITING after the timeout window: cancel, and make sure a late
        // completion does not leak the channel.
        if(session.await(timeout == null ? DEFAULT_WEB_SOCKET_TIMEOUT_SECONDS: timeout.intValue(), TimeUnit.SECONDS) == IoFuture.Status.WAITING) {
            //add a notifier to close the channel if the connection actually completes
            session.cancel();
            session.addNotifier(new IoFuture.HandlingNotifier<WebSocketChannel, Object>() {
                @Override
                public void handleDone(WebSocketChannel data, Object attachment) {
                    IoUtils.safeClose(data);
                }
            }, null);
            throw JsrWebSocketMessages.MESSAGES.connectionTimedOut();
        }
        WebSocketChannel channel;
        try {
            channel = session.get();
        } catch (UpgradeFailedException e) {
            throw new DeploymentException(e.getMessage(), e);
        }
        EndpointSessionHandler sessionHandler = new EndpointSessionHandler(this);

        // Map each server-selected extension back to the client-declared one;
        // an unknown selection is a protocol violation.
        final List<Extension> extensions = new ArrayList<>();
        final Map<String, Extension> extMap = new HashMap<>();
        for (Extension ext : cec.getExtensions()) {
            extMap.put(ext.getName(), ext);
        }
        for (WebSocketExtension e : clientNegotiation.getSelectedExtensions()) {
            Extension ext = extMap.get(e.getName());
            if (ext == null) {
                throw JsrWebSocketMessages.MESSAGES.extensionWasNotPresentInClientHandshake(e.getName(), clientNegotiation.getSupportedExtensions());
            }
            extensions.add(ExtensionImpl.create(e));
        }

        EncodingFactory encodingFactory = EncodingFactory.createFactory(classIntrospecter, cec.getDecoders(), cec.getEncoders());
        UndertowSession undertowSession = new UndertowSession(channel, connectionBuilder.getUri(), Collections.<String, String>emptyMap(), Collections.<String, List<String>>emptyMap(), sessionHandler, null, new ImmediateInstanceHandle<>(endpointInstance), cec, connectionBuilder.getUri().getQuery(), encodingFactory.createEncoding(cec), new HashSet<Session>(), clientNegotiation.getSelectedSubProtocol(), extensions, connectionBuilder);
        endpointInstance.onOpen(undertowSession, cec);
        channel.resumeReceives();
        return undertowSession;
    }

    @Override
    public Session connectToServer(final Class<? extends Endpoint> endpointClass, final ClientEndpointConfig cec, final URI path) throws DeploymentException, IOException {
        try {
            Endpoint endpoint = classIntrospecter.createInstanceFactory(endpointClass).createInstance().getInstance();
            return connectToServer(endpoint, cec, path);
        } catch (InstantiationException | NoSuchMethodException e) {
            throw new RuntimeException(e);
        }
    }

    // Builds a connection builder from a URI (with negotiation derived from the
    // endpoint's config) and delegates to the builder-based variant.
    private Session connectToServerInternal(final Endpoint endpointInstance, XnioSsl ssl, final ConfiguredClientEndpoint cec, final URI path) throws DeploymentException, IOException {
        //in theory we should not be able to connect until the deployment is complete, but the definition of when a deployment is complete is a bit nebulous.
        WebSocketClientNegotiation clientNegotiation = new ClientNegotiation(cec.getConfig().getPreferredSubprotocols(), toExtensionList(cec.getConfig().getExtensions()), cec.getConfig());
        WebSocketClient.ConnectionBuilder connectionBuilder = WebSocketClient.connectionBuilder(xnioWorker, bufferPool, path)
                .setSsl(ssl)
                .setBindAddress(clientBindAddress)
                .setClientNegotiation(clientNegotiation);
        return connectToServerInternal(endpointInstance, cec, connectionBuilder);
    }

    // Same connect/timeout/extension-matching flow as the public builder-based
    // connectToServer(), but driven by a pre-resolved ConfiguredClientEndpoint and
    // tolerant of a builder without client negotiation.
    private Session connectToServerInternal(final Endpoint endpointInstance, final ConfiguredClientEndpoint cec, WebSocketClient.ConnectionBuilder connectionBuilder) throws DeploymentException, IOException {
        IoFuture<WebSocketChannel> session = connectionBuilder.connect();
        Number timeout = (Number) cec.getConfig().getUserProperties().get(TIMEOUT);
        IoFuture.Status result = session.await(timeout == null ? DEFAULT_WEB_SOCKET_TIMEOUT_SECONDS : timeout.intValue(), TimeUnit.SECONDS);
        if(result == IoFuture.Status.WAITING) {
            //add a notifier to close the channel if the connection actually completes
            session.cancel();
            session.addNotifier(new IoFuture.HandlingNotifier<WebSocketChannel, Object>() {
                @Override
                public void handleDone(WebSocketChannel data, Object attachment) {
                    IoUtils.safeClose(data);
                }
            }, null);
            throw JsrWebSocketMessages.MESSAGES.connectionTimedOut();
        }
        WebSocketChannel channel;
        try {
            channel = session.get();
        } catch (UpgradeFailedException e) {
            throw new DeploymentException(e.getMessage(), e);
        }
        EndpointSessionHandler sessionHandler = new EndpointSessionHandler(this);

        final List<Extension> extensions = new ArrayList<>();
        final Map<String, Extension> extMap = new HashMap<>();
        for (Extension ext : cec.getConfig().getExtensions()) {
            extMap.put(ext.getName(), ext);
        }
        String subProtocol = null;
        if(connectionBuilder.getClientNegotiation() != null) {
            for (WebSocketExtension e : connectionBuilder.getClientNegotiation().getSelectedExtensions()) {
                Extension ext = extMap.get(e.getName());
                if (ext == null) {
                    throw JsrWebSocketMessages.MESSAGES.extensionWasNotPresentInClientHandshake(e.getName(), connectionBuilder.getClientNegotiation().getSupportedExtensions());
                }
                extensions.add(ExtensionImpl.create(e));
            }
            subProtocol = connectionBuilder.getClientNegotiation().getSelectedSubProtocol();
        }

        UndertowSession undertowSession = new UndertowSession(channel, connectionBuilder.getUri(), Collections.<String, String>emptyMap(), Collections.<String, List<String>>emptyMap(), sessionHandler, null, new ImmediateInstanceHandle<>(endpointInstance), cec.getConfig(), connectionBuilder.getUri().getQuery(), cec.getEncodingFactory().createEncoding(cec.getConfig()), new HashSet<Session>(), subProtocol, extensions, connectionBuilder);
        endpointInstance.onOpen(undertowSession, cec.getConfig());
        channel.resumeReceives();
        return undertowSession;
    }

    @Override
    public long getDefaultMaxSessionIdleTimeout() {
        return defaultMaxSessionIdleTimeout;
    }

    @Override
    public void setDefaultMaxSessionIdleTimeout(final long timeout) {
        this.defaultMaxSessionIdleTimeout = timeout;
    }

    @Override
    public int getDefaultMaxBinaryMessageBufferSize() {
        return defaultMaxBinaryMessageBufferSize;
    }

    @Override
    public void setDefaultMaxBinaryMessageBufferSize(int defaultMaxBinaryMessageBufferSize) {
        this.defaultMaxBinaryMessageBufferSize = defaultMaxBinaryMessageBufferSize;
    }

    @Override
    public int getDefaultMaxTextMessageBufferSize() {
        return defaultMaxTextMessageBufferSize;
    }

    @Override
    public void setDefaultMaxTextMessageBufferSize(int defaultMaxTextMessageBufferSize) {
        this.defaultMaxTextMessageBufferSize = defaultMaxTextMessageBufferSize;
    }

    @Override
    public Set<Extension> getInstalledExtensions() {
        return Collections.emptySet();
    }

    /**
     * Runs a web socket invocation, setting up the thread context and, when
     * {@code dispatchToWorker} is enabled, dispatching to the given executor.
     * <p>
     * Unfortunately we need to dispatch to a thread pool, because there is a good chance that the endpoint
     * will use blocking IO methods. We suspend receives while this is in progress, to make sure that we do not have multiple
     * methods invoked at once.
     * <p>
     *
     * @param executor   the executor used when dispatching to a worker thread
     * @param invocation The task to run
     */
    public void invokeEndpointMethod(final Executor executor, final Runnable invocation) {
        if (dispatchToWorker) {
            executor.execute(new Runnable() {
                @Override
                public void run() {
                    invokeEndpointMethod(invocation);
                }
            });
        } else {
            invokeEndpointMethod(invocation);
        }
    }

    /**
     * Directly invokes an endpoint method, without dispatching to an executor.
     * The ThreadSetupAction is applied around the invocation and torn down in a
     * finally block even if the invocation throws.
     *
     * @param invocation The invocation
     */
    public void invokeEndpointMethod(final Runnable invocation) {
        ThreadSetupAction.Handle handle = threadSetupAction.setup(null);
        try {
            invocation.run();
        } finally {
            handle.tearDown();
        }
    }

    @Override
    public void addEndpoint(final Class<?> endpoint) throws DeploymentException {
        if (deploymentComplete) {
            throw JsrWebSocketMessages.MESSAGES.cannotAddEndpointAfterDeployment();
        }
        addEndpointInternal(endpoint, true);
    }

    /**
     * Registers a class annotated with either {@code @ServerEndpoint} or
     * {@code @ClientEndpoint}.
     *
     * @param endpoint         the annotated class
     * @param requiresCreation for client endpoints: if true, failure to find a usable
     *                         constructor is a deployment error; if false, a factory
     *                         that always throws InstantiationException is installed
     *                         instead (the user is expected to supply instances)
     * @throws DeploymentException if the class carries neither annotation, its path
     *                             overlaps an already-registered one, or instantiation
     *                             machinery cannot be created
     */
    private void addEndpointInternal(final Class<?> endpoint, boolean requiresCreation) throws DeploymentException {
        ServerEndpoint serverEndpoint = endpoint.getAnnotation(ServerEndpoint.class);
        ClientEndpoint clientEndpoint = endpoint.getAnnotation(ClientEndpoint.class);
        if (serverEndpoint != null) {
            JsrWebSocketLogger.ROOT_LOGGER.addingAnnotatedServerEndpoint(endpoint, serverEndpoint.value());
            final PathTemplate template = PathTemplate.create(serverEndpoint.value());
            // PathTemplate's ordering treats overlapping templates as equal, so a
            // contains() hit means a conflicting registration; find it for the message.
            if (seenPaths.contains(template)) {
                PathTemplate existing = null;
                for (PathTemplate p : seenPaths) {
                    if (p.compareTo(template) == 0) {
                        existing = p;
                        break;
                    }
                }
                throw JsrWebSocketMessages.MESSAGES.multipleEndpointsWithOverlappingPaths(template, existing);
            }
            seenPaths.add(template);
            EncodingFactory encodingFactory = EncodingFactory.createFactory(classIntrospecter, serverEndpoint.decoders(), serverEndpoint.encoders());
            AnnotatedEndpointFactory annotatedEndpointFactory = AnnotatedEndpointFactory.create(endpoint, encodingFactory, template.getParameterNames());
            InstanceFactory<?> instanceFactory = null;
            try {
                instanceFactory = classIntrospecter.createInstanceFactory(endpoint);
            } catch (NoSuchMethodException e) {
                throw JsrWebSocketMessages.MESSAGES.couldNotDeploy(e);
            }
            // Only instantiate a custom configurator when one was actually declared.
            Class<? extends ServerEndpointConfig.Configurator> configuratorClass = serverEndpoint.configurator();
            ServerEndpointConfig.Configurator configurator;
            if (configuratorClass != ServerEndpointConfig.Configurator.class) {
                try {
                    configurator = classIntrospecter.createInstanceFactory(configuratorClass).createInstance().getInstance();
                } catch (InstantiationException | NoSuchMethodException e) {
                    throw JsrWebSocketMessages.MESSAGES.couldNotDeploy(e);
                }
            } else {
                configurator = DefaultContainerConfigurator.INSTANCE;
            }
            ServerEndpointConfig config = ServerEndpointConfig.Builder.create(endpoint, serverEndpoint.value())
                    .decoders(Arrays.asList(serverEndpoint.decoders()))
                    .encoders(Arrays.asList(serverEndpoint.encoders()))
                    .subprotocols(Arrays.asList(serverEndpoint.subprotocols()))
                    .configurator(configurator)
                    .build();
            ConfiguredServerEndpoint confguredServerEndpoint = new ConfiguredServerEndpoint(config, instanceFactory, template, encodingFactory, annotatedEndpointFactory);
            configuredServerEndpoints.add(confguredServerEndpoint);
            handleAddingFilterMapping();
        } else if (clientEndpoint != null) {
            JsrWebSocketLogger.ROOT_LOGGER.addingAnnotatedClientEndpoint(endpoint);
            EncodingFactory encodingFactory = EncodingFactory.createFactory(classIntrospecter, clientEndpoint.decoders(), clientEndpoint.encoders());
            InstanceFactory<?> instanceFactory;
            try {
                instanceFactory = classIntrospecter.createInstanceFactory(endpoint);
            } catch (Exception e) {
                try {
                    instanceFactory = new ConstructorInstanceFactory<>(endpoint.getConstructor()); //this endpoint cannot be created by the container, the user will instantiate it
                } catch (NoSuchMethodException e1) {
                    if(requiresCreation) {
                        throw JsrWebSocketMessages.MESSAGES.couldNotDeploy(e);
                    } else {
                        // Placeholder factory: registration succeeds, but any attempt to
                        // have the container create an instance fails.
                        instanceFactory = new InstanceFactory<Object>() {
                            @Override
                            public InstanceHandle<Object> createInstance() throws InstantiationException {
                                throw new InstantiationException();
                            }
                        };
                    }
                }
            }
            AnnotatedEndpointFactory factory = AnnotatedEndpointFactory.create(endpoint, encodingFactory, Collections.<String>emptySet());
            ClientEndpointConfig.Configurator configurator = null;
            try {
                configurator = classIntrospecter.createInstanceFactory(clientEndpoint.configurator()).createInstance().getInstance();
            } catch (InstantiationException | NoSuchMethodException e) {
                throw JsrWebSocketMessages.MESSAGES.couldNotDeploy(e);
            }
            ClientEndpointConfig config = ClientEndpointConfig.Builder.create()
                    .decoders(Arrays.asList(clientEndpoint.decoders()))
                    .encoders(Arrays.asList(clientEndpoint.encoders()))
                    .preferredSubprotocols(Arrays.asList(clientEndpoint.subprotocols()))
                    .configurator(configurator)
                    .build();
            ConfiguredClientEndpoint configuredClientEndpoint = new ConfiguredClientEndpoint(config, factory, encodingFactory, instanceFactory);
            clientEndpoints.put(endpoint, configuredClientEndpoint);
        } else {
            throw JsrWebSocketMessages.MESSAGES.classWasNotAnnotated(endpoint);
        }
    }

    // Installs the websocket filter on the pending servlet context (if any) the
    // first time an endpoint is registered, then clears the pending reference.
    private void handleAddingFilterMapping() {
        if (contextToAddFilter != null) {
            contextToAddFilter.getDeployment().getDeploymentInfo().addFilterUrlMapping(Bootstrap.FILTER_NAME, "/*", DispatcherType.REQUEST);
            contextToAddFilter.getDeployment().getServletPaths().invalidate();
            contextToAddFilter = null;
        }
    }

    @Override
    public void addEndpoint(final ServerEndpointConfig endpoint) throws DeploymentException {
        if (deploymentComplete) {
            throw JsrWebSocketMessages.MESSAGES.cannotAddEndpointAfterDeployment();
        }
        JsrWebSocketLogger.ROOT_LOGGER.addingProgramaticEndpoint(endpoint.getEndpointClass(), endpoint.getPath());
        final PathTemplate template = PathTemplate.create(endpoint.getPath());
        // Same overlap detection as the annotated path (see addEndpointInternal).
        if (seenPaths.contains(template)) {
            PathTemplate existing = null;
            for (PathTemplate p : seenPaths) {
                if (p.compareTo(template) == 0) {
                    existing = p;
                    break;
                }
            }
            throw JsrWebSocketMessages.MESSAGES.multipleEndpointsWithOverlappingPaths(template, existing);
        }
        seenPaths.add(template);
        EncodingFactory encodingFactory = EncodingFactory.createFactory(classIntrospecter, endpoint.getDecoders(), endpoint.getEncoders());
        ConfiguredServerEndpoint confguredServerEndpoint = new ConfiguredServerEndpoint(endpoint, null, template, encodingFactory, null);
        configuredServerEndpoints.add(confguredServerEndpoint);
        handleAddingFilterMapping();
    }

    /**
     * Resolves (and lazily registers) the {@code @ClientEndpoint} configuration for
     * the given type, walking up the superclass chain to find the annotation.
     *
     * NOTE(review): the first {@code clientEndpoints.get(type)} happens outside the
     * synchronized block while registrations mutate the plain HashMap inside it —
     * a classic unsafe-publication / double-checked-locking hazard. Confirm whether
     * lookups can really race with registration, or switch to a ConcurrentHashMap.
     *
     * @return the configuration, or {@code null} if no {@code @ClientEndpoint}
     *         annotation is found on the type or any superclass
     */
    private ConfiguredClientEndpoint getClientEndpoint(final Class<?> endpointType, boolean requiresCreation) {
        Class<?> type = endpointType;
        while (type != Object.class && type != null && !type.isAnnotationPresent(ClientEndpoint.class)) {
            type = type.getSuperclass();
        }
        if(type == Object.class || type == null) {
            return null;
        }
        ConfiguredClientEndpoint existing = clientEndpoints.get(type);
        if (existing != null) {
            return existing;
        }
        synchronized (this) {
            existing = clientEndpoints.get(type);
            if (existing != null) {
                return existing;
            }
            if (type.isAnnotationPresent(ClientEndpoint.class)) {
                try {
                    addEndpointInternal(type, requiresCreation);
                    return clientEndpoints.get(type);
                } catch (DeploymentException e) {
                    throw new RuntimeException(e);
                }
            }
            return null;
        }
    }

    // Freezes the container: addEndpoint() calls are rejected from now on.
    public void deploymentComplete() {
        deploymentComplete = true;
    }

    // Returns the live internal list (not a copy); callers must not mutate it.
    public List<ConfiguredServerEndpoint> getConfiguredServerEndpoints() {
        return configuredServerEndpoints;
    }

    public ServletContextImpl getContextToAddFilter() {
        return contextToAddFilter;
    }

    public void setContextToAddFilter(ServletContextImpl contextToAddFilter) {
        this.contextToAddFilter = contextToAddFilter;
    }

    /**
     * Closes every open session of every deployed server endpoint with a
     * GOING_AWAY close code; individual close failures are logged, not rethrown.
     */
    @Override
    public synchronized void close() {
        for (ConfiguredServerEndpoint endpoint : configuredServerEndpoints) {
            for (Session session : endpoint.getOpenSessions()) {
                try {
                    session.close(new CloseReason(CloseReason.CloseCodes.GOING_AWAY, ""));
                } catch (Exception e) {
                    JsrWebSocketLogger.ROOT_LOGGER.couldNotCloseOnUndeploy(e);
                }
            }
        }
    }

    public Pool<ByteBuffer> getBufferPool() {
        return bufferPool;
    }
public XnioWorker getXnioWorker() { return xnioWorker; } private static List<WebSocketExtension> toExtensionList(final List<Extension> extensions) { List<WebSocketExtension> ret = new ArrayList<>(); for (Extension e : extensions) { final List<WebSocketExtension.Parameter> parameters = new ArrayList<>(); for (Extension.Parameter p : e.getParameters()) { parameters.add(new WebSocketExtension.Parameter(p.getName(), p.getValue())); } ret.add(new WebSocketExtension(e.getName(), parameters)); } return ret; } private class ClientNegotiation extends WebSocketClientNegotiation { private final ClientEndpointConfig config; public ClientNegotiation(List<String> supportedSubProtocols, List<WebSocketExtension> supportedExtensions, ClientEndpointConfig config) { super(supportedSubProtocols, supportedExtensions); this.config = config; } @Override public void afterRequest(final Map<String, List<String>> headers) { ClientEndpointConfig.Configurator configurator = config.getConfigurator(); if (configurator != null) { final Map<String, List<String>> newHeaders = new TreeMap<>(String.CASE_INSENSITIVE_ORDER); for (Map.Entry<String, List<String>> entry : headers.entrySet()) { ArrayList<String> arrayList = new ArrayList<>(); arrayList.addAll(entry.getValue()); newHeaders.put(entry.getKey(), arrayList); } configurator.afterResponse(new HandshakeResponse() { @Override public Map<String, List<String>> getHeaders() { return newHeaders; } }); } } @Override public void beforeRequest(Map<String, List<String>> headers) { ClientEndpointConfig.Configurator configurator = config.getConfigurator(); if (configurator != null) { final Map<String, List<String>> newHeaders = new HashMap<>(); for (Map.Entry<String, List<String>> entry : headers.entrySet()) { ArrayList<String> arrayList = new ArrayList<>(); arrayList.addAll(entry.getValue()); newHeaders.put(entry.getKey(), arrayList); } configurator.beforeRequest(newHeaders); headers.clear(); //TODO: more efficient way for (Map.Entry<String, List<String>> 
entry : newHeaders.entrySet()) { if (!entry.getValue().isEmpty()) { headers.put(entry.getKey(), entry.getValue()); } } } } } public WebSocketReconnectHandler getWebSocketReconnectHandler() { return webSocketReconnectHandler; } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.internal;

import java.io.Serializable;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicBoolean;
import org.apache.ignite.Ignite;
import org.apache.ignite.IgniteException;
import org.apache.ignite.cluster.ClusterGroup;
import org.apache.ignite.cluster.ClusterNode;
import org.apache.ignite.cluster.ClusterTopologyException;
import org.apache.ignite.compute.ComputeJob;
import org.apache.ignite.compute.ComputeJobAdapter;
import org.apache.ignite.compute.ComputeJobResult;
import org.apache.ignite.compute.ComputeJobResultPolicy;
import org.apache.ignite.compute.ComputeTask;
import org.apache.ignite.compute.ComputeTaskSession;
import org.apache.ignite.compute.ComputeTaskSessionFullSupport;
import org.apache.ignite.compute.ComputeUserUndeclaredException;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.lang.IgnitePredicate;
import org.apache.ignite.resources.TaskSessionResource;
import org.apache.ignite.spi.failover.FailoverContext;
import org.apache.ignite.spi.failover.always.AlwaysFailoverSpi;
import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
import org.apache.ignite.testframework.junits.common.GridCommonTest;

/**
 * Test failover of a task with Node filter predicate.
 */
@GridCommonTest(group = "Kernal Self")
public class GridFailoverTaskWithPredicateSelfTest extends GridCommonAbstractTest {
    /** First node's name. */
    private static final String NODE1 = "NODE1";

    /** Second node's name. */
    private static final String NODE2 = "NODE2";

    /** Third node's name. */
    private static final String NODE3 = "NODE3";

    /** Predicate to exclude the second node from topology */
    private final IgnitePredicate<ClusterNode> p = new IgnitePredicate<ClusterNode>() {
        @Override public boolean apply(ClusterNode e) {
            return !NODE2.equals(e.attribute(IgniteNodeAttributes.ATTR_IGNITE_INSTANCE_NAME));
        }
    };

    /** Whether delegating fail over node was found or not. */
    private final AtomicBoolean routed = new AtomicBoolean();

    /** Whether job execution failed with exception. */
    private final AtomicBoolean failed = new AtomicBoolean();

    /** {@inheritDoc} */
    @Override protected IgniteConfiguration getConfiguration(String igniteInstanceName) throws Exception {
        IgniteConfiguration cfg = super.getConfiguration(igniteInstanceName);

        // Wrap the failover SPI so tests can observe whether a failover target was found.
        cfg.setFailoverSpi(new AlwaysFailoverSpi() {
            /** {@inheritDoc} */
            @Override public ClusterNode failover(FailoverContext ctx, List<ClusterNode> grid) {
                ClusterNode failoverNode = super.failover(ctx, grid);

                if (failoverNode != null)
                    routed.set(true);
                else
                    routed.set(false);

                return failoverNode;
            }
        });

        return cfg;
    }

    /**
     * Tests that failover doesn't happen on two-node grid when the Task is applicable only for the first node
     * and fails on it.
     *
     * @throws Exception If failed.
     */
    public void testJobNotFailedOver() throws Exception {
        failed.set(false);
        routed.set(false);

        try {
            Ignite ignite1 = startGrid(NODE1);
            Ignite ignite2 = startGrid(NODE2);

            assert ignite1 != null;
            assert ignite2 != null;

            compute(ignite1.cluster().forPredicate(p)).withTimeout(10000).execute(JobFailTask.class.getName(), "1");
        }
        catch (ClusterTopologyException ignored) {
            // Expected: the only eligible node fails the job, leaving no failover target.
            failed.set(true);
        }
        finally {
            assertTrue(failed.get());
            assertFalse(routed.get());

            stopGrid(NODE1);
            stopGrid(NODE2);
        }
    }

    /**
     * Tests that failover happens on three-node grid when the Task is applicable for the first node
     * and fails on it, but is also applicable on another node.
     *
     * @throws Exception If failed.
     */
    public void testJobFailedOver() throws Exception {
        failed.set(false);
        routed.set(false);

        try {
            Ignite ignite1 = startGrid(NODE1);
            Ignite ignite2 = startGrid(NODE2);
            Ignite ignite3 = startGrid(NODE3);

            assert ignite1 != null;
            assert ignite2 != null;
            assert ignite3 != null;

            Integer res = (Integer)compute(ignite1.cluster().forPredicate(p)).withTimeout(10000).
                execute(JobFailTask.class.getName(), "1");

            assert res == 1;
        }
        catch (ClusterTopologyException ignored) {
            failed.set(true);
        }
        finally {
            assertFalse(failed.get());
            assertTrue(routed.get());

            stopGrid(NODE1);
            stopGrid(NODE2);
            stopGrid(NODE3);
        }
    }

    /**
     * Tests that in case of failover our predicate is intersected with projection
     * (logical AND is performed).
     *
     * @throws Exception If error happens.
     */
    public void testJobNotFailedOverWithStaticProjection() throws Exception {
        failed.set(false);
        routed.set(false);

        try {
            Ignite ignite1 = startGrid(NODE1);
            Ignite ignite2 = startGrid(NODE2);
            Ignite ignite3 = startGrid(NODE3);

            assert ignite1 != null;
            assert ignite2 != null;
            assert ignite3 != null;

            // Get projection only for first 2 nodes.
            ClusterGroup nodes = ignite1.cluster().forNodeIds(Arrays.asList(
                ignite1.cluster().localNode().id(),
                ignite2.cluster().localNode().id()));

            // On failover NODE3 shouldn't be taken into account.
            Integer res = (Integer)compute(nodes.forPredicate(p)).withTimeout(10000).
                execute(JobFailTask.class.getName(), "1");

            assert res == 1;
        }
        catch (ClusterTopologyException ignored) {
            failed.set(true);
        }
        finally {
            assertTrue(failed.get());
            assertFalse(routed.get());

            stopGrid(NODE1);
            stopGrid(NODE2);
            stopGrid(NODE3);
        }
    }

    /** */
    @ComputeTaskSessionFullSupport
    private static class JobFailTask implements ComputeTask<String, Object> {
        /** */
        @TaskSessionResource
        private ComputeTaskSession ses;

        /** {@inheritDoc} */
        @Override public Map<? extends ComputeJob, ClusterNode> map(List<ClusterNode> subgrid, String arg) {
            // First execution attempt is flagged to fail; the failed-over attempt succeeds.
            ses.setAttribute("fail", true);

            return Collections.singletonMap(new ComputeJobAdapter(arg) {
                /** {@inheritDoc} */
                @SuppressWarnings({"RedundantTypeArguments"})
                @Override public Serializable execute() {
                    boolean fail;

                    try {
                        fail = ses.<String, Boolean>waitForAttribute("fail", 0);
                    }
                    catch (InterruptedException e) {
                        throw new IgniteException("Got interrupted while waiting for attribute to be set.", e);
                    }

                    if (fail) {
                        ses.setAttribute("fail", false);

                        throw new IgniteException("Job exception.");
                    }

                    // Return the job argument parsed as an integer (reduce() hands it back to the caller).
                    return Integer.parseInt(this.<String>argument(0));
                }
            }, subgrid.get(0));
        }

        /** {@inheritDoc} */
        @Override public ComputeJobResultPolicy result(ComputeJobResult res, List<ComputeJobResult> received) {
            // Fail over on any job exception except user-undeclared ones.
            if (res.getException() != null && !(res.getException() instanceof ComputeUserUndeclaredException))
                return ComputeJobResultPolicy.FAILOVER;

            return ComputeJobResultPolicy.REDUCE;
        }

        /** {@inheritDoc} */
        @Override public Object reduce(List<ComputeJobResult> results) {
            assert results.size() == 1;

            return results.get(0).getData();
        }
    }
}
/**
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See License.txt in the project root for
 * license information.
 *
 * Code generated by Microsoft (R) AutoRest Code Generator.
 */

package com.microsoft.azure.management.network.v2019_09_01;

import com.microsoft.azure.SubResource;
import java.util.List;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.microsoft.rest.serializer.JsonFlatten;

/**
 * Backend address pool settings of an application gateway.
 *
 * NOTE(review): auto-generated model class — do not hand-edit logic; regenerate
 * instead. Collection accessors (e.g. {@code authenticationCertificates()})
 * store and return the caller's list reference directly, with no defensive copy.
 */
@JsonFlatten
public class ApplicationGatewayBackendHttpSettings extends SubResource {
    /**
     * The destination port on the backend.
     */
    @JsonProperty(value = "properties.port")
    private Integer port;

    /**
     * The protocol used to communicate with the backend. Possible values
     * include: 'Http', 'Https'.
     */
    @JsonProperty(value = "properties.protocol")
    private ApplicationGatewayProtocol protocol;

    /**
     * Cookie based affinity. Possible values include: 'Enabled', 'Disabled'.
     */
    @JsonProperty(value = "properties.cookieBasedAffinity")
    private ApplicationGatewayCookieBasedAffinity cookieBasedAffinity;

    /**
     * Request timeout in seconds. Application Gateway will fail the request if
     * response is not received within RequestTimeout. Acceptable values are
     * from 1 second to 86400 seconds.
     */
    @JsonProperty(value = "properties.requestTimeout")
    private Integer requestTimeout;

    /**
     * Probe resource of an application gateway.
     */
    @JsonProperty(value = "properties.probe")
    private SubResource probe;

    /**
     * Array of references to application gateway authentication certificates.
     */
    @JsonProperty(value = "properties.authenticationCertificates")
    private List<SubResource> authenticationCertificates;

    /**
     * Array of references to application gateway trusted root certificates.
     */
    @JsonProperty(value = "properties.trustedRootCertificates")
    private List<SubResource> trustedRootCertificates;

    /**
     * Connection draining of the backend http settings resource.
     */
    @JsonProperty(value = "properties.connectionDraining")
    private ApplicationGatewayConnectionDraining connectionDraining;

    /**
     * Host header to be sent to the backend servers.
     */
    @JsonProperty(value = "properties.hostName")
    private String hostName;

    /**
     * Whether to pick host header should be picked from the host name of the
     * backend server. Default value is false.
     */
    @JsonProperty(value = "properties.pickHostNameFromBackendAddress")
    private Boolean pickHostNameFromBackendAddress;

    /**
     * Cookie name to use for the affinity cookie.
     */
    @JsonProperty(value = "properties.affinityCookieName")
    private String affinityCookieName;

    /**
     * Whether the probe is enabled. Default value is false.
     */
    @JsonProperty(value = "properties.probeEnabled")
    private Boolean probeEnabled;

    /**
     * Path which should be used as a prefix for all HTTP requests. Null means
     * no path will be prefixed. Default value is null.
     */
    @JsonProperty(value = "properties.path")
    private String path;

    /**
     * The provisioning state of the backend HTTP settings resource. Possible
     * values include: 'Succeeded', 'Updating', 'Deleting', 'Failed'.
     */
    @JsonProperty(value = "properties.provisioningState", access = JsonProperty.Access.WRITE_ONLY)
    private ProvisioningState provisioningState;

    /**
     * Name of the backend http settings that is unique within an Application
     * Gateway.
     */
    @JsonProperty(value = "name")
    private String name;

    /**
     * A unique read-only string that changes whenever the resource is updated.
     */
    @JsonProperty(value = "etag", access = JsonProperty.Access.WRITE_ONLY)
    private String etag;

    /**
     * Type of the resource.
     */
    @JsonProperty(value = "type", access = JsonProperty.Access.WRITE_ONLY)
    private String type;

    /**
     * Get the destination port on the backend.
     *
     * @return the port value
     */
    public Integer port() {
        return this.port;
    }

    /**
     * Set the destination port on the backend.
     *
     * @param port the port value to set
     * @return the ApplicationGatewayBackendHttpSettings object itself.
     */
    public ApplicationGatewayBackendHttpSettings withPort(Integer port) {
        this.port = port;
        return this;
    }

    /**
     * Get the protocol used to communicate with the backend. Possible values include: 'Http', 'Https'.
     *
     * @return the protocol value
     */
    public ApplicationGatewayProtocol protocol() {
        return this.protocol;
    }

    /**
     * Set the protocol used to communicate with the backend. Possible values include: 'Http', 'Https'.
     *
     * @param protocol the protocol value to set
     * @return the ApplicationGatewayBackendHttpSettings object itself.
     */
    public ApplicationGatewayBackendHttpSettings withProtocol(ApplicationGatewayProtocol protocol) {
        this.protocol = protocol;
        return this;
    }

    /**
     * Get cookie based affinity. Possible values include: 'Enabled', 'Disabled'.
     *
     * @return the cookieBasedAffinity value
     */
    public ApplicationGatewayCookieBasedAffinity cookieBasedAffinity() {
        return this.cookieBasedAffinity;
    }

    /**
     * Set cookie based affinity. Possible values include: 'Enabled', 'Disabled'.
     *
     * @param cookieBasedAffinity the cookieBasedAffinity value to set
     * @return the ApplicationGatewayBackendHttpSettings object itself.
     */
    public ApplicationGatewayBackendHttpSettings withCookieBasedAffinity(ApplicationGatewayCookieBasedAffinity cookieBasedAffinity) {
        this.cookieBasedAffinity = cookieBasedAffinity;
        return this;
    }

    /**
     * Get request timeout in seconds. Application Gateway will fail the request if response is not received within RequestTimeout. Acceptable values are from 1 second to 86400 seconds.
     *
     * @return the requestTimeout value
     */
    public Integer requestTimeout() {
        return this.requestTimeout;
    }

    /**
     * Set request timeout in seconds. Application Gateway will fail the request if response is not received within RequestTimeout. Acceptable values are from 1 second to 86400 seconds.
     *
     * @param requestTimeout the requestTimeout value to set
     * @return the ApplicationGatewayBackendHttpSettings object itself.
     */
    public ApplicationGatewayBackendHttpSettings withRequestTimeout(Integer requestTimeout) {
        this.requestTimeout = requestTimeout;
        return this;
    }

    /**
     * Get probe resource of an application gateway.
     *
     * @return the probe value
     */
    public SubResource probe() {
        return this.probe;
    }

    /**
     * Set probe resource of an application gateway.
     *
     * @param probe the probe value to set
     * @return the ApplicationGatewayBackendHttpSettings object itself.
     */
    public ApplicationGatewayBackendHttpSettings withProbe(SubResource probe) {
        this.probe = probe;
        return this;
    }

    /**
     * Get array of references to application gateway authentication certificates.
     *
     * @return the authenticationCertificates value
     */
    public List<SubResource> authenticationCertificates() {
        return this.authenticationCertificates;
    }

    /**
     * Set array of references to application gateway authentication certificates.
     *
     * @param authenticationCertificates the authenticationCertificates value to set
     * @return the ApplicationGatewayBackendHttpSettings object itself.
     */
    public ApplicationGatewayBackendHttpSettings withAuthenticationCertificates(List<SubResource> authenticationCertificates) {
        this.authenticationCertificates = authenticationCertificates;
        return this;
    }

    /**
     * Get array of references to application gateway trusted root certificates.
     *
     * @return the trustedRootCertificates value
     */
    public List<SubResource> trustedRootCertificates() {
        return this.trustedRootCertificates;
    }

    /**
     * Set array of references to application gateway trusted root certificates.
     *
     * @param trustedRootCertificates the trustedRootCertificates value to set
     * @return the ApplicationGatewayBackendHttpSettings object itself.
     */
    public ApplicationGatewayBackendHttpSettings withTrustedRootCertificates(List<SubResource> trustedRootCertificates) {
        this.trustedRootCertificates = trustedRootCertificates;
        return this;
    }

    /**
     * Get connection draining of the backend http settings resource.
     *
     * @return the connectionDraining value
     */
    public ApplicationGatewayConnectionDraining connectionDraining() {
        return this.connectionDraining;
    }

    /**
     * Set connection draining of the backend http settings resource.
     *
     * @param connectionDraining the connectionDraining value to set
     * @return the ApplicationGatewayBackendHttpSettings object itself.
     */
    public ApplicationGatewayBackendHttpSettings withConnectionDraining(ApplicationGatewayConnectionDraining connectionDraining) {
        this.connectionDraining = connectionDraining;
        return this;
    }

    /**
     * Get host header to be sent to the backend servers.
     *
     * @return the hostName value
     */
    public String hostName() {
        return this.hostName;
    }

    /**
     * Set host header to be sent to the backend servers.
     *
     * @param hostName the hostName value to set
     * @return the ApplicationGatewayBackendHttpSettings object itself.
     */
    public ApplicationGatewayBackendHttpSettings withHostName(String hostName) {
        this.hostName = hostName;
        return this;
    }

    /**
     * Get whether to pick host header should be picked from the host name of the backend server. Default value is false.
     *
     * @return the pickHostNameFromBackendAddress value
     */
    public Boolean pickHostNameFromBackendAddress() {
        return this.pickHostNameFromBackendAddress;
    }

    /**
     * Set whether to pick host header should be picked from the host name of the backend server. Default value is false.
     *
     * @param pickHostNameFromBackendAddress the pickHostNameFromBackendAddress value to set
     * @return the ApplicationGatewayBackendHttpSettings object itself.
     */
    public ApplicationGatewayBackendHttpSettings withPickHostNameFromBackendAddress(Boolean pickHostNameFromBackendAddress) {
        this.pickHostNameFromBackendAddress = pickHostNameFromBackendAddress;
        return this;
    }

    /**
     * Get cookie name to use for the affinity cookie.
     *
     * @return the affinityCookieName value
     */
    public String affinityCookieName() {
        return this.affinityCookieName;
    }

    /**
     * Set cookie name to use for the affinity cookie.
     *
     * @param affinityCookieName the affinityCookieName value to set
     * @return the ApplicationGatewayBackendHttpSettings object itself.
     */
    public ApplicationGatewayBackendHttpSettings withAffinityCookieName(String affinityCookieName) {
        this.affinityCookieName = affinityCookieName;
        return this;
    }

    /**
     * Get whether the probe is enabled. Default value is false.
     *
     * @return the probeEnabled value
     */
    public Boolean probeEnabled() {
        return this.probeEnabled;
    }

    /**
     * Set whether the probe is enabled. Default value is false.
     *
     * @param probeEnabled the probeEnabled value to set
     * @return the ApplicationGatewayBackendHttpSettings object itself.
     */
    public ApplicationGatewayBackendHttpSettings withProbeEnabled(Boolean probeEnabled) {
        this.probeEnabled = probeEnabled;
        return this;
    }

    /**
     * Get path which should be used as a prefix for all HTTP requests. Null means no path will be prefixed. Default value is null.
     *
     * @return the path value
     */
    public String path() {
        return this.path;
    }

    /**
     * Set path which should be used as a prefix for all HTTP requests. Null means no path will be prefixed. Default value is null.
     *
     * @param path the path value to set
     * @return the ApplicationGatewayBackendHttpSettings object itself.
     */
    public ApplicationGatewayBackendHttpSettings withPath(String path) {
        this.path = path;
        return this;
    }

    /**
     * Get the provisioning state of the backend HTTP settings resource. Possible values include: 'Succeeded', 'Updating', 'Deleting', 'Failed'.
     *
     * @return the provisioningState value
     */
    public ProvisioningState provisioningState() {
        return this.provisioningState;
    }

    /**
     * Get name of the backend http settings that is unique within an Application Gateway.
     *
     * @return the name value
     */
    public String name() {
        return this.name;
    }

    /**
     * Set name of the backend http settings that is unique within an Application Gateway.
     *
     * @param name the name value to set
     * @return the ApplicationGatewayBackendHttpSettings object itself.
     */
    public ApplicationGatewayBackendHttpSettings withName(String name) {
        this.name = name;
        return this;
    }

    /**
     * Get a unique read-only string that changes whenever the resource is updated.
     *
     * @return the etag value
     */
    public String etag() {
        return this.etag;
    }

    /**
     * Get type of the resource.
     *
     * @return the type value
     */
    public String type() {
        return this.type;
    }

}
package tranquvis.simplesmsremote.Helper;

import android.app.Notification;
import android.app.NotificationChannel;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.content.Context;
import android.content.Intent;
import android.content.res.Resources;
import android.net.Uri;
import android.os.Build;
import android.support.annotation.RequiresApi;
import android.support.v4.app.NotificationCompat;

import org.apache.commons.lang3.StringUtils;

import java.util.ArrayList;
import java.util.List;
import java.util.Random;

import tranquvis.simplesmsremote.Activities.MainActivity;
import tranquvis.simplesmsremote.CommandManagement.CommandExecResult;
import tranquvis.simplesmsremote.R;
import tranquvis.simplesmsremote.Sms.MyCommandMessage;

/**
 * Created by Andreas Kaltenleitner on 24.08.2016.
 *
 * Singleton that owns notification-channel setup and builds/posts all of the
 * app's notifications (SMS command results, boot-receiver failure, permanent
 * receiver status, play-sound status).
 */
public class MyNotificationManager {
    private static final int CODE_NOTIFICATION_CLICK_SMS_COMMAND_RECEIVED = 1;
    private static final int CODE_NOTIFICATION_CLICK_RECEIVER_START_FAILED_AFTER_BOOT = 2;
    private static final int CODE_NOTIFICATION_CLICK_PERMANENT_STATUS = 3;
    private static final int CODE_NOTIFICATION_PLAY_SOUND_STATUS = 4;

    private static final int NOTIFICATION_ID_START_RECEIVER_AFTER_BOOT_FAILED = 1;

    private static MyNotificationManager ourInstance;

    private final Context context;
    private final NotificationManager nm;

    // FIX: reuse one generator instead of allocating a new Random per id request;
    // fresh instances created in quick succession can also yield correlated seeds.
    private final Random notificationIdRandom = new Random();

    /** High-importance channel for user-visible, one-off notifications. */
    public static class DefaultNotificationChannel {
        static final String ID = "Default";
        static final String NAME = "Default";

        @RequiresApi(api = Build.VERSION_CODES.O)
        static NotificationChannel Create() {
            return new NotificationChannel(ID, NAME, NotificationManager.IMPORTANCE_HIGH);
        }

        /** No-op below Android O, where channels do not exist. */
        static void Init(NotificationManager manager) {
            if (Build.VERSION.SDK_INT < Build.VERSION_CODES.O)
                return;
            manager.createNotificationChannel(Create());
        }
    }

    /** Minimum-importance channel for the ongoing SMS-receiver status notification. */
    public static class ReceiverNotificationChannel {
        static final String ID = "Receiver";
        static final String NAME = "Sms Receiver";

        @RequiresApi(api = Build.VERSION_CODES.O)
        static NotificationChannel Create() {
            return new NotificationChannel(ID, NAME, NotificationManager.IMPORTANCE_MIN);
        }

        /** No-op below Android O, where channels do not exist. */
        static void Init(NotificationManager manager) {
            if (Build.VERSION.SDK_INT < Build.VERSION_CODES.O)
                return;
            manager.createNotificationChannel(Create());
        }
    }

    private MyNotificationManager(Context context) {
        // FIX: retain the application context, not the caller's context — the
        // static singleton previously pinned whatever Context (possibly an
        // Activity) was passed first, leaking it for the process lifetime.
        this.context = context.getApplicationContext();
        nm = (NotificationManager) this.context.getSystemService(Context.NOTIFICATION_SERVICE);
    }

    /**
     * Returns the process-wide instance, creating it on first use.
     * FIX: synchronized so concurrent first calls cannot race and construct
     * two instances (the unlocked check-then-create was not thread-safe).
     *
     * @param context any context; only its application context is retained
     */
    public static synchronized MyNotificationManager getInstance(Context context) {
        if (ourInstance == null)
            ourInstance = new MyNotificationManager(context);
        return ourInstance;
    }

    /** Registers both notification channels (no-op below Android O). */
    public void initChannels() {
        if (Build.VERSION.SDK_INT < Build.VERSION_CODES.O)
            return;
        DefaultNotificationChannel.Init(nm);
        ReceiverNotificationChannel.Init(nm);
    }

    /**
     * Returns a random notification id in [100, Integer.MAX_VALUE) so generated
     * ids never collide with the fixed low-numbered ids used above.
     */
    private int getNextNotificationId() {
        final int startId = 100;
        return notificationIdRandom.nextInt(Integer.MAX_VALUE - startId) + startId;
    }

    /**
     * Posts a high-priority notification summarizing the outcome of each
     * executed SMS command (custom message, success, or failure per command).
     *
     * @param commandMessage   the received command message (currently unused in the body)
     * @param executionResults per-command execution outcomes to render
     */
    public void notifySmsCommandsExecuted(MyCommandMessage commandMessage,
                                          List<CommandExecResult> executionResults) {
        final Resources res = context.getResources();
        final String tag = "SmsCommandsReceived";
        final String title = res.getString(R.string.notification_sms_command_received);

        List<String> resultMessages = new ArrayList<>();
        for (CommandExecResult execResult : executionResults) {
            if (execResult.getCustomResultMessage() != null) {
                resultMessages.add("[info] " + execResult.getCustomResultMessage());
            } else if (execResult.isSuccess()) {
                resultMessages.add("[success] " + execResult.getCommandInstance().getCommandText());
            } else {
                resultMessages.add("[failed] " + execResult.getCommandInstance().getCommandText());
            }
        }
        String text = StringUtils.join(resultMessages, "\r\n");

        DefaultNotificationChannel.Init(nm);
        final NotificationCompat.Builder builder =
                new NotificationCompat.Builder(context, DefaultNotificationChannel.ID)
                        .setDefaults(Notification.DEFAULT_ALL)
                        .setSmallIcon(R.drawable.ic_launcher_foreground)
                        .setColor(res.getColor(R.color.colorPrimary))
                        .setContentTitle(title)
                        .setContentText(text)
                        .setPriority(NotificationCompat.PRIORITY_MAX)
                        // NOTE(review): flag 0 — on API 31+ PendingIntent requires
                        // FLAG_IMMUTABLE or FLAG_MUTABLE; confirm the target SDK.
                        .setContentIntent(PendingIntent.getActivity(context,
                                CODE_NOTIFICATION_CLICK_SMS_COMMAND_RECEIVED,
                                new Intent(context, MainActivity.class), 0))
                        .setStyle(new NotificationCompat.BigTextStyle().bigText(text)
                                .setBigContentTitle(title));

        nm.notify(tag, getNextNotificationId(), builder.build());
    }

    /**
     * Posts a notification telling the user that the SMS receiver could not be
     * started after boot; uses a fixed id so repeats replace each other.
     */
    public void notifyStartReceiverAfterBootFailed() {
        final Resources res = context.getResources();
        final String tag = "StartSmsReceiverAfterBootFailed";
        final String title = res.getString(R.string.notification_title_start_receiver_after_boot_failed);
        String text = res.getString(R.string.notification_content_start_receiver_after_boot_failed);

        DefaultNotificationChannel.Init(nm);
        final NotificationCompat.Builder builder =
                new NotificationCompat.Builder(context, DefaultNotificationChannel.ID)
                        .setDefaults(0)
                        .setSmallIcon(R.drawable.ic_launcher_foreground)
                        .setColor(res.getColor(R.color.colorPrimary))
                        .setContentTitle(title)
                        .setContentText(text)
                        .setPriority(NotificationCompat.PRIORITY_DEFAULT)
                        .setContentIntent(PendingIntent.getActivity(context,
                                CODE_NOTIFICATION_CLICK_RECEIVER_START_FAILED_AFTER_BOOT,
                                new Intent(context, MainActivity.class), 0));

        nm.notify(tag, NOTIFICATION_ID_START_RECEIVER_AFTER_BOOT_FAILED, builder.build());
    }

    /**
     * Builds (does not post) the ongoing, minimum-priority notification shown
     * while the SMS receiver is running; intended for a foreground service.
     */
    public Notification getPermanentStatusNotification() {
        final Resources res = context.getResources();
        final String title = res.getString(R.string.notification_title_permanent_status);
        String text = res.getString(R.string.notification_content_permanent_status);

        ReceiverNotificationChannel.Init(nm);
        final NotificationCompat.Builder builder =
                new NotificationCompat.Builder(context, ReceiverNotificationChannel.ID)
                        .setDefaults(0)
                        .setSmallIcon(R.drawable.ic_launcher_foreground)
                        .setColor(res.getColor(R.color.colorPrimary))
                        .setContentTitle(title)
                        .setContentText(text)
                        .setPriority(NotificationCompat.PRIORITY_MIN)
                        .setOngoing(true)
                        .setContentIntent(PendingIntent.getActivity(context,
                                CODE_NOTIFICATION_CLICK_PERMANENT_STATUS,
                                new Intent(context, MainActivity.class), 0));
        return builder.build();
    }

    /**
     * Builds (does not post) the ongoing notification shown while a sound is
     * playing, with a cancel action and an automatic timeout.
     *
     * @param sound      the sound being played; its last path segment is shown as text
     * @param timeout    auto-dismiss timeout in milliseconds
     * @param stopIntent fired when the user taps the cancel action
     */
    public Notification getPlaySoundStatusNotification(
            Uri sound, int timeout, PendingIntent stopIntent) {
        final Resources res = context.getResources();
        final String title = res.getString(R.string.notification_title_play_sound);

        DefaultNotificationChannel.Init(nm);
        final NotificationCompat.Builder builder =
                new NotificationCompat.Builder(context, DefaultNotificationChannel.ID)
                        .setDefaults(0)
                        .setSmallIcon(R.drawable.ic_launcher_foreground)
                        .setColor(res.getColor(R.color.colorPrimary))
                        .setContentTitle(title)
                        .setContentText(sound.getLastPathSegment())
                        .setPriority(NotificationCompat.PRIORITY_HIGH)
                        .setOngoing(true)
                        .setTimeoutAfter(timeout)
                        .addAction(R.drawable.outline_cancel_black_36dp,
                                context.getString(R.string.simple_cancel), stopIntent)
                        .setContentIntent(PendingIntent.getActivity(context,
                                CODE_NOTIFICATION_PLAY_SOUND_STATUS,
                                new Intent(context, MainActivity.class), 0));
        return builder.build();
    }
}
package net.sourceforge.mayfly.dump;

import junit.framework.TestCase;

import net.sourceforge.mayfly.Database;
import net.sourceforge.mayfly.MayflyException;
import net.sourceforge.mayfly.MayflyInternalException;
import net.sourceforge.mayfly.UnimplementedException;
import net.sourceforge.mayfly.acceptance.SqlTestCase;
import net.sourceforge.mayfly.datastore.DataStore;

import java.io.IOException;
import java.io.StringReader;
import java.io.StringWriter;
import java.sql.PreparedStatement;

/**
 * Tests for {@link SqlDumper}: verifies that schema and data are dumped as
 * canonical SQL (types normalized, constraints rendered, values quoted).
 *
 * NOTE(review): this class continues past the visible chunk; the dump()/dumpData()
 * helpers used below are defined later in the file.
 */
public class SqlDumperTest extends TestCase {

    // Fresh in-memory database per test (rebuilt in setUp).
    private Database database;

    @Override
    protected void setUp() throws Exception {
        database = new Database();
    }

    public void testEmpty() throws Exception {
        assertEquals("", new SqlDumper().dump(new Database().dataStore()));
    }

    public void testWriter() throws Exception {
        StringWriter out = new StringWriter();
        new SqlDumper().dump(new DataStore(), out);
        assertEquals("", out.toString());
    }

    public void testTable() throws Exception {
        database.execute("create table foo(a integer)");
        assertEquals("CREATE TABLE foo(\n a INTEGER\n);\n\n", dump());
    }

    public void testTwoColumns() throws Exception {
        // Column case is preserved even though the type is canonicalized.
        database.execute("create table foo(a integer, B Integer)");
        assertEquals("CREATE TABLE foo(\n" +
            " a INTEGER,\n" +
            " B INTEGER\n" +
            ");\n\n",
            dump());
    }

    public void testTwoTables() throws Exception {
        // Tables are dumped in alphabetical order, not creation order.
        database.execute("create table foo(a integer)");
        database.execute("create table bar(b integer)");
        assertEquals(
            "CREATE TABLE bar(\n" +
            " b INTEGER\n" +
            ");\n" +
            "\n" +
            "CREATE TABLE foo(\n" +
            " a INTEGER\n" +
            ");\n" +
            "\n",
            dump());
    }

    public void testDataTypes() throws Exception {
        // Odd spacing and a leading-zero size are normalized in the dump.
        database.execute("create table Foo(" +
            "b varchar ( 0243 ) ," +
            "c timestamp," +
            "d date," +
            "e text," +
            "f decimal ( 7 , 5 ), " +
            "g blob ( 32800)," +
            "h blob" +
            ")");
        assertEquals(
            "CREATE TABLE Foo(\n" +
            " b VARCHAR(243),\n" +
            " c TIMESTAMP,\n" +
            " d DATE,\n" +
            " e TEXT,\n" +
            " f DECIMAL(7,5),\n" +
            " g BLOB(32800),\n" +
            " h BLOB\n" +
            ");\n\n",
            dump()
        );
    }

    public void testIntegerDataTypes() throws Exception {
        database.execute("create table Foo(" +
            "a integer," +
            "b int ," +
            "c tinyint," +
            "d smallint," +
            "e bigint," +
            "f identity," +
            "g serial" +
            ")");
        assertEquals(
            "CREATE TABLE Foo(\n" +
            " a INTEGER,\n" +

            // The prevailing concept here mostly seems to be to canonicalize.
            " b INTEGER,\n" +
            " c TINYINT,\n" +
            " d SMALLINT,\n" +
            " e BIGINT,\n" +
            " f INTEGER AUTO_INCREMENT,\n" +
            " g INTEGER GENERATED BY DEFAULT AS IDENTITY\n" +
            ");\n\n",
            dump()
        );
    }

    public void testRow() throws Exception {
        database.execute("create table foo(a integer)");
        database.execute("insert into foo(a) values(5)");
        assertEquals("CREATE TABLE foo(\n a INTEGER\n);\n\n" +
            "INSERT INTO foo(a) VALUES(5);\n\n",
            dump());
    }

    public void testSeveralColumns() throws Exception {
        database.execute("create table foo(a integer, b integer)");
        database.execute("insert into foo(a, b) values(5, 8)");
        assertEquals("CREATE TABLE foo(\n a INTEGER,\n b INTEGER\n);\n\n" +
            "INSERT INTO foo(a, b) VALUES(5, 8);\n\n",
            dump());
    }

    public void testSeveralRows() throws Exception {
        database.execute("create table foo(a integer)");
        database.execute("insert into foo(a) values(5)");
        database.execute("insert into foo(a) values(6)");
        assertEquals("CREATE TABLE foo(\n a INTEGER\n);\n\n" +
            "INSERT INTO foo(a) VALUES(5);\n" +
            "INSERT INTO foo(a) VALUES(6);\n\n",
            dump());
    }

    public void testRowsForSeveralTables() throws Exception {
        // Rows are grouped per table (alphabetical); empty tables emit no INSERTs.
        database.execute("create table foo(a integer)");
        database.execute("create table empty(a integer)");
        database.execute("create table bar(a integer)");
        database.execute("insert into foo(a) values(5)");
        database.execute("insert into bar(a) values(51)");
        database.execute("insert into bar(a) values(52)");
        assertEquals(
            "CREATE TABLE bar(\n a INTEGER\n);\n\n" +
            "CREATE TABLE empty(\n a INTEGER\n);\n\n" +
            "CREATE TABLE foo(\n a INTEGER\n);\n\n" +
            "INSERT INTO bar(a) VALUES(51);\n" +
            "INSERT INTO bar(a) VALUES(52);\n" +
            "\n" +
            "INSERT INTO foo(a) VALUES(5);\n" +
            "\n",
            dump());
    }

    public void testDataOfVariousTypes() throws Exception {
        // Covers quoting of strings (embedded quote doubled), dates and timestamps.
        database.execute("create table foo(a bigint, b decimal(23,1)," +
            "c varchar(255), d date, e timestamp)");
        database.execute("insert into foo values(" +
            "888111222333, 999888111222333.5, 'c''est', '2004-11-04'," +
            " '2000-02-29 13:45:01' )");
        assertEquals("INSERT INTO foo(a, b, c, d, e) VALUES(" +
            "888111222333, 999888111222333.5, 'c''est', '2004-11-04', " +
            "'2000-02-29 13:45:01');\n\n",
            dumpData());
    }

    public void testBinaryData() throws Exception {
        // Binary values are dumped as x'..' hex literals.
        database.execute("create table foo(a blob)");
        PreparedStatement statement = database.openConnection().prepareStatement(
            "insert into foo(a) values(?)");
        statement.setBytes(1, new byte[] { 0, 1, 2, 127, 77, (byte) 200, (byte) 255, 0});
        statement.executeUpdate();
        assertEquals("INSERT INTO foo(a) VALUES(x'0001027f4dc8ff00');\n\n",
            dumpData());
    }

    public void testDumpDefinition() throws Exception {
        // definition() emits schema only, omitting row data.
        database.execute("create table foo(a int)");
        database.execute("insert into foo(a) values(5)");
        StringWriter out = new StringWriter();
        new SqlDumper().definition(database.dataStore(), out);
        assertEquals("CREATE TABLE foo(\n a INTEGER\n);\n\n",
            out.toString());
    }

    public void testNullAndDefault() throws Exception {
        // Defaults are materialized into the INSERT; missing values dump as null.
        database.execute("create table foo(a integer default 5, b integer)");
        database.execute("insert into foo() values()");
        assertEquals(
            "CREATE TABLE foo(\n a INTEGER DEFAULT 5,\n b INTEGER\n);\n\n" +
            "INSERT INTO foo(a, b) VALUES(5, null);\n\n",
            dump());
    }

    public void testNotNull() throws Exception {
        database.execute("create table foo(a integer not null)");
        assertEquals(
            "CREATE TABLE foo(\n a INTEGER NOT NULL\n);\n\n",
            dump());
    }

    public void testPrimaryKey() throws Exception {
        // Inline and multi-column primary keys both dump as a PRIMARY KEY clause.
        database.execute("create table foo(a integer primary key)");
        database.execute("create table bar(" +
            "a integer, b integer, primary key(a, b))");
        assertEquals(
            "CREATE TABLE bar(\n" +
            " a INTEGER,\n b INTEGER,\n PRIMARY KEY(a, b)\n);\n\n" +
            "CREATE TABLE foo(\n a INTEGER,\n PRIMARY KEY(a)\n);\n\n",
dump()); } public void testUnique() throws Exception { database.execute("create table foo(a integer, b integer, c integer," + "unique(a), unique(b, c))"); assertEquals( "CREATE TABLE foo(\n a INTEGER,\n b INTEGER,\n c INTEGER,\n" + " UNIQUE(a),\n UNIQUE(b, c)\n);\n\n", dump()); } public void testUniqueIndex() throws Exception { database.execute("create table foo(a integer, b integer, c integer)"); database.execute("create unique index x on foo(b, c)"); /* Need to distinguish a unique index from an index and a constraint so that DROP INDEX can drop the constraint for the former but not for the latter. */ assertEquals( "CREATE TABLE foo(\n" + " a INTEGER,\n" + " b INTEGER,\n" + " c INTEGER\n" + ");\n" + "CREATE UNIQUE INDEX x ON foo(b, c);\n\n", dump()); checkRoundTrip(database.dataStore()); } public void testConstraintNames() throws Exception { database.execute("create table name(a integer primary key)"); database.execute("create table name2(" + "a integer, b integer, c integer," + "constraint a_key foreign key(a) references name(a)," + "constraint b_key primary key(b)," + "constraint c_uniq unique(c))"); assertEquals( "CREATE TABLE name(\n a INTEGER,\n PRIMARY KEY(a)\n);\n\n" + "CREATE TABLE name2(\n a INTEGER,\n b INTEGER,\n c INTEGER,\n" + " CONSTRAINT a_key FOREIGN KEY(a) REFERENCES name(a),\n" + " CONSTRAINT b_key PRIMARY KEY(b),\n" + " CONSTRAINT c_uniq UNIQUE(c)\n);\n\n", dump()); } public void testForeignKeyOnDelete() throws Exception { database.execute("create table refd(a integer primary key)"); database.execute("create table refr(d integer," + "foreign key(d) references refd(a) " + "on delete set null on update no action)"); assertEquals( "CREATE TABLE refd(\n a INTEGER,\n PRIMARY KEY(a)\n);\n\n" + "CREATE TABLE refr(\n d INTEGER,\n" + " CONSTRAINT refr_ibfk_1 FOREIGN KEY(d) REFERENCES refd(a) " + "ON DELETE SET NULL\n" + ");\n\n", dump() ); } public void testForeignKeyOnUpdate() throws Exception { database.execute("create table refd(a integer primary 
key)"); try { database.execute("create table refr(d integer," + "foreign key(d) references refd(a) " + "on update set default)"); // Here's where we'd dump it and assert it came out right. fail(); } catch (MayflyException e) { assertEquals("ON UPDATE SET DEFAULT not implemented", e.getMessage()); } } public void testCheckConstraint() throws Exception { database.execute("create table chk(a integer, check(a <> 55))"); try { assertEquals( "CREATE TABLE chk(\n a INTEGER,\n CHECK(a <> 55)\n);\n\n", dump() ); fail("Hmm, looks like dumping check constaints got implemented"); } catch (UnimplementedException expected) { } } public void testIndex() throws Exception { database.execute("create table foo(a integer)"); database.execute("create index i on foo(a)"); assertEquals( "CREATE TABLE foo(\n a INTEGER\n);\n" + "CREATE INDEX i ON foo(a);\n\n", dump() ); checkRoundTrip(database.dataStore()); } public void testTwoIndexes() throws Exception { database.execute("create table foo(a integer, b integer)"); database.execute("create index a_index on foo(a)"); database.execute("create index b_index on foo(b)"); database.execute("insert into foo(a, b) values(5, 7)"); assertEquals( "CREATE TABLE foo(\n" + " a INTEGER,\n" + " b INTEGER\n" + ");\n" + "CREATE INDEX a_index ON foo(a);\n" + "CREATE INDEX b_index ON foo(b);\n\n" + "INSERT INTO foo(a, b) VALUES(5, 7);\n\n", dump() ); checkRoundTrip(database.dataStore()); } public void testMysqlSyntaxIndex() throws Exception { database.execute("create table foo(a integer, index(a))"); assertEquals( "CREATE TABLE foo(\n a INTEGER\n);\n" + "CREATE INDEX an_index ON foo(a);\n\n", dump() ); } public void testOnUpdateValue() throws Exception { database.execute("create table onup(a integer on update 5)"); assertEquals("CREATE TABLE onup(\n a INTEGER ON UPDATE 5\n);\n\n", dump()); } public void testAutoIncrementNoData() throws Exception { database.execute("create table incr2(a integer auto_increment)"); assertEquals( "CREATE TABLE incr2(\n a INTEGER 
AUTO_INCREMENT\n);\n\n", dump()); } public void testAutoIncrement() throws Exception { database.execute("create table incr(a integer auto_increment not null," + "b varchar(255))"); database.execute("insert into incr(b) values('before dump')"); database.execute("insert into incr(a, b) values(7, 'seven')"); String dump0 = dump(); assertEquals( "CREATE TABLE incr(\n" + " a INTEGER DEFAULT 8 AUTO_INCREMENT NOT NULL,\n" + " b VARCHAR(255)\n" + ");\n\n" + "INSERT INTO incr(a, b) VALUES(1, 'before dump');\n" + "INSERT INTO incr(a, b) VALUES(7, 'seven');\n\n", dump0); Database database2 = load(dump0); database.execute("insert into incr(b) values('after dump')"); database2.execute("insert into incr(b) values('after dump')"); String dump1 = dump(); String dump2 = new SqlDumper().dump(database2.dataStore()); assertEquals(dump1, dump2); SqlTestCase.assertResultSet(new String[] { "1, 'before dump' ", "8, 'after dump' ", "7, 'seven' " }, database.query("select a, b from incr")); } public void testAutoIncrementDoesNotDumpDefaultOnNewTable() throws Exception { database.execute( "create table incr(a integer auto_increment not null," + "b varchar(255))"); assertEquals( "CREATE TABLE incr(\n" + " a INTEGER AUTO_INCREMENT NOT NULL,\n" + " b VARCHAR(255)\n" + ");\n\n", dump()); database.execute("insert into incr(b) values('short-lived')"); database.execute("delete from incr"); assertEquals( "CREATE TABLE incr(\n" + " a INTEGER DEFAULT 2 AUTO_INCREMENT NOT NULL,\n" + " b VARCHAR(255)\n" + ");\n\n", dump()); } public void testAutoIncrementDoesDumpDefaultOnAlteredTable() throws Exception { database.execute( "create table incr(a integer not null," + "b varchar(255))"); database.execute("insert into incr(a, b) values(1, 'a row')"); database.execute("alter table incr modify column " + "a integer auto_increment not null"); assertEquals( "CREATE TABLE incr(\n" + " a INTEGER DEFAULT 2 AUTO_INCREMENT NOT NULL,\n" + " b VARCHAR(255)\n" + ");\n\n" + "INSERT INTO incr(a, b) VALUES(1, 'a 
row');\n\n", dump()); database.execute("delete from incr"); assertEquals( "CREATE TABLE incr(\n" + " a INTEGER DEFAULT 2 AUTO_INCREMENT NOT NULL,\n" + " b VARCHAR(255)\n" + ");\n\n", dump()); } public void testSql2003AutoIncrement() throws Exception { database.execute("create table incr(" + "a integer generated by default as identity not null," + "b varchar(255))"); database.execute("insert into incr(a, b) values(7, 'seven')"); database.execute("insert into incr(b) values('before dump')"); String dump = dump(); assertEquals( "CREATE TABLE incr(\n" + " a INTEGER GENERATED BY DEFAULT AS IDENTITY(START WITH 2) NOT NULL,\n" + " b VARCHAR(255)\n" + ");\n\n" + "INSERT INTO incr(a, b) VALUES(1, 'before dump');\n" + "INSERT INTO incr(a, b) VALUES(7, 'seven');\n\n", dump); Database database2 = load(dump); database.execute("insert into incr(b) values('after dump')"); database2.execute("insert into incr(b) values('after dump')"); String dump1 = dump(); String dump2 = new SqlDumper().dump(database2.dataStore()); assertEquals(dump1, dump2); SqlTestCase.assertResultSet(new String[] { "1, 'before dump' ", "2, 'after dump' ", "7, 'seven' " }, database.query("select a, b from incr")); } public void testAutoIncrementNoNextValue() throws Exception { database.execute("create table incr4(a integer auto_increment)"); String before = new SqlDumper(false).dump(database.dataStore()); database.execute("insert into incr4(a) values(5)"); database.execute("delete from incr4"); assertEquals( "CREATE TABLE incr4(\n a INTEGER DEFAULT 6 AUTO_INCREMENT\n);\n\n", new SqlDumper().dump(database.dataStore())); String after = new SqlDumper(false).dump(database.dataStore()); assertEquals( "CREATE TABLE incr4(\n a INTEGER AUTO_INCREMENT\n);\n\n", after); assertEquals(before, after); } /** * See {@link #testAutoIncrementNoNextValue()} for the motivation. 
*/ public void testSql2003IdentityNoNextValue() throws Exception { database.execute("create table incr5(" + "a integer generated by default as identity(start with 7))"); database.execute("insert into incr5() values()"); database.execute("delete from incr5"); assertEquals( "CREATE TABLE incr5(\n a INTEGER " + "GENERATED BY DEFAULT AS IDENTITY(START WITH 8)\n);\n\n", new SqlDumper().dump(database.dataStore())); assertEquals( "CREATE TABLE incr5(\n a INTEGER " + "GENERATED BY DEFAULT AS IDENTITY\n);\n\n", new SqlDumper(false).dump(database.dataStore())); } public void testCurrentTimestamp() throws Exception { database.execute("create table nowish(" + "a timestamp default current_timestamp " + "on update current_timestamp " + ")"); assertEquals( "CREATE TABLE nowish(\n" + " a TIMESTAMP DEFAULT CURRENT_TIMESTAMP " + "ON UPDATE CURRENT_TIMESTAMP\n" + ");\n\n", dump()); } public void testQuotedIdentifiers() throws Exception { database.execute("create table \"join\" (" + "\"null\" integer, \"=\" integer, \"\u00a1\" integer," + "\"nonquote\" integer)"); assertEquals("CREATE TABLE \"join\"(\n" + " \"null\" INTEGER,\n" + " \"=\" INTEGER,\n" + " \"\u00a1\" INTEGER,\n" + " nonquote INTEGER\n" + ");\n\n", dump()); } public void testIdentifier() throws Exception { assertEquals("\"integer\"", identifier("integer")); assertEquals("foo", identifier("foo")); assertEquals("\"<\"", identifier("<")); assertEquals("\"0foo\"", identifier("0foo")); assertEquals("foo7", identifier("foo7")); assertEquals("foo_bar", identifier("foo_bar")); assertEquals("\"foo&bar\"", identifier("foo&bar")); try { identifier(""); fail(); } catch (MayflyInternalException e) { assertEquals("shouldn't have empty string as identifier", e.getMessage()); } try { identifier("\""); fail(); } catch (MayflyException e) { assertEquals("don't know how to dump identifier " + "containing a double quote", e.getMessage()); } } private String identifier(String in) throws IOException { StringWriter out = new StringWriter(); 
SqlDumper.identifier(in, out); return out.toString(); } public void testRoundTrip() throws Exception { database.execute("create table foo(a integer default 5," + "b varchar(255) not null," + "c bigint default 88 not null," + "d decimal(7,1)," + "e timestamp default current_timestamp," + "primary key(b, c)," + "unique(d))"); database.execute("insert into foo(b) values('hi')"); database.execute("create table bar(a bigint, b decimal(23,1)," + "c varchar(255), d date, e timestamp)"); database.execute("insert into bar values(" + "888111222333, 999888111222333.5, 'c''est', '2004-11-04'," + " '2000-02-29 13:45:01' )"); database.execute("create table name(a integer primary key)"); database.execute("create table name2(" + "a integer, b integer, c integer," + "constraint a_key foreign key(a) references name(a)," + "constraint b_key primary key(b)," + "constraint c_uniq unique(c))"); database.execute("create table refd(a integer primary key)"); database.execute("create table refr(d integer," + "foreign key(d) references refd(a) " + "on delete set null on update no action)"); database.execute("create table onup(" + "a integer default 7 on update 5 not null)"); database.execute("create table binary_table(a blob)"); database.execute( "insert into binary_table(a) values(x'0001027f4dc8ff00')"); database.execute("create table \"join\" (" + "\"null\" integer, \"=\" integer, \"\u00a1\" integer," + "\"nonquote\" integer)"); database.execute("create table incr(a integer auto_increment not null," + "b varchar(255))"); database.execute("insert into incr(a, b) values(7, 'seven')"); database.execute("insert into incr(b) values('before dump')"); database.execute("create table incr_seq(" + "a integer generated by default as identity not null," + "b varchar(255))"); database.execute("insert into incr_seq(a, b) values(7, 'seven')"); database.execute("insert into incr_seq(b) values('before dump')"); database.execute("create table nowish(" + "a timestamp default current_timestamp " + "on update 
current_timestamp " + ")"); database.execute("create table incr4(a integer auto_increment)"); database.execute("insert into incr4(a) values(5)"); database.execute("delete from incr4"); database.execute("create table incr5(" + "a integer generated by default as identity(start with 7))"); database.execute("insert into incr5() values()"); database.execute("delete from incr5"); database.execute("create table indexed(b varchar(255), c integer)"); database.execute("create unique index index1 on indexed(b, c)"); // Optionally load the large SQL file of your choice here checkRoundTrip(database.dataStore()); } public void testOrderOfForeignKeys() throws Exception { database.execute("create table aa_refr(a_id integer)"); database.execute("create table bb_refd(a integer primary key)"); database.execute( "alter table aa_refr add foreign key(a_id) references bb_refd(a)"); checkRoundTrip(database.dataStore()); } public void testMoveToEndOnAddForeignKeysDoesNotSuffice() throws Exception { /* bb refers to aa, cc refers to bb */ /* The simple algorithm of just moving bb to after aa, or to * the end, would not suffice. 
*/ database.execute("create table order1_bb(" + "a_id integer, b integer primary key)"); database.execute("create table order2_cc(b_id integer," + "foreign key(b_id) references order1_bb(b))"); database.execute("create table order3_aa(a integer primary key)"); database.execute( "alter table order1_bb add foreign key(a_id) references order3_aa(a)"); checkRoundTrip(database.dataStore()); } public void testCircularForeignKeys() throws Exception { database.execute("create table bb(a_id integer, b integer primary key)"); database.execute("create table aa(b_id integer, a integer primary key)"); database.execute( "alter table bb add foreign key(a_id) references aa(a)"); database.execute( "alter table aa add foreign key(b_id) references bb(b)"); try { new SqlDumper().dump(database.dataStore()); fail(); } catch (MayflyException e) { assertEquals( // "cannot dump: circular reference between tables bb and aa", "cannot dump: circular foreign key references between tables", e.getMessage()); } } public void testSelfReference() throws Exception { database.execute("create table foo(id integer," + "parent integer," + "foreign key(parent) references foo(id))"); checkRoundTrip(database.dataStore()); } public void testTwoForeignKeysFromBarToFoo() throws Exception { database.execute("create table foo(a integer unique, b integer unique)"); database.execute("create table bar(a_id integer," + "foreign key(a_id) references foo(a)," + "b_id integer," + "foreign key(b_id) references foo(b))"); checkRoundTrip(database.dataStore()); } public void testRowOrderWithForeignKeys() throws Exception { database.execute("create table aa(a integer primary key, parent integer," + "foreign key(parent) references aa(a))"); database.execute("insert into aa(a, parent) values(31, null)"); database.execute("insert into aa(a, parent) values(1000, null)"); database.execute("insert into aa(a, parent) values(11, 1000)"); database.execute("insert into aa(a, parent) values(12, 1000)"); database.execute("insert into 
aa(a, parent) values(24, 11)"); database.execute("update aa set parent = 24 where a = 31"); checkRoundTrip(database.dataStore()); } public void testRowSatisfiesItsOwnConstraint() throws Exception { database.execute("create table self(id integer primary key," + "parent integer," + "foreign key(parent) references self(id))"); database.execute("insert into self(id, parent) values(1, 1)"); checkRoundTrip(database.dataStore()); } public void testDifferentConstraintsImplyDifferentOrders() throws Exception { // A particularly odd type of cycle. database.execute("create table foo(a integer unique," + "b integer unique," + "a_ref integer," + "foreign key(a_ref) references foo(a)," + "b_ref integer," + "foreign key(b_ref) references foo(b)" + ")"); database.execute( "insert into foo(a, b, a_ref, b_ref) values (1, 2, null, null)"); database.execute( "insert into foo(a, b, a_ref, b_ref) values (8, 9, 1, null)"); database.execute("update foo set b_ref = 9 where a = 1"); try { dump(); fail(); } catch (MayflyException e) { assertEquals( "cannot dump: circular reference between rows in table foo", e.getMessage()); } } public void testAlterTableUsingAfterDump() throws Exception { database.execute("create table foo(a integer, b integer, c integer)"); database.execute("insert into foo values(1, 2, 3)"); String before = new SqlDumper().dump(database.dataStore()); database.execute("alter table foo drop column b"); database.execute("alter table foo add column b integer after a"); database.execute("update foo set b = 2"); String after = new SqlDumper().dump(database.dataStore()); assertEquals(before, after); } public void testCircularRowsWithForeignKeys() throws Exception { database.execute("create table aa(a integer primary key, parent integer," + "foreign key(parent) references aa(a))"); database.execute("insert into aa(a, parent) values(1, null)"); database.execute("insert into aa(a, parent) values(2, 1)"); database.execute("update aa set parent = 2 where a = 1"); try { new 
SqlDumper().dump(database.dataStore()); fail(); } catch (MayflyException e) { assertEquals( // Would be nice to say something about which rows... // "cannot dump: circular reference between " + // "rows with a 1 and 2 in table aa", "cannot dump: circular reference between rows in table aa", e.getMessage()); } } public void testCompare() throws Exception { assertCompareEqual("create TABLE\n aa(a integer)", "CREATE table aa ( a int ) "); } public void testCompareCase() throws Exception { String lowercase = dump(load("create TABLE\n aa(a integer)")); assertEquals("CREATE TABLE aa(\n a INTEGER\n);\n\n", lowercase); String uppercase = dump(load("CREATE table AA ( a int ) ")); assertEquals("CREATE TABLE AA(\n a INTEGER\n);\n\n", uppercase); } public void testTableOrder() throws Exception { assertCompareEqual( "create table aa(a integer);" + "create table bb(b integer);", "create table bb(b integer);" + "create table aa(a integer)"); } public void testCaseDoesNotAffectTableOrder() throws Exception { TableNode aa = new TableNode("aa"); TableNode bb = new TableNode("BB"); TableNode upperAa = new TableNode("AA"); assertTrue(aa.backupOrdering(bb) < 0); assertTrue(bb.backupOrdering(aa) > 0); assertTrue(upperAa.backupOrdering(bb) < 0); assertTrue(bb.backupOrdering(upperAa) > 0); /* We don't find it interesting how aa compares to upperAa, because they won't both be in the same schema. 
*/ } public void testRowOrder() throws Exception { assertCompareEqual( "create table aa(a integer);" + "insert into aa(a) values(7);" + "insert into aa(a) values(4);" + "insert into aa(a) values(5);" + "insert into aa(a) values(2);" + "insert into aa(a) values(3);" + "insert into aa(a) values(1);", "create table aa(a integer);" + "insert into aa(a) values(5);" + "insert into aa(a) values(1);" + "insert into aa(a) values(3);" + "insert into aa(a) values(7);" + "insert into aa(a) values(4);" + "insert into aa(a) values(2);" ); } public void testIdenticalRows() throws Exception { try { dump(load( "create table aa(a integer, b varchar(255));" + "insert into aa(a, b) values(7, 'hi');" + "insert into aa(a, b) values(7, 'hi');" )); fail(); } catch (MayflyException e) { assertEquals("cannot dump: table aa has duplicate rows", e.getMessage()); } } private void assertCompareEqual(String first, String second) { assertEquals(dump(load(first)), dump(load(second))); } private Database load(String second) { Database aDatabase = new Database(); aDatabase.executeScript(new StringReader(second)); return aDatabase; } /** * From a datastore, dump it, then load from that dump, * dump again, and compare the two dumps. * * This is a somewhat weak test in that if the dump does something wrong, * it quite possibly will do the same thing wrong in both dumps. But it * does catch things like dump files which won't load because tables/rows * are not in the order which will work with foreign keys. 
*/ private static void checkRoundTrip(DataStore inputStore) { String dump = new SqlDumper().dump(inputStore); Database database2 = new Database(); try { database2.executeScript(new StringReader(dump)); } catch (MayflyException e) { throw new RuntimeException( "failure in command: " + e.failingCommand() + "\ndump was:\n" + dump , e); } String dump2 = new SqlDumper().dump(database2.dataStore()); assertEquals(dump, dump2); } private String dumpData() throws IOException { StringWriter out = new StringWriter(); new SqlDumper().data(database.dataStore(), out); return out.toString(); } private String dump() { return dump(database); } private String dump(Database aDatabase) { return new SqlDumper().dump(aDatabase.dataStore()); } }
/****************************************************************
 * Licensed to the Apache Software Foundation (ASF) under one   *
 * or more contributor license agreements.  See the NOTICE file *
 * distributed with this work for additional information        *
 * regarding copyright ownership.  The ASF licenses this file   *
 * to you under the Apache License, Version 2.0 (the            *
 * "License"); you may not use this file except in compliance   *
 * with the License.  You may obtain a copy of the License at   *
 *                                                              *
 *   http://www.apache.org/licenses/LICENSE-2.0                 *
 *                                                              *
 * Unless required by applicable law or agreed to in writing,   *
 * software distributed under the License is distributed on an  *
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY       *
 * KIND, either express or implied.  See the License for the    *
 * specific language governing permissions and limitations      *
 * under the License.                                           *
 ****************************************************************/

package org.apache.james.jspf.parser;

import org.apache.james.jspf.core.Configuration;
import org.apache.james.jspf.core.Directive;
import org.apache.james.jspf.core.Logger;
import org.apache.james.jspf.core.Mechanism;
import org.apache.james.jspf.core.Modifier;
import org.apache.james.jspf.core.SPF1Constants;
import org.apache.james.jspf.core.SPF1Record;
import org.apache.james.jspf.core.SPFRecordParser;
import org.apache.james.jspf.exceptions.NeutralException;
import org.apache.james.jspf.exceptions.NoneException;
import org.apache.james.jspf.exceptions.PermErrorException;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * This class is used to parse SPF1-Records from their textual form to an
 * SPF1Record object that is composed by 2 collections: directives and
 * modifiers.
 * 
 * The parsing is modular and gets information from the Mechanism and Modifier
 * classes declared in the org/apache/james/jspf/parser/jspf.default.terms file.
 * 
 * Each term implementation provides its own REGEX in the REGEX static public
 * field. This parser simply joins all the regexps in a single "alternative"
 * pattern and counts the number of catch groups (brackets) assigned to each
 * regex fragment.
 * 
 * So it creates a big regex and an array where it stores what term is
 * associated to each catch group of the big regex.
 * 
 * If the regex matches the input spf1 record then it starts looking for the
 * matched group (not null) and looks up the term that created that part of the
 * regex.
 * 
 * With this information it creates a new instance of the term and, if the term
 * is Configurable, it calls the config() method passing to it only the specific
 * subset of the MatchResult (using the MatchResultSubset).
 * 
 * TODO doubts about the specification - redirect or exp with no domain-spec are
 * evaluated as an unknown-modifiers according to the current spec (it does not
 * make too much sense) - top-label is defined differently in various specs.
 * We'll have to review the code. -
 * http://data.iana.org/TLD/tlds-alpha-by-domain.txt (we should probably better
 * use an alpha sequence being at least 2 chars - Somewhere is defined as "."
 * TLD [ "." ] - Otherwise defined as ( *alphanum ALPHA *alphanum ) / (
 * 1*alphanum "-" *( * alphanum / "-" ) alphanum )
 * 
 * @see org.apache.james.jspf.core.SPF1Record
 * 
 */
public class DefaultSPF1Parser implements SPFRecordParser {

    /**
     * Regex based on http://www.ietf.org/rfc/rfc4408.txt.
     * This will be the next official SPF-Spec
     */

    // Changed this because C, T and R MACRO_LETTERS are not available
    // in record parsing and must return a PermError.
    // private static final String MACRO_LETTER_PATTERN = "[lsodipvhcrtLSODIPVHCRT]";

    /**
     * ABNF: qualifier = "+" / "-" / "?" / "~"
     */
    private static final String QUALIFIER_PATTERN = "[" + "\\"
            + SPF1Constants.PASS + "\\" + SPF1Constants.FAIL + "\\"
            + SPF1Constants.NEUTRAL + "\\" + SPF1Constants.SOFTFAIL + "]";

    // Pattern used to split a record into its space-separated terms.
    private Pattern termsSeparatorPattern = null;

    // Pattern matching a single term (directive or modifier).
    private Pattern termPattern = null;

    // Catch-group indexes inside termPattern, computed in initializePositions().
    private int TERM_STEP_REGEX_QUALIFIER_POS;

    private int TERM_STEP_REGEX_MECHANISM_POS;

    private int TERM_STEP_REGEX_MODIFIER_POS;

    // Maps each catch-group index of termPattern to the TermDefinition that
    // contributed that part of the regex (null for structural groups).
    private List matchResultPositions;

    private Logger log;

    private TermsFactory termsFactory;

    /**
     * Constructor. Creates all the values needed to run the parsing
     * 
     * @param logger the logger to use
     * @param termsFactory the TermsFactory supplying the mechanism and
     *            modifier definitions used to build the term regex
     */
    public DefaultSPF1Parser(Logger logger, TermsFactory termsFactory) {
        this.log = logger;
        this.termsFactory = termsFactory;

        /**
         * ABNF: mechanism = ( all / include / A / MX / PTR / IP4 / IP6 / exists )
         */
        String MECHANISM_REGEX = createRegex(termsFactory
                .getMechanismsCollection());

        /**
         * ABNF: modifier = redirect / explanation / unknown-modifier
         */
        String MODIFIER_REGEX = "("
                + createRegex(termsFactory.getModifiersCollection()) + ")";

        /**
         * ABNF: directive = [ qualifier ] mechanism
         */
        String DIRECTIVE_REGEX = "(" + QUALIFIER_PATTERN + "?)("
                + MECHANISM_REGEX + ")";

        /**
         * ABNF: ( directive / modifier )
         */
        String TERM_REGEX = "(?:" + MODIFIER_REGEX + "|" + DIRECTIVE_REGEX
                + ")";

        /**
         * ABNF: 1*SP
         */
        String TERMS_SEPARATOR_REGEX = "[ ]+";

        termsSeparatorPattern = Pattern.compile(TERMS_SEPARATOR_REGEX);
        termPattern = Pattern.compile(TERM_REGEX);

        initializePositions();
    }

    /**
     * Fill in the matchResultPositions ArrayList. This array simply maps each
     * regex matchgroup to the Term class that originated that part of the
     * regex.
     */
    private void initializePositions() {
        ArrayList matchResultPositions = new ArrayList();

        // FULL MATCH (group 0 has no owning term)
        int posIndex = 0;
        matchResultPositions.ensureCapacity(posIndex + 1);
        matchResultPositions.add(posIndex, null);

        Iterator i;

        // The outer modifier group itself, then one slot per catch group
        // contributed by each modifier definition (+1 for its wrapping group).
        TERM_STEP_REGEX_MODIFIER_POS = ++posIndex;
        matchResultPositions.ensureCapacity(posIndex + 1);
        matchResultPositions.add(TERM_STEP_REGEX_MODIFIER_POS, null);
        i = termsFactory.getModifiersCollection().iterator();
        while (i.hasNext()) {
            TermDefinition td = (TermDefinition) i.next();
            int size = td.getMatchSize() + 1;
            for (int k = 0; k < size; k++) {
                posIndex++;
                matchResultPositions.ensureCapacity(posIndex + 1);
                matchResultPositions.add(posIndex, td);
            }
        }

        // The qualifier group is structural: it maps to no term.
        TERM_STEP_REGEX_QUALIFIER_POS = ++posIndex;
        matchResultPositions.ensureCapacity(posIndex + 1);
        matchResultPositions.add(posIndex, null);

        // Same layout for mechanisms as for modifiers above.
        TERM_STEP_REGEX_MECHANISM_POS = ++posIndex;
        matchResultPositions.ensureCapacity(posIndex + 1);
        matchResultPositions.add(TERM_STEP_REGEX_MECHANISM_POS, null);
        i = termsFactory.getMechanismsCollection().iterator();
        while (i.hasNext()) {
            TermDefinition td = (TermDefinition) i.next();
            int size = td.getMatchSize() + 1;
            for (int k = 0; k < size; k++) {
                posIndex++;
                matchResultPositions.ensureCapacity(posIndex + 1);
                matchResultPositions.add(posIndex, td);
            }
        }

        if (log.isDebugEnabled()) {
            log.debug("Parsing catch group positions: Modifiers["
                    + TERM_STEP_REGEX_MODIFIER_POS + "] Qualifier["
                    + TERM_STEP_REGEX_QUALIFIER_POS + "] Mechanism["
                    + TERM_STEP_REGEX_MECHANISM_POS + "]");
            for (int k = 0; k < matchResultPositions.size(); k++) {
                log
                        .debug(k
                                + ") "
                                + (matchResultPositions.get(k) != null ? ((TermDefinition) matchResultPositions
                                        .get(k)).getPattern().pattern()
                                        : null));
            }
        }

        this.matchResultPositions = Collections
                .synchronizedList(matchResultPositions);
    }

    /**
     * Loop the given term definitions and create an OR regex like this:
     * (?:(REGEX1)|(REGEX2)|(REGEX3))
     * 
     * @param commandMap
     *            collection of TermDefinition whose patterns are concatenated
     * @return regex The regex
     */
    private String createRegex(Collection commandMap) {
        StringBuffer modifierRegex = new StringBuffer();
        Iterator i = commandMap.iterator();
        boolean first = true;
        while (i.hasNext()) {
            if (first) {
                modifierRegex.append("(?:(");
                first = false;
            } else {
                modifierRegex.append(")|(");
            }
            Pattern pattern = ((TermDefinition) i.next()).getPattern();
            modifierRegex.append(pattern.pattern());
        }
        modifierRegex.append("))");
        return modifierRegex.toString();
    }

    /**
     * @see org.apache.james.jspf.parser.SPFRecordParser#parse(java.lang.String)
     */
    public SPF1Record parse(String spfRecord) throws PermErrorException,
            NoneException, NeutralException {

        log.debug("Start parsing SPF-Record: " + spfRecord);

        SPF1Record result = new SPF1Record();

        // check the version "header": a record must either start with
        // "v=spf1 " or be exactly "v=spf1" (the latter is an empty record).
        if (spfRecord.toLowerCase().startsWith(SPF1Constants.SPF_VERSION + " ")
                || spfRecord.equalsIgnoreCase(SPF1Constants.SPF_VERSION)) {
            if (!spfRecord.toLowerCase().startsWith(
                    SPF1Constants.SPF_VERSION + " "))
                throw new NeutralException("Empty SPF Record");
        } else {
            throw new NoneException("No valid SPF Record: " + spfRecord);
        }

        // extract terms
        String[] terms = termsSeparatorPattern.split(spfRecord.replaceFirst(
                SPF1Constants.SPF_VERSION, ""));

        // cycle terms
        for (int i = 0; i < terms.length; i++) {
            if (terms[i].length() > 0) {
                Matcher termMatcher = termPattern.matcher(terms[i]);
                if (!termMatcher.matches()) {
                    throw new PermErrorException("Term [" + terms[i]
                            + "] is not syntactically valid: "
                            + termPattern.pattern());
                }

                // true if we matched a modifier, false if we matched a
                // directive
                String modifierString = termMatcher
                        .group(TERM_STEP_REGEX_MODIFIER_POS);

                if (modifierString != null) {
                    // MODIFIER
                    Modifier mod = (Modifier) lookupAndCreateTerm(termMatcher,
                            TERM_STEP_REGEX_MODIFIER_POS);

                    if (mod.enforceSingleInstance()) {
                        // modifiers such as redirect/exp may appear only once
                        Iterator it = result.getModifiers().iterator();
                        while (it.hasNext()) {
                            if (it.next().getClass().equals(mod.getClass())) {
                                throw new PermErrorException("More than one "
                                        + modifierString
                                        + " found in SPF-Record");
                            }
                        }
                    }

                    result.getModifiers().add(mod);

                } else {
                    // DIRECTIVE
                    String qualifier = termMatcher
                            .group(TERM_STEP_REGEX_QUALIFIER_POS);

                    Object mech = lookupAndCreateTerm(termMatcher,
                            TERM_STEP_REGEX_MECHANISM_POS);

                    result.getDirectives().add(
                            new Directive(qualifier, (Mechanism) mech,
                                    log.getChildLogger(qualifier
                                            + "directive")));
                }
            }
        }

        return result;
    }

    /**
     * Look up which TermDefinition produced the non-null catch group after
     * <code>start</code> and instantiate the corresponding term, configured
     * with only its own subset of the match result.
     * 
     * @param res
     *            the MatchResult
     * @param start
     *            the position where the terms starts
     * @return the newly created term, or null if no matching group was found
     * @throws PermErrorException
     */
    private Object lookupAndCreateTerm(Matcher res, int start)
            throws PermErrorException {
        // NOTE(review): Matcher group indices run from 1 to res.groupCount()
        // *inclusive*, so the last catch group is never inspected by this
        // loop ("k < groupCount()"). That only matters if the final term
        // definition contributes the very last group - confirm against the
        // generated regex before changing the bound.
        for (int k = start + 1; k < res.groupCount(); k++) {
            if (res.group(k) != null && k != TERM_STEP_REGEX_QUALIFIER_POS) {
                TermDefinition c = (TermDefinition) matchResultPositions.get(k);
                Configuration subres = new MatcherBasedConfiguration(res, k, c
                        .getMatchSize());
                try {
                    return termsFactory.createTerm(c.getTermDef(), subres);
                } catch (InstantiationException e) {
                    // TODO is it ok to use a Runtime for this? Or should we use a PermError here?
                    // Chain the cause instead of printStackTrace() so the
                    // original failure is preserved when logged upstream.
                    throw new IllegalStateException(
                            "Unexpected error creating term: "
                                    + e.getMessage(), e);
                }
            }
        }
        return null;
    }

}
package org.jgroups.tests;

import org.jgroups.Global;
import org.jgroups.util.BlockingInputStream;
import org.jgroups.util.Util;
import org.testng.annotations.Test;

import java.io.*;
import java.util.*;
import java.util.concurrent.CountDownLatch;

/**
 * Tests {@link org.jgroups.util.BlockingInputStream}: blocking read/write
 * semantics, EOF-on-close behavior, bounded-capacity handshaking between
 * producer and consumer threads, and object (de)serialization over the stream.
 * @author Bela Ban
 */
@Test(groups=Global.FUNCTIONAL)
public class BlockingInputStreamTest {

    /** A freshly created stream is empty; writing makes the bytes available. */
    public void testCreation() throws IOException {
        BlockingInputStream in=new BlockingInputStream(2000);
        System.out.println("in = " + in);
        assert in.available() == 0 && in.capacity() == 2000;

        in.write(new byte[]{'b', 'e', 'l', 'a'});
        System.out.println("in = " + in);
        assert in.available() == 4 && in.capacity() == 2000;
    }

    /** Single-byte reads return the written bytes in order, then -1 after close. */
    public void testRead() throws IOException {
        final BlockingInputStream in=new BlockingInputStream(100);
        byte[] input={'B', 'e', 'l', 'a'};
        in.write(input);
        in.close();
        assert in.available() == 4;
        for(int i=0; i < input.length; i++) {
            int b=in.read();
            assert b == input[i];
        }
        int b=in.read();
        assert b == -1;
    }

    /** A read blocked on an empty stream must unblock with EOF when the stream is closed. */
    public void testBlockingReadAndClose() throws IOException {
        final BlockingInputStream in=new BlockingInputStream(100);
        final CountDownLatch latch=new CountDownLatch(1);
        byte[] buf=new byte[100];

        new Closer(latch, in, 1000L).start(); // closes input stream after 1 sec
        latch.countDown();
        int num=in.read(buf, 0, buf.length);
        assert num == -1 : " expected -1 (EOF) but got " + num;
    }

    /** A write blocked on a full (capacity 3) stream must unblock when the stream is closed. */
    public void testBlockingWriteAndClose() throws IOException {
        final BlockingInputStream in=new BlockingInputStream(3);
        final CountDownLatch latch=new CountDownLatch(1);
        byte[] buf={'B', 'e', 'l', 'a'};

        new Closer(latch, in, 1000L).start(); // closes input stream after 1 sec
        latch.countDown();
        in.write(buf, 0, buf.length);
    }

    /** Reading from an already closed, empty stream returns EOF immediately. */
    public void testReadOnClosedInputStream() throws IOException {
        final BlockingInputStream in=new BlockingInputStream(100);
        in.close();
        byte[] buf=new byte[100];
        int num=in.read(buf, 0, buf.length);
        assert num == -1 : " expected -1 (EOF) but got " + num;
    }

    /** After close, a bulk read drains exactly the buffered bytes. */
    public void testWriteCloseRead() throws IOException {
        final BlockingInputStream in=new BlockingInputStream(100);
        for(int i=1; i <= 5; i++) {
            byte[] buf=("Hello world " + i).getBytes();
            in.write(buf);
        }
        in.close();
        int size=in.available();
        byte[] buf=new byte[100];
        int num=in.read(buf);
        assert num == size;
    }

    /** A concurrent writer feeds the stream; a single bulk read gets all of it. */
    public void testWriteCloseRead2() throws IOException {
        final BlockingInputStream in=new BlockingInputStream(100);
        StringBuilder sb=new StringBuilder();
        for(int i=1; i <=10; i++)
            sb.append("Hello world " + i);
        byte[] buffer=sb.toString().getBytes();

        new Writer(in, buffer).start();
        Util.sleep(500); // give the writer time to fill the stream before reading
        byte[] buf=new byte[200];
        int num=in.read(buf);
        assert num == buffer.length;
    }

    /** Same as above but the write runs synchronously (capacity is large enough). */
    public void testWriteCloseRead3() throws IOException {
        final BlockingInputStream in=new BlockingInputStream(300);
        StringBuilder sb=new StringBuilder();
        for(int i=1; i <=10; i++)
            sb.append("Hello world " + i);
        byte[] buffer=sb.toString().getBytes();

        new Writer(in, buffer).execute(); // don't use a separate thread
        byte[] buf=new byte[200];
        int num=in.read(buf);
        assert num == buffer.length;
    }

    /** 500 alternating bytes through a 100-byte stream: contents must survive intact. */
    public void testSimpleTransfer() throws IOException {
        final BlockingInputStream in=new BlockingInputStream(100);
        byte[] buffer=new byte[500];
        for(int i=0; i < buffer.length; i++)
            buffer[i]=(byte)(i % 2 == 0? 0 : 1);

        new Writer(in, buffer).start();

        byte[] tmp=new byte[500];
        int offset=0;
        while(true) {
            int bytes=in.read(tmp, offset, tmp.length - offset);
            if(bytes == -1)
                break;
            offset+=bytes;
        }
        System.out.println("read " + offset + " bytes");
        assert offset == 500 : "offset is " + offset + " but expected 500";
        for(int i=0; i < tmp.length; i++) {
            if(i % 2 == 0)
                assert tmp[i] == 0;
            else
                assert tmp[i] == 1;
        }
    }

    /** 1 MB through an 8 KB stream, read in a loop; buffers must be identical. */
    public void testLargeTransfer() throws IOException {
        final BlockingInputStream in=new BlockingInputStream(8192);
        final byte[] buffer=generateBuffer(1000000);
        new Writer(in, buffer).start();
        byte[] tmp=new byte[buffer.length];
        int offset=0;
        while(true) {
            int bytes=in.read(tmp, offset, tmp.length - offset);
            if(bytes == -1)
                break;
            offset+=bytes;
        }
        System.out.println("read " + offset + " bytes");
        assert offset == buffer.length : "offset is " + offset + " but expected " + buffer.length;
        System.out.print("Verifying that the buffers are the same: ");
        for(int i=0; i < tmp.length; i++)
            assert buffer[i] == tmp[i];
        System.out.println("OK");
    }

    /** 1 MB read via a single read(byte[]) call; buffers must be identical. */
    public void testLargeTransfer2() throws IOException {
        final BlockingInputStream in=new BlockingInputStream(8192);
        final byte[] buffer=generateBuffer(1000000);
        new Writer(in, buffer).start();
        byte[] tmp=new byte[buffer.length];
        int bytes=in.read(tmp); // reads 1 million bytes in one go
        System.out.println("read " + bytes + " bytes");
        assert bytes == buffer.length : "read " + bytes + " bytes but expected " + buffer.length;
        System.out.print("Verifying that the buffers are the same: ");
        for(int i=0; i < tmp.length; i++)
            assert buffer[i] == tmp[i];
        System.out.println("OK");
    }

    /** One writer sends the same 500-byte chunk 5 times; reader must see 2500 bytes. */
    public void testWriterMultipleChunks() throws Exception {
        final BlockingInputStream in=new BlockingInputStream(100);
        final byte[] buffer=generateBuffer(500);
        Writer writer=new Writer(in, buffer, 5, true);
        writer.start();
        byte[] tmp=new byte[20];
        int num=0;
        while(true) {
            int read=in.read(tmp);
            if(read == -1)
                break;
            num+=read;
        }
        System.out.println("read " + num + " bytes");
        assert num == 5 * buffer.length;
    }

    /** 5 concurrent writers; a monitor thread closes the stream once all have finished. */
    public void testMultipleWriters() throws Exception {
        final BlockingInputStream in=new BlockingInputStream(100);
        final byte[] buffer=generateBuffer(500);
        final Writer[] writers=new Writer[5];
        for(int i=0; i < writers.length; i++) {
            writers[i]=new Writer(in, buffer, 1, false);
            writers[i].setName("writer-" + (i+1));
            writers[i].start();
        }

        new Thread(() -> {
            while(true) {
                boolean all_done=true;
                for(Writer writer: writers) {
                    if(writer.isAlive()) {
                        all_done=false;
                        break;
                    }
                }
                if(all_done) {
                    Util.close(in);
                    return;
                }
                else
                    Util.sleep(100);
            }
        }).start();

        byte[] tmp=new byte[400];
        int num=0;
        while(true) {
            int read=in.read(tmp, 0, tmp.length);
            if(read == -1)
                break;
            num+=read;
        }
        System.out.println("read " + num + " bytes");
        assert num == writers.length * buffer.length;
        for(Writer writer: writers)
            assert writer.isAlive() == false;
    }

    /** Writing 15 bytes into a 10-byte stream must succeed once a reader drains it. */
    public void testWriteExceedingCapacity() throws IOException {
        final BlockingInputStream in=new BlockingInputStream(10);
        new Thread(() -> {
            byte[] tmp=new byte[20];
            int num=0;
            try {
                while(true) {
                    int read=in.read(tmp);
                    if(read == -1)
                        break;
                    num+=read;
                }
                System.out.println("read " + num + " bytes");
            }
            catch(IOException e) {
                e.printStackTrace();
            }
        }).start();

        byte[] buffer=new byte[15];
        try {
            in.write(buffer);
        }
        finally {
            Util.close(in);
        }
    }

    /** A 1400-byte write into an 800-byte stream; a 1000-byte read must fill completely. */
    public void testWritingBeyondLength() throws IOException {
        final BlockingInputStream in=new BlockingInputStream(800);
        new Thread(() -> {
            byte[] buf=new byte[800+600];
            try {
                in.write(buf);
            }
            catch(IOException e) {
                e.printStackTrace();
            }
        }).start();

        byte[] buf=new byte[1000];
        int read=in.read(buf);
        assert read == buf.length;
    }

    /** Byte-at-a-time reads into successive offsets reproduce the written bytes. */
    public void testSimpleWrite() throws Exception {
        final BlockingInputStream input=new BlockingInputStream(8192);
        byte[] in={'B', 'e', 'l', 'a'};
        input.write(in);
        byte[] buf=new byte[5];
        for(int i=0; i < in.length; i++) {
            int read=input.read(buf, i, 1);
            assert read == 1;
        }
        for(int i=0; i < in.length; i++)
            assert in[i] == buf[i];
    }

    /** Serializes a map into the stream from a writer thread and deserializes it back. */
    public void testObjectStreaming() throws Exception {
        final BlockingInputStream input=new BlockingInputStream(8192);

        Map<String,List<Long>> map=new HashMap<>(4);
        for(String key: Arrays.asList("A", "B", "C", "D")) {
            List<Long> list=new ArrayList<>(1000);
            map.put(key, list);
            for(int i=1; i <= 1000; i++)
                list.add((long)i);
        }

        ByteArrayOutputStream output=new ByteArrayOutputStream(8192);
        OutputStream out=new BufferedOutputStream(output);
        Util.objectToStream(map, new DataOutputStream(out));
        out.flush();
        final byte[] buffer=output.toByteArray();

        Thread writer=new Thread(() -> {
            try {
                input.write(buffer);
            }
            catch(IOException e) {
                e.printStackTrace();
            }
        });
        writer.start();

        Map<String,List<Long>> tmp=Util.objectFromStream(new DataInputStream(input));
        assert tmp.size() == 4;
        for(String key: Arrays.asList("A", "B", "C", "D")) {
            // Fix: verify the DESERIALIZED map; the original checked map.get(key),
            // which trivially passes and never exercises the round-trip
            List<Long> list=tmp.get(key);
            assert list.size() == 1000;
            assert list.iterator().next() == 1;
        }
    }

    /** Creates a buffer of the given size filled with pseudo-random bytes. */
    protected static byte[] generateBuffer(int size) {
        byte[] buf=new byte[size];
        for(int i=0; i < buf.length; i++)
            buf[i]=(byte)(Util.random(size) % Byte.MAX_VALUE);
        return buf;
    }

    /** Waits for the latch, sleeps for {@code timeout} ms, then closes the stream. */
    protected static final class Closer extends Thread {
        protected final CountDownLatch latch;
        protected final InputStream in;
        protected final long timeout;

        public Closer(CountDownLatch latch, InputStream in, long timeout) {
            this.latch=latch;
            this.in=in;
            this.timeout=timeout;
        }

        public void run() {
            try {
                latch.await();
                Util.sleep(timeout);
                in.close();
            }
            catch(Exception e) {
                e.printStackTrace();
            }
        }
    }

    /** Writes {@code buffer} into the stream {@code num_times} times, optionally closing it afterwards. */
    protected static final class Writer extends Thread {
        protected final BlockingInputStream in;
        protected final byte[] buffer;
        protected final int num_times;
        protected final boolean close_input;

        public Writer(BlockingInputStream in, byte[] buffer, int num_times, boolean close_input) {
            this.in=in;
            this.buffer=buffer;
            this.num_times=num_times;
            this.close_input=close_input;
        }

        public Writer(BlockingInputStream in, byte[] buffer) {
            this(in, buffer, 1, true);
        }

        public void run() {
            execute();
        }

        public void execute() {
            try {
                for(int i=0; i < num_times; i++) {
                    in.write(buffer, 0, buffer.length);
                    System.out.println(Thread.currentThread().getId() + ": wrote " + buffer.length + " bytes");
                }
                if(close_input)
                    Util.close(in);
            }
            catch(IOException e) {
                System.err.println(e);
            }
        }
    }
}
/*
 * Zed Attack Proxy (ZAP) and its related class files.
 *
 * ZAP is an HTTP/HTTPS proxy for assessing web application security.
 *
 * Copyright 2015 The ZAP Development Team
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.zaproxy.zap.utils;

import java.awt.Font;
import java.awt.FontFormatException;
import java.awt.GraphicsEnvironment;
import java.io.IOException;
import java.util.EnumMap;

import javax.swing.JLabel;
import javax.swing.UIManager;

/**
 * Utility class for obtaining correctly scaled fonts, per {@link FontType},
 * relative to the configured default font.
 */
public class FontUtils {

    /** Relative sizes supported by the {@code getFont(..., Size)} overloads. */
    public static enum Size { smallest, much_smaller, smaller, standard, larger, much_larger, huge };

    /** The UI areas for which a default font can be configured independently. */
    public static enum FontType { general, workPanels };

    // Ratio of configured default size to system default size; -1 means "recompute".
    private static float scale = -1;

    private static EnumMap<FontType, Font> defaultFonts = new EnumMap<>(FontType.class);
    // Tracks, per FontType, whether a custom (non-blank) font name was supplied.
    // May contain no entry for a FontType that was never configured.
    private static EnumMap<FontType, Boolean> defaultFontSets = new EnumMap<>(FontType.class);

    private static Font systemDefaultFont;
    private static Font quicksandBoldFont;

    /**
     * Gets the look-and-feel's default font, falling back to the font of a
     * plain {@code JLabel} if the L&F does not define one. Cached after the
     * first call.
     *
     * @return the system default font, never {@code null}
     */
    public static Font getSystemDefaultFont() {
        if (systemDefaultFont == null) {
            systemDefaultFont = (Font) UIManager.getLookAndFeelDefaults().get("defaultFont");

            if (systemDefaultFont == null) {
                systemDefaultFont = new JLabel("").getFont();
            }
        }
        return systemDefaultFont;
    }

    /**
     * Returns the Quicksand Bold font -
     * https://fonts.google.com/specimen/Quicksand?selection.family=Quicksand
     *
     * @since 2.7.0
     * @return the Quicksand Bold font
     */
    public static Font getQuicksandBoldFont() {
        if (quicksandBoldFont == null) {
            try {
                quicksandBoldFont =
                        Font.createFont(
                                Font.TRUETYPE_FONT,
                                FontUtils.class.getResourceAsStream(
                                        "/resource/Quicksand-Bold.ttf"));
                GraphicsEnvironment.getLocalGraphicsEnvironment().registerFont(quicksandBoldFont);
                // Ensure its scaled properly - only need to do this when its first loaded
                quicksandBoldFont = quicksandBoldFont.deriveFont((float) getDefaultFont().getSize());
            } catch (IOException | FontFormatException e) {
                // Fix: use the lazy accessor so the fallback is never null
                // (defaultFonts may not have been populated yet)
                quicksandBoldFont = getDefaultFont();
            }
        }
        return quicksandBoldFont;
    }

    /**
     * Tells whether the font size can be changed, i.e. the look and feel
     * exposes a {@code defaultFont} property.
     *
     * @return {@code true} if the default font (and thus size) can be changed
     */
    public static boolean canChangeSize() {
        return UIManager.getLookAndFeelDefaults().get("defaultFont") != null;
    }

    /**
     * Sets the default font for the given {@link FontType}, if the look and
     * feel supports it. For {@link FontType#general} the font is also installed
     * as the look-and-feel default.
     *
     * @param fontType the type the font applies to
     * @param font the font to use as default
     */
    public static void setDefaultFont(FontType fontType, Font font) {
        if (canChangeSize()) {
            getSystemDefaultFont(); // Make sure the system default font is saved first
            defaultFonts.put(fontType, font);
            scale = -1; // force it to be recalculated
            if (fontType == FontType.general) {
                UIManager.getLookAndFeelDefaults().put("defaultFont", font);
            }
        }
    }

    /**
     * Sets the default font for the given {@link FontType} by name and size.
     * A size of 5 or less is replaced by the current default font's size.
     *
     * @param fontType the type the font applies to
     * @param name the font name; blank is allowed and keeps the generic font
     * @param size the point size, or {@code <= 5} to keep the current size
     */
    public static void setDefaultFont(FontType fontType, String name, int size) {
        // A blank font name works fine.
        // For some reason getting the default font name doesn't work - it doesn't seem to get
        // applied everywhere
        // No ideas why :/
        if (size <= 5) {
            size = getDefaultFont().getSize();
        }
        defaultFontSets.put(fontType, (name != null && !name.isEmpty()));
        setDefaultFont(fontType, new Font(name, Font.PLAIN, size));
    }

    private static Font getDefaultFont() {
        return getDefaultFont(FontType.general);
    }

    // Lazily resolves the default font for the given type, falling back to the
    // look-and-feel "defaultFont" and then to a plain JLabel's font.
    private static Font getDefaultFont(FontType fontType) {
        if (defaultFonts.get(fontType) == null) {
            defaultFonts.put(fontType, Font.getFont("defaultFont"));
            if (defaultFonts.get(fontType) == null) {
                defaultFonts.put(fontType, new JLabel("").getFont());
            }
        }
        return defaultFonts.get(fontType);
    }

    /**
     * Gets the named font, correctly scaled
     *
     * @param name
     * @return the named font, correctly scaled
     */
    public static Font getFont(String name) {
        return getFont(name, Font.PLAIN);
    }

    /**
     * Gets the default font with the specified style, correctly scaled
     *
     * @param style
     * @return the default font with the specified style, correctly scaled
     */
    public static Font getFont(int style) {
        return getDefaultFont().deriveFont(style);
    }

    /**
     * Gets the font for the give {@link FontType}
     *
     * @param fontType the {@code FontType} for which the font should be returned
     * @return font
     */
    public static Font getFont(FontType fontType) {
        return getDefaultFont(fontType);
    }

    /**
     * Gets font for the given {@link FontType} or the fallback font with the given name if no font
     * is set for the given {@code FontType}
     *
     * @param fontType the {@code FontType} for which the font should be returned
     * @param fallbackFontName the name ({@code String}) of the font which will be returned of no
     *     font is set for the given {@code FontType}
     * @return work panels font or fallback font
     */
    public static Font getFontWithFallback(FontType fontType, String fallbackFontName) {
        if (isDefaultFontSet(fontType)) {
            return getFont(fontType);
        } else {
            return getFont(fallbackFontName);
        }
    }

    /**
     * Gets the named font with the specified style, correctly scaled
     *
     * @param name
     * @param style
     * @return the named font with the specified style, correctly scaled
     */
    public static Font getFont(String name, int style) {
        return new Font(name, style, getDefaultFont().getSize());
    }

    /**
     * Gets the default font with the specified style and size, correctly scaled
     *
     * @param style
     * @param size
     * @return the default font with the specified style and size, correctly scaled
     */
    public static Font getFont(int style, Size size) {
        return getFont(getDefaultFont(), size).deriveFont(style);
    }

    /**
     * Gets the specified font with the specified style and size, correctly scaled
     *
     * @param style
     * @param size
     * @since 2.7.0
     * @return the specified font with the specified style and size, correctly scaled
     */
    public static Font getFont(Font font, int style, Size size) {
        return getFont(font, size).deriveFont(style);
    }

    /**
     * Gets the default font with the specified size, correctly scaled
     *
     * @param size
     * @return the default font with the specified size, correctly scaled
     */
    public static Font getFont(Size size) {
        return getFont(getDefaultFont(), size);
    }

    /**
     * Gets the specified font with the specified size, correctly scaled
     *
     * @param font
     * @param size
     * @since 2.7.0
     * @return the specified font with the specified size, correctly scaled
     */
    public static Font getFont(Font font, Size size) {
        float s;
        switch (size) {
            case smallest:
                s = (float) (font.getSize() * 0.5);
                break;
            case much_smaller:
                s = (float) (font.getSize() * 0.7);
                break;
            case smaller:
                s = (float) (font.getSize() * 0.8);
                break;
            case standard:
                s = (float) font.getSize();
                break;
            case larger:
                s = (float) (font.getSize() * 1.5);
                break;
            case much_larger:
                s = (float) (font.getSize() * 3);
                break;
            case huge:
                s = (float) (font.getSize() * 4);
                break;
            default:
                s = (float) (font.getSize());
                break;
        }
        return font.deriveFont(s);
    }

    /**
     * Gets the ratio of the configured default font size to the system default
     * font size. Recomputed lazily after {@link #setDefaultFont(FontType, Font)}.
     *
     * @return the scale factor
     */
    public static float getScale() {
        if (scale == -1) {
            scale = getDefaultFont().getSize2D() / getSystemDefaultFont().getSize2D();
        }
        return scale;
    }

    /**
     * Tells whether or not a custom default font was set.
     *
     * <p>If no custom font was set it's used the system default font.
     *
     * @return {@code true} if a custom font was set, {@code false} otherwise.
     * @since 2.7.0
     * @see #getSystemDefaultFont()
     */
    public static boolean isDefaultFontSet() {
        return isDefaultFontSet(FontType.general);
    }

    /**
     * Tells whether or not a custom default font was set for the given {@link FontType}.
     *
     * @return {@code true} if a custom font was set, {@code false} otherwise.
     * @since 2.8.0
     * @see #getSystemDefaultFont()
     */
    public static boolean isDefaultFontSet(FontType fontType) {
        // Fix: the map may have no entry for this type; unboxing a null Boolean
        // would throw a NullPointerException. Boolean.TRUE.equals(null) is false.
        return Boolean.TRUE.equals(defaultFontSets.get(fontType));
    }
}
package com.bazaarvoice.emodb.auth;

import com.bazaarvoice.emodb.auth.apikey.ApiKey;
import com.bazaarvoice.emodb.auth.apikey.ApiKeyModification;
import com.bazaarvoice.emodb.auth.apikey.ApiKeyRequest;
import com.bazaarvoice.emodb.auth.identity.InMemoryAuthIdentityManager;
import com.bazaarvoice.emodb.auth.jersey.Authenticated;
import com.bazaarvoice.emodb.auth.jersey.Subject;
import com.bazaarvoice.emodb.auth.permissions.InMemoryPermissionManager;
import com.bazaarvoice.emodb.auth.permissions.MatchingPermissionResolver;
import com.bazaarvoice.emodb.auth.permissions.PermissionIDs;
import com.bazaarvoice.emodb.auth.permissions.PermissionUpdateRequest;
import com.bazaarvoice.emodb.auth.test.ResourceTestAuthUtil;
import com.google.common.collect.ImmutableMap;
import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.api.client.WebResource;
import io.dropwizard.testing.junit.ResourceTestRule;
import org.apache.shiro.authz.annotation.RequiresAuthentication;
import org.apache.shiro.authz.annotation.RequiresPermissions;
import org.junit.After;
import org.junit.Rule;
import org.junit.Test;

import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.net.URLEncoder;
import java.util.Map;

import static com.bazaarvoice.emodb.auth.permissions.MatchingPermission.escape;
import static java.lang.String.format;
import static org.testng.Assert.assertEquals;

/**
 * Tests resource-level permission checks using three equivalent strategies:
 * explicit checks in the resource method ({@link PermissionCheck#EXPLICIT}),
 * annotation-driven checks with path-parameter substitution
 * ({@link PermissionCheck#PATH}), and annotation-driven checks with
 * query-parameter substitution ({@link PermissionCheck#QUERY}).
 */
public class ResourcePermissionsTest {

    private InMemoryAuthIdentityManager<ApiKey> _authIdentityDAO = new InMemoryAuthIdentityManager<>();
    private InMemoryPermissionManager _permissionDAO = new InMemoryPermissionManager(new MatchingPermissionResolver());

    @Rule
    public ResourceTestRule _resourceTestRule = setupResourceTestRule();

    // One value per permission-checking strategy under test.
    private enum PermissionCheck {
        EXPLICIT,
        PATH,
        QUERY
    }

    /** Resource that checks permissions explicitly via the authenticated {@link Subject}. */
    @Path("explicit/country/{country}")
    @Produces(MediaType.TEXT_PLAIN)
    @RequiresAuthentication
    public static class ExplicitPermissionResource {
        @Path("city/{city}")
        @GET
        public String getCity(@PathParam("country") String country,
                              @PathParam("city") String city,
                              @Authenticated Subject subject) {
            if (!subject.hasPermissions(format("country|get|%s", escape(country)), format("city|get|%s", escape(city)))) {
                throw new WebApplicationException(Response.Status.FORBIDDEN);
            }
            return format("Welcome to %s, %s", city, country);
        }
    }

    /** Resource whose permissions are declared with path-parameter substitution. */
    @Path("path/country/{country}")
    @Produces(MediaType.TEXT_PLAIN)
    @RequiresPermissions("country|get|{country}")
    public static class PathPermissionResource {
        @Path("city/{city}")
        @RequiresPermissions("city|get|{city}")
        @GET
        public String getCity(@PathParam("country") String country, @PathParam("city") String city) {
            return format("Welcome to %s, %s", city, country);
        }
    }

    /** Resource whose permissions are declared with query-parameter substitution. */
    @Path("query/welcome")
    @Produces(MediaType.TEXT_PLAIN)
    public static class QueryPermissionResource {
        @RequiresPermissions({"country|get|{?country}", "city|get|{?city}"})
        @GET
        public String getCity(@QueryParam("country") String country, @QueryParam("city") String city) {
            return format("Welcome to %s, %s", city, country);
        }
    }

    /** Builds the test rule with all three resources and anonymous access mapped to the "anon" key. */
    protected ResourceTestRule setupResourceTestRule() {
        ResourceTestRule.Builder resourceTestRuleBuilder = ResourceTestRule.builder();

        ResourceTestAuthUtil.setUpResources(resourceTestRuleBuilder, SecurityManagerBuilder.create()
                .withAuthIdentityReader(_authIdentityDAO)
                .withPermissionReader(_permissionDAO)
                .withAnonymousAccessAs("anon")
                .build());

        resourceTestRuleBuilder.addResource(new ExplicitPermissionResource());
        resourceTestRuleBuilder.addResource(new PathPermissionResource());
        resourceTestRuleBuilder.addResource(new QueryPermissionResource());

        return resourceTestRuleBuilder.build();
    }

    @After
    public void cleanupTest() {
        _authIdentityDAO.reset();
        _permissionDAO.reset();
    }

    @Test
    public void testGetWithMissingIdentityExplicit() throws Exception {
        testGetWithMissingIdentity(PermissionCheck.EXPLICIT);
    }

    @Test
    public void testGetWithMissingIdentityPath() throws Exception {
        testGetWithMissingIdentity(PermissionCheck.PATH);
    }

    @Test
    public void testGetWithMissingIdentityQuery() throws Exception {
        testGetWithMissingIdentity(PermissionCheck.QUERY);
    }

    // No API key at all -> forbidden.
    private void testGetWithMissingIdentity(PermissionCheck permissionCheck) throws Exception {
        ClientResponse response = getCountryAndCity(permissionCheck, "Spain", "Madrid", null);
        assertEquals(response.getStatus(), Response.Status.FORBIDDEN.getStatusCode());
    }

    @Test
    public void testGetWithNonExistentIdentityExplicit() throws Exception {
        testGetWithNonExistentIdentity(PermissionCheck.EXPLICIT);
    }

    @Test
    public void testGetWithNonExistentIdentityPath() throws Exception {
        testGetWithNonExistentIdentity(PermissionCheck.PATH);
    }

    @Test
    public void testGetWithNonExistentIdentityQuery() throws Exception {
        testGetWithNonExistentIdentity(PermissionCheck.QUERY);
    }

    // API key that was never created -> forbidden.
    private void testGetWithNonExistentIdentity(PermissionCheck permissionCheck) throws Exception {
        ClientResponse response = getCountryAndCity(permissionCheck, "Spain", "Madrid", "testkey");
        assertEquals(response.getStatus(), Response.Status.FORBIDDEN.getStatusCode());
    }

    @Test
    public void testGetWithMissingPermissionExplicit() throws Exception {
        testGetWithMissingPermission(PermissionCheck.EXPLICIT);
    }

    @Test
    public void testGetWithMissingPermissionPath() throws Exception {
        testGetWithMissingPermission(PermissionCheck.PATH);
    }

    @Test
    public void testGetWithMissingPermissionQuery() throws Exception {
        testGetWithMissingPermission(PermissionCheck.QUERY);
    }

    // Role has the country permission but not the city permission -> forbidden.
    private void testGetWithMissingPermission(PermissionCheck permissionCheck) throws Exception {
        _authIdentityDAO.createIdentity("testkey", new ApiKeyModification().addRoles("testrole"));
        // Fix: permissions must be attached via PermissionIDs.forRole(), as in every
        // other test; the raw role name left the role with no permissions at all,
        // making this test pass for the wrong reason.
        _permissionDAO.updatePermissions(PermissionIDs.forRole("testrole"), new PermissionUpdateRequest().permit("country|get|Spain"));

        ClientResponse response = getCountryAndCity(permissionCheck, "Spain", "Madrid", "testkey");
        assertEquals(response.getStatus(), Response.Status.FORBIDDEN.getStatusCode());
    }

    @Test
    public void testGetWithMatchingPermissionsExplicit() throws Exception {
        testGetWithMatchingPermissions(PermissionCheck.EXPLICIT);
    }

    @Test
    public void testGetWithMatchingPermissionsPath() throws Exception {
        testGetWithMatchingPermissions(PermissionCheck.PATH);
    }

    @Test
    public void testGetWithMatchingPermissionsQuery() throws Exception {
        testGetWithMatchingPermissions(PermissionCheck.QUERY);
    }

    // Role has both exact permissions -> allowed.
    private void testGetWithMatchingPermissions(PermissionCheck permissionCheck) throws Exception {
        _authIdentityDAO.createIdentity("testkey", new ApiKeyModification().addRoles("testrole"));
        _permissionDAO.updatePermissions(PermissionIDs.forRole("testrole"), new PermissionUpdateRequest().permit("city|get|Madrid", "country|get|Spain"));

        ClientResponse response = getCountryAndCity(permissionCheck, "Spain", "Madrid", "testkey");
        assertEquals(response.getEntity(String.class), "Welcome to Madrid, Spain");
    }

    @Test
    public void testGetWithMatchingWildcardPermissionsExplicit() throws Exception {
        testGetWithMatchingWildcardPermissions(PermissionCheck.EXPLICIT);
    }

    @Test
    public void testGetWithMatchingWildcardPermissionsPath() throws Exception {
        testGetWithMatchingWildcardPermissions(PermissionCheck.PATH);
    }

    @Test
    public void testGetWithMatchingWildcardPermissionsQuery() throws Exception {
        testGetWithMatchingWildcardPermissions(PermissionCheck.QUERY);
    }

    // Wildcards that cover the requested values -> allowed.
    private void testGetWithMatchingWildcardPermissions(PermissionCheck permissionCheck) throws Exception {
        _authIdentityDAO.createIdentity("testkey", new ApiKeyModification().addRoles("testrole"));
        _permissionDAO.updatePermissions(PermissionIDs.forRole("testrole"), new PermissionUpdateRequest().permit("city|get|*", "country|*|*"));

        ClientResponse response = getCountryAndCity(permissionCheck, "Spain", "Madrid", "testkey");
        assertEquals(response.getEntity(String.class), "Welcome to Madrid, Spain");
    }

    @Test
    public void testGetWithNonMatchingWildcardPermissionExplicit() throws Exception {
        testGetWithNonMatchingWildcardPermission(PermissionCheck.EXPLICIT);
    }

    @Test
    public void testGetWithNonMatchingWildcardPermissionPath() throws Exception {
        testGetWithNonMatchingWildcardPermission(PermissionCheck.PATH);
    }

    @Test
    public void testGetWithNonMatchingWildcardPermissionQuery() throws Exception {
        testGetWithNonMatchingWildcardPermission(PermissionCheck.QUERY);
    }

    // Wildcard permission for a different country -> forbidden.
    private void testGetWithNonMatchingWildcardPermission(PermissionCheck permissionCheck) throws Exception {
        _authIdentityDAO.createIdentity("testkey", new ApiKeyModification().addRoles("testrole"));
        _permissionDAO.updatePermissions(PermissionIDs.forRole("testrole"), new PermissionUpdateRequest().permit("city|get|Madrid", "country|*|Portugal"));

        ClientResponse response = getCountryAndCity(permissionCheck, "Spain", "Madrid", "testkey");
        assertEquals(response.getStatus(), Response.Status.FORBIDDEN.getStatusCode());
    }

    @Test
    public void testGetWithEscapedPermissionExplicit() throws Exception {
        testGetWithEscapedPermission(PermissionCheck.EXPLICIT);
    }

    @Test
    public void testGetWithEscapedPermissionPath() throws Exception {
        testGetWithEscapedPermission(PermissionCheck.PATH);
    }

    @Test
    public void testGetWithEscapedPermissionQuery() throws Exception {
        testGetWithEscapedPermission(PermissionCheck.QUERY);
    }

    // Values containing the '|' and '*' meta characters must match when escaped.
    private void testGetWithEscapedPermission(PermissionCheck permissionCheck) throws Exception {
        _authIdentityDAO.createIdentity("testkey", new ApiKeyModification().addRoles("testrole"));
        _permissionDAO.updatePermissions(PermissionIDs.forRole("testrole"), new PermissionUpdateRequest().permit("city|get|Pipe\\|Town", "country|get|Star\\*Nation"));

        ClientResponse response = getCountryAndCity(permissionCheck, "Star*Nation", "Pipe|Town", "testkey");
        assertEquals(response.getEntity(String.class), "Welcome to Pipe|Town, Star*Nation");
    }

    @Test
    public void testAnonymousWithPermissionExplicit() throws Exception {
        testAnonymousWithPermission(PermissionCheck.EXPLICIT);
    }

    @Test
    public void testAnonymousWithPermissionPath() throws Exception {
        testAnonymousWithPermission(PermissionCheck.PATH);
    }

    @Test
    public void testAnonymousWithPermissionQuery() throws Exception {
        testAnonymousWithPermission(PermissionCheck.QUERY);
    }

    // No API key, but the anonymous identity's role has the permissions -> allowed.
    private void testAnonymousWithPermission(PermissionCheck permissionCheck) throws Exception {
        _authIdentityDAO.createIdentity("anon", new ApiKeyModification().addRoles("anonrole"));
        _permissionDAO.updatePermissions(PermissionIDs.forRole("anonrole"), new PermissionUpdateRequest().permit("city|get|Madrid", "country|get|Spain"));

        ClientResponse response = getCountryAndCity(permissionCheck, "Spain", "Madrid", null);
        assertEquals(response.getEntity(String.class), "Welcome to Madrid, Spain");
    }

    @Test
    public void testAnonymousWithoutPermissionExplicit() throws Exception {
        testAnonymousWithoutPermission(PermissionCheck.EXPLICIT);
    }

    @Test
    public void testAnonymousWithoutPermissionQuery() throws Exception {
        // Fix: was passing PermissionCheck.PATH, so the QUERY strategy was never
        // exercised by this test (and PATH was covered twice).
        testAnonymousWithoutPermission(PermissionCheck.QUERY);
    }

    @Test
    public void testAnonymousWithoutPermissionPath() throws Exception {
        // Fix: was passing PermissionCheck.QUERY (swapped with the test above).
        testAnonymousWithoutPermission(PermissionCheck.PATH);
    }

    // No API key and no anonymous permissions -> forbidden.
    private void testAnonymousWithoutPermission(PermissionCheck permissionCheck) throws Exception {
        ClientResponse response = getCountryAndCity(permissionCheck, "Spain", "Madrid", null);
        assertEquals(response.getStatus(), Response.Status.FORBIDDEN.getStatusCode());
    }

    // URI templates per strategy; filled in by getCountryAndCity().
    private Map<PermissionCheck, String> _uriFormatMap = ImmutableMap.of(
            PermissionCheck.EXPLICIT, "/explicit/country/%s/city/%s",
            PermissionCheck.PATH, "/path/country/%s/city/%s",
            PermissionCheck.QUERY, "/query/welcome?country=%s&city=%s");

    /**
     * Issues a GET for the given country/city using the requested permission-check
     * strategy, optionally authenticating with the given API key.
     */
    private ClientResponse getCountryAndCity(PermissionCheck permissionCheck, String country, String city, String apiKey)
            throws Exception {
        String uri = format(_uriFormatMap.get(permissionCheck), URLEncoder.encode(country, "UTF-8"), URLEncoder.encode(city, "UTF-8"));
        WebResource resource = _resourceTestRule.client().resource(uri);
        if (apiKey != null) {
            return resource.header(ApiKeyRequest.AUTHENTICATION_HEADER, apiKey).get(ClientResponse.class);
        }
        return resource.get(ClientResponse.class);
    }
}
package de.danoeh.antennapod.core.storage;

import android.app.backup.BackupManager;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.database.Cursor;
import android.preference.PreferenceManager;
import android.util.Log;

import org.shredzone.flattr4j.model.Flattr;

import java.io.File;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.ThreadFactory;

import de.danoeh.antennapod.core.BuildConfig;
import de.danoeh.antennapod.core.ClientConfig;
import de.danoeh.antennapod.core.asynctask.FlattrClickWorker;
import de.danoeh.antennapod.core.feed.EventDistributor;
import de.danoeh.antennapod.core.feed.Feed;
import de.danoeh.antennapod.core.feed.FeedEvent;
import de.danoeh.antennapod.core.feed.FeedImage;
import de.danoeh.antennapod.core.feed.FeedItem;
import de.danoeh.antennapod.core.feed.FeedMedia;
import de.danoeh.antennapod.core.feed.FeedPreferences;
import de.danoeh.antennapod.core.feed.QueueEvent;
import de.danoeh.antennapod.core.gpoddernet.model.GpodnetEpisodeAction;
import de.danoeh.antennapod.core.preferences.GpodnetPreferences;
import de.danoeh.antennapod.core.preferences.PlaybackPreferences;
import de.danoeh.antennapod.core.preferences.UserPreferences;
import de.danoeh.antennapod.core.service.download.DownloadStatus;
import de.danoeh.antennapod.core.service.playback.PlaybackService;
import de.danoeh.antennapod.core.util.LongList;
import de.danoeh.antennapod.core.util.flattr.FlattrStatus;
import de.danoeh.antennapod.core.util.flattr.FlattrThing;
import de.danoeh.antennapod.core.util.flattr.SimpleFlattrThing;
import de.greenrobot.event.EventBus;

/**
 * Provides methods for writing data to AntennaPod's database.
 * In general, DBWriter-methods will be executed on an internal ExecutorService.
 * Some methods return a Future-object which the caller can use for waiting for the method's completion. The returned Future's
 * will NOT contain any results.
 * The caller can also use the {@link EventDistributor} in order to be notified about the method's completion asynchronously.
 * This class will use the {@link EventDistributor} to notify listeners about changes in the database.
 */
public class DBWriter {
    private static final String TAG = "DBWriter";

    // Single-threaded executor: all write operations are serialized in submission order,
    // which is what makes the read-modify-write patterns below (e.g. on the queue) safe
    // without explicit locking.
    private static final ExecutorService dbExec;

    static {
        dbExec = Executors.newSingleThreadExecutor(new ThreadFactory() {
            @Override
            public Thread newThread(Runnable r) {
                // low priority: DB writes are background housekeeping
                Thread t = new Thread(r);
                t.setPriority(Thread.MIN_PRIORITY);
                return t;
            }
        });
    }

    // Utility class: no instances.
    private DBWriter() {
    }

    /**
     * Deletes a downloaded FeedMedia file from the storage device.
     *
     * @param context A context that is used for opening a database connection.
     * @param mediaId ID of the FeedMedia object whose downloaded file should be deleted.
     */
    public static Future<?> deleteFeedMediaOfItem(final Context context,
                                                  final long mediaId) {
        return dbExec.submit(new Runnable() {
            @Override
            public void run() {
                final FeedMedia media = DBReader.getFeedMedia(context, mediaId);
                if (media != null) {
                    Log.i(TAG, String.format("Requested to delete FeedMedia [id=%d, title=%s, downloaded=%s",
                            media.getId(), media.getEpisodeTitle(), String.valueOf(media.isDownloaded())));
                    boolean result = false;
                    if (media.isDownloaded()) {
                        // delete downloaded media file
                        File mediaFile = new File(media.getFile_url());
                        if (mediaFile.exists()) {
                            result = mediaFile.delete();
                        }
                        media.setDownloaded(false);
                        media.setFile_url(null);
                        media.setHasEmbeddedPicture(false);
                        PodDBAdapter adapter = new PodDBAdapter(context);
                        adapter.open();
                        adapter.setMedia(media);
                        adapter.close();

                        // If media is currently being played, change playback
                        // type to 'stream' and shutdown playback service
                        SharedPreferences prefs = PreferenceManager
                                .getDefaultSharedPreferences(context);
                        if (PlaybackPreferences.getCurrentlyPlayingMedia() == FeedMedia.PLAYABLE_TYPE_FEEDMEDIA) {
                            if (media.getId() == PlaybackPreferences
                                    .getCurrentlyPlayingFeedMediaId()) {
                                SharedPreferences.Editor editor = prefs.edit();
                                editor.putBoolean(
                                        PlaybackPreferences.PREF_CURRENT_EPISODE_IS_STREAM,
                                        true);
                                editor.commit();
                            }
                            // NOTE(review): this repeats the same id comparison as the block
                            // above; the two could presumably be merged — confirm intent.
                            if (PlaybackPreferences
                                    .getCurrentlyPlayingFeedMediaId() == media
                                    .getId()) {
                                context.sendBroadcast(new Intent(
                                        PlaybackService.ACTION_SHUTDOWN_PLAYBACK_SERVICE));
                            }
                        }

                        // Gpodder: queue delete action for synchronization
                        if(GpodnetPreferences.loggedIn()) {
                            FeedItem item = media.getItem();
                            GpodnetEpisodeAction action = new GpodnetEpisodeAction.Builder(item, GpodnetEpisodeAction.Action.DELETE)
                                    .currentDeviceId()
                                    .currentTimestamp()
                                    .build();
                            GpodnetPreferences.enqueueEpisodeAction(action);
                        }
                    }
                    // result stays false when the media was not downloaded or the file was missing
                    Log.d(TAG, "Deleting File. Result: " + result);
                    EventBus.getDefault().post(new QueueEvent(QueueEvent.Action.DELETED_MEDIA, media.getItem()));
                    EventDistributor.getInstance().sendUnreadItemsUpdateBroadcast();
                }
            }
        });
    }

    /**
     * Deletes a Feed and all downloaded files of its components like images and downloaded episodes.
     *
     * @param context A context that is used for opening a database connection.
     * @param feedId  ID of the Feed that should be deleted.
     */
    public static Future<?> deleteFeed(final Context context, final long feedId) {
        return dbExec.submit(new Runnable() {
            @Override
            public void run() {
                DownloadRequester requester = DownloadRequester.getInstance();
                SharedPreferences prefs = PreferenceManager
                        .getDefaultSharedPreferences(context
                                .getApplicationContext());
                final Feed feed = DBReader.getFeed(context, feedId);

                if (feed != null) {
                    // If an episode of this feed is currently playing, stop playback
                    // and clear the 'currently playing feed' preference.
                    if (PlaybackPreferences.getCurrentlyPlayingMedia() == FeedMedia.PLAYABLE_TYPE_FEEDMEDIA
                            && PlaybackPreferences.getLastPlayedFeedId() == feed
                            .getId()) {
                        context.sendBroadcast(new Intent(
                                PlaybackService.ACTION_SHUTDOWN_PLAYBACK_SERVICE));
                        SharedPreferences.Editor editor = prefs.edit();
                        editor.putLong(
                                PlaybackPreferences.PREF_CURRENTLY_PLAYING_FEED_ID,
                                -1);
                        editor.commit();
                    }

                    // delete image file
                    if (feed.getImage() != null) {
                        if (feed.getImage().isDownloaded()
                                && feed.getImage().getFile_url() != null) {
                            File imageFile = new File(feed.getImage()
                                    .getFile_url());
                            imageFile.delete();
                        } else if (requester.isDownloadingFile(feed.getImage())) {
                            requester.cancelDownload(context, feed.getImage());
                        }
                    }
                    // delete stored media files and mark them as read
                    List<FeedItem> queue = DBReader.getQueue(context);
                    List<FeedItem> removed = new ArrayList<>();
                    if (feed.getItems() == null) {
                        // lazily load the item list so the loop below can iterate it
                        DBReader.getFeedItemList(context, feed);
                    }

                    for (FeedItem item : feed.getItems()) {
                        // remember queued items of this feed so the queue can be updated below
                        if(queue.remove(item)) {
                            removed.add(item);
                        }
                        if (item.getMedia() != null
                                && item.getMedia().isDownloaded()) {
                            File mediaFile = new File(item.getMedia()
                                    .getFile_url());
                            mediaFile.delete();
                        } else if (item.getMedia() != null
                                && requester.isDownloadingFile(item.getMedia())) {
                            requester.cancelDownload(context, item.getMedia());
                        }
                        if (item.hasItemImage()) {
                            FeedImage image = item.getImage();
                            if (image.isDownloaded() && image.getFile_url() != null) {
                                File imgFile = new File(image.getFile_url());
                                imgFile.delete();
                            } else if (requester.isDownloadingFile(image)) {
                                requester.cancelDownload(context, item.getImage());
                            }
                        }
                    }
                    PodDBAdapter adapter = new PodDBAdapter(context);
                    adapter.open();
                    if (removed.size() > 0) {
                        adapter.setQueue(queue);
                        EventBus.getDefault().post(new QueueEvent(QueueEvent.Action.IRREVERSIBLE_REMOVED, removed));
                    }
                    adapter.removeFeed(feed);
                    adapter.close();

                    if (ClientConfig.gpodnetCallbacks.gpodnetEnabled()) {
                        GpodnetPreferences.addRemovedFeed(feed.getDownload_url());
                    }
                    EventDistributor.getInstance().sendFeedUpdateBroadcast();

                    // notify Android's backup framework that app data changed
                    BackupManager backupManager = new BackupManager(context);
                    backupManager.dataChanged();
                }
            }
        });
    }

    /**
     * Deletes the entire playback history.
     *
     * @param context A context that is used for opening a database connection.
     */
    public static Future<?> clearPlaybackHistory(final Context context) {
        return dbExec.submit(new Runnable() {
            @Override
            public void run() {
                PodDBAdapter adapter = new PodDBAdapter(context);
                adapter.open();
                adapter.clearPlaybackHistory();
                adapter.close();
                EventDistributor.getInstance()
                        .sendPlaybackHistoryUpdateBroadcast();
            }
        });
    }

    /**
     * Deletes the entire download log.
     *
     * @param context A context that is used for opening a database connection.
     */
    public static Future<?> clearDownloadLog(final Context context) {
        return dbExec.submit(new Runnable() {
            @Override
            public void run() {
                PodDBAdapter adapter = new PodDBAdapter(context);
                adapter.open();
                adapter.clearDownloadLog();
                adapter.close();
                EventDistributor.getInstance()
                        .sendDownloadLogUpdateBroadcast();
            }
        });
    }

    /**
     * Adds a FeedMedia object to the playback history. A FeedMedia object is in the playback history if
     * its playback completion date is set to a non-null value. This method will set the playback completion date to the
     * current date regardless of the current value.
     *
     * @param context A context that is used for opening a database connection.
     * @param media   FeedMedia that should be added to the playback history.
     */
    public static Future<?> addItemToPlaybackHistory(final Context context,
                                                     final FeedMedia media) {
        return dbExec.submit(new Runnable() {
            @Override
            public void run() {
                if (BuildConfig.DEBUG)
                    Log.d(TAG, "Adding new item to playback history");
                media.setPlaybackCompletionDate(new Date());
                // reset played_duration to 0 so that it behaves correctly when the episode is played again
                media.setPlayedDuration(0);

                PodDBAdapter adapter = new PodDBAdapter(context);
                adapter.open();
                adapter.setFeedMediaPlaybackCompletionDate(media);
                adapter.close();
                EventDistributor.getInstance().sendPlaybackHistoryUpdateBroadcast();
            }
        });
    }

    /**
     * Trims the download log down to {@link DBReader#DOWNLOAD_LOG_SIZE} entries by removing
     * the oldest surplus items.
     * NOTE(review): not called from within this class as shown here — presumably invoked by
    * a sibling class or a part of the file not visible; confirm before removing.
     *
     * @param adapter an already-opened database adapter; this method does not close it.
     */
    private static void cleanupDownloadLog(final PodDBAdapter adapter) {
        final long logSize = adapter.getDownloadLogSize();
        if (logSize > DBReader.DOWNLOAD_LOG_SIZE) {
            if (BuildConfig.DEBUG)
                Log.d(TAG, "Cleaning up download log");
            adapter.removeDownloadLogItems(logSize - DBReader.DOWNLOAD_LOG_SIZE);
        }
    }

    /**
     * Adds a Download status object to the download log.
     *
     * @param context A context that is used for opening a database connection.
     * @param status  The DownloadStatus object.
     */
    public static Future<?> addDownloadStatus(final Context context,
                                              final DownloadStatus status) {
        return dbExec.submit(new Runnable() {
            @Override
            public void run() {
                PodDBAdapter adapter = new PodDBAdapter(context);
                adapter.open();
                adapter.setDownloadStatus(status);
                adapter.close();
                EventDistributor.getInstance().sendDownloadLogUpdateBroadcast();
            }
        });
    }

    /**
     * Inserts a FeedItem in the queue at the specified index. The 'read'-attribute of the FeedItem will be set to
     * true. If the FeedItem is already in the queue, the queue will not be modified.
     *
     * @param context             A context that is used for opening a database connection.
     * @param itemId              ID of the FeedItem that should be added to the queue.
     * @param index               Destination index. Must be in range 0..queue.size()
     * @param performAutoDownload True if an auto-download process should be started after the operation
     * @throws IndexOutOfBoundsException if index < 0 || index >= queue.size()
     */
    public static Future<?> addQueueItemAt(final Context context, final long itemId,
                                           final int index, final boolean performAutoDownload) {
        return dbExec.submit(new Runnable() {
            @Override
            public void run() {
                final PodDBAdapter adapter = new PodDBAdapter(context);
                adapter.open();
                final List<FeedItem> queue = DBReader.getQueue(context, adapter);
                FeedItem item;

                if (queue != null) {
                    if (!itemListContains(queue, itemId)) {
                        item = DBReader.getFeedItem(context, itemId);
                        if (item != null) {
                            queue.add(index, item);
                            adapter.setQueue(queue);
                            EventBus.getDefault().post(new QueueEvent(QueueEvent.Action.ADDED, item, index));
                            // enqueueing a 'new' item clears its new-flag
                            if(item.isNew()) {
                                DBWriter.markItemRead(context, false, item.getId());
                            }
                        }
                    }
                }

                adapter.close();
                if (performAutoDownload) {
                    DBTasks.autodownloadUndownloadedItems(context);
                }
            }
        });
    }

    /**
     * Appends FeedItem objects to the end of the queue without triggering auto-download.
     * See {@link #addQueueItem(Context, boolean, long...)}.
     */
    public static Future<?> addQueueItem(final Context context, final long... itemIds) {
        return addQueueItem(context, false, itemIds);
    }

    /**
     * Appends FeedItem objects to the end of the queue. The 'read'-attribute of all items will be set to true.
     * If a FeedItem is already in the queue, the FeedItem will not change its position in the queue.
     *
     * @param context             A context that is used for opening a database connection.
     * @param performAutoDownload true if an auto-download process should be started after the operation.
     * @param itemIds             IDs of the FeedItem objects that should be added to the queue.
     */
    public static Future<?> addQueueItem(final Context context, final boolean performAutoDownload,
                                         final long... itemIds) {
        return dbExec.submit(new Runnable() {
            @Override
            public void run() {
                if (itemIds.length > 0) {
                    final PodDBAdapter adapter = new PodDBAdapter(context);
                    adapter.open();
                    final List<FeedItem> queue = DBReader.getQueue(context, adapter);

                    if (queue != null) {
                        boolean queueModified = false;
                        LongList markAsUnplayedIds = new LongList();
                        for (int i = 0; i < itemIds.length; i++) {
                            if (!itemListContains(queue, itemIds[i])) {
                                final FeedItem item = DBReader.getFeedItem(context, itemIds[i]);
                                if (item != null) {
                                    // add item to either front ot back of queue
                                    boolean addToFront = UserPreferences.enqueueAtFront();
                                    if (addToFront) {
                                        // offset by i keeps the relative order of the batch
                                        queue.add(0 + i, item);
                                    } else {
                                        queue.add(item);
                                    }
                                    queueModified = true;
                                    if(item.isNew()) {
                                        markAsUnplayedIds.add(item.getId());
                                    }
                                }
                            }
                        }
                        if (queueModified) {
                            adapter.setQueue(queue);
                            EventBus.getDefault().post(new QueueEvent(QueueEvent.Action.ADDED_ITEMS, queue));
                            if(markAsUnplayedIds.size() > 0) {
                                DBWriter.markItemRead(context, false, markAsUnplayedIds.toArray());
                            }
                        }
                    }
                    adapter.close();
                    if (performAutoDownload) {
                        DBTasks.autodownloadUndownloadedItems(context);
                    }
                }
            }
        });
    }

    /**
     * Removes all FeedItem objects from the queue.
     *
     * @param context A context that is used for opening a database connection.
     */
    public static Future<?> clearQueue(final Context context) {
        return dbExec.submit(new Runnable() {
            @Override
            public void run() {
                PodDBAdapter adapter = new PodDBAdapter(context);
                adapter.open();
                adapter.clearQueue();
                adapter.close();

                EventBus.getDefault().post(new QueueEvent(QueueEvent.Action.CLEARED));
            }
        });
    }

    /**
     * Removes a FeedItem object from the queue.
     *
     * @param context             A context that is used for opening a database connection.
     * @param item                FeedItem that should be removed.
     * @param performAutoDownload true if an auto-download process should be started after the operation.
     */
    public static Future<?> removeQueueItem(final Context context,
                                            final FeedItem item, final boolean performAutoDownload) {
        return dbExec.submit(new Runnable() {
            @Override
            public void run() {
                final PodDBAdapter adapter = new PodDBAdapter(context);
                adapter.open();
                final List<FeedItem> queue = DBReader.getQueue(context, adapter);

                if (queue != null) {
                    int position = queue.indexOf(item);
                    if(position >= 0) {
                        queue.remove(position);
                        adapter.setQueue(queue);
                        EventBus.getDefault().post(new QueueEvent(QueueEvent.Action.REMOVED, item, position));
                    } else {
                        Log.w(TAG, "Queue was not modified by call to removeQueueItem");
                    }
                } else {
                    Log.e(TAG, "removeQueueItem: Could not load queue");
                }
                adapter.close();
                if (performAutoDownload) {
                    DBTasks.autodownloadUndownloadedItems(context);
                }
            }
        });
    }

    /**
     * Moves the specified item to the top of the queue.
     *
     * @param context         A context that is used for opening a database connection.
     * @param itemId          The item to move to the top of the queue
     * @param broadcastUpdate true if this operation should trigger a QueueUpdateBroadcast. This option should be set to
     *                        false if the caller wants to avoid unexpected updates of the GUI.
     */
    public static Future<?> moveQueueItemToTop(final Context context, final long itemId, final boolean broadcastUpdate) {
        return dbExec.submit(new Runnable() {
            @Override
            public void run() {
                LongList queueIdList = DBReader.getQueueIDList(context);
                int index = queueIdList.indexOf(itemId);
                if (index >=0) {
                    moveQueueItemHelper(context, index, 0, broadcastUpdate);
                } else {
                    Log.e(TAG, "moveQueueItemToTop: item not found");
                }
            }
        });
    }

    /**
     * Moves the specified item to the bottom of the queue.
     *
     * @param context         A context that is used for opening a database connection.
     * @param itemId          The item to move to the bottom of the queue
     * @param broadcastUpdate true if this operation should trigger a QueueUpdateBroadcast. This option should be set to
     *                        false if the caller wants to avoid unexpected updates of the GUI.
     */
    public static Future<?> moveQueueItemToBottom(final Context context, final long itemId,
                                                  final boolean broadcastUpdate) {
        return dbExec.submit(new Runnable() {
            @Override
            public void run() {
                LongList queueIdList = DBReader.getQueueIDList(context);
                int index = queueIdList.indexOf(itemId);
                if(index >= 0) {
                    moveQueueItemHelper(context, index, queueIdList.size() - 1,
                            broadcastUpdate);
                } else {
                    Log.e(TAG, "moveQueueItemToBottom: item not found");
                }
            }
        });
    }

    /**
     * Changes the position of a FeedItem in the queue.
     *
     * @param context         A context that is used for opening a database connection.
     * @param from            Source index. Must be in range 0..queue.size()-1.
     * @param to              Destination index. Must be in range 0..queue.size()-1.
     * @param broadcastUpdate true if this operation should trigger a QueueUpdateBroadcast. This option should be set to
     *                        false if the caller wants to avoid unexpected updates of the GUI.
     * @throws IndexOutOfBoundsException if (to < 0 || to >= queue.size()) || (from < 0 || from >= queue.size())
     */
    public static Future<?> moveQueueItem(final Context context, final int from,
                                          final int to, final boolean broadcastUpdate) {
        return dbExec.submit(new Runnable() {
            @Override
            public void run() {
                moveQueueItemHelper(context, from, to, broadcastUpdate);
            }
        });
    }

    /**
     * Changes the position of a FeedItem in the queue.
     * <p/>
     * This function must be run using the ExecutorService (dbExec).
     *
     * @param context         A context that is used for opening a database connection.
     * @param from            Source index. Must be in range 0..queue.size()-1.
     * @param to              Destination index. Must be in range 0..queue.size()-1.
     * @param broadcastUpdate true if this operation should trigger a QueueUpdateBroadcast. This option should be set to
     *                        false if the caller wants to avoid unexpected updates of the GUI.
     * @throws IndexOutOfBoundsException if (to < 0 || to >= queue.size()) || (from < 0 || from >= queue.size())
     */
    private static void moveQueueItemHelper(final Context context, final int from,
                                            final int to, final boolean broadcastUpdate) {
        final PodDBAdapter adapter = new PodDBAdapter(context);
        adapter.open();
        final List<FeedItem> queue = DBReader
                .getQueue(context, adapter);

        if (queue != null) {
            if (from >= 0 && from < queue.size() && to >= 0 && to < queue.size()) {
                final FeedItem item = queue.remove(from);
                queue.add(to, item);

                adapter.setQueue(queue);
                if (broadcastUpdate) {
                    EventBus.getDefault().post(new QueueEvent(QueueEvent.Action.MOVED, item, to));
                }
            }
        } else {
            Log.e(TAG, "moveQueueItemHelper: Could not load queue");
        }
        adapter.close();
    }

    /**
     * Sets the 'read'-attribute of all specified FeedItems.
     *
     * @param context A context that is used for opening a database connection.
     * @param read    New value of the 'read'-attribute (true = PLAYED, false = UNPLAYED)
     * @param itemIds IDs of the FeedItems.
     */
    public static Future<?> markItemRead(final Context context, final boolean read, final long... itemIds) {
        return dbExec.submit(new Runnable() {
            @Override
            public void run() {
                final PodDBAdapter adapter = new PodDBAdapter(context);
                adapter.open();
                int played = read ? FeedItem.PLAYED : FeedItem.UNPLAYED;
                adapter.setFeedItemRead(played, itemIds);
                adapter.close();

                EventDistributor.getInstance().sendUnreadItemsUpdateBroadcast();
            }
        });
    }

    /**
     * Sets the 'read'-attribute of a FeedItem to the specified value.
     *
     * @param context            A context that is used for opening a database connection.
     * @param item               The FeedItem object
     * @param read               New value of the 'read'-attribute
     * @param resetMediaPosition true if this method should also reset the position of the FeedItem's FeedMedia object.
     *                           If the FeedItem has no FeedMedia object, this parameter will be ignored.
     */
    public static Future<?> markItemRead(Context context, FeedItem item, boolean read, boolean resetMediaPosition) {
        // mediaId 0 is the 'no media' sentinel for the private overload below
        long mediaId = (item.hasMedia()) ? item.getMedia().getId() : 0;
        return markItemRead(context, item.getId(), read, mediaId, resetMediaPosition);
    }

    // Worker for the public markItemRead overloads: performs the actual DB update
    // and notifies listeners.
    private static Future<?> markItemRead(final Context context, final long itemId,
                                          final boolean read, final long mediaId,
                                          final boolean resetMediaPosition) {
        return dbExec.submit(new Runnable() {
            @Override
            public void run() {
                final PodDBAdapter adapter = new PodDBAdapter(context);
                adapter.open();
                adapter.setFeedItemRead(read, itemId, mediaId,
                        resetMediaPosition);
                adapter.close();

                EventDistributor.getInstance().sendUnreadItemsUpdateBroadcast();
            }
        });
    }

    /**
     * Marks all 'new' FeedItems of a specific Feed as seen, i.e. sets their status to
     * UNPLAYED (the code queries the feed's new-item IDs and writes FeedItem.UNPLAYED —
     * it does NOT mark them as played/read).
     *
     * @param context A context that is used for opening a database connection.
     * @param feedId  ID of the Feed.
     */
    public static Future<?> markFeedSeen(final Context context, final long feedId) {
        return dbExec.submit(new Runnable() {
            @Override
            public void run() {
                final PodDBAdapter adapter = new PodDBAdapter(context);
                adapter.open();
                Cursor itemCursor = adapter.getNewItemsIdsCursor(feedId);
                long[] ids = new long[itemCursor.getCount()];
                itemCursor.moveToFirst();
                for (int i = 0; i < ids.length; i++) {
                    // NOTE(review): uses column 0 while sibling methods use
                    // PodDBAdapter.KEY_ID_INDEX — presumably the same column; confirm.
                    ids[i] = itemCursor.getLong(0);
                    itemCursor.moveToNext();
                }
                itemCursor.close();
                adapter.setFeedItemRead(FeedItem.UNPLAYED, ids);
                adapter.close();

                EventDistributor.getInstance().sendUnreadItemsUpdateBroadcast();
            }
        });
    }

    /**
     * Sets the 'read'-attribute of all FeedItems of a specific Feed to true.
     *
     * @param context A context that is used for opening a database connection.
     * @param feedId  ID of the Feed.
     */
    public static Future<?> markFeedRead(final Context context, final long feedId) {
        return dbExec.submit(new Runnable() {
            @Override
            public void run() {
                final PodDBAdapter adapter = new PodDBAdapter(context);
                adapter.open();
                Cursor itemCursor = adapter.getAllItemsOfFeedCursor(feedId);
                long[] itemIds = new long[itemCursor.getCount()];
                itemCursor.moveToFirst();
                for (int i = 0; i < itemIds.length; i++) {
                    itemIds[i] = itemCursor.getLong(PodDBAdapter.KEY_ID_INDEX);
                    itemCursor.moveToNext();
                }
                itemCursor.close();
                adapter.setFeedItemRead(FeedItem.PLAYED, itemIds);
                adapter.close();

                EventDistributor.getInstance().sendUnreadItemsUpdateBroadcast();
            }
        });
    }

    /**
     * Sets the 'read'-attribute of all FeedItems to true.
     *
     * @param context A context that is used for opening a database connection.
     */
    public static Future<?> markAllItemsRead(final Context context) {
        return dbExec.submit(new Runnable() {
            @Override
            public void run() {
                final PodDBAdapter adapter = new PodDBAdapter(context);
                adapter.open();
                Cursor itemCursor = adapter.getUnreadItemsCursor();
                long[] itemIds = new long[itemCursor.getCount()];
                itemCursor.moveToFirst();
                for (int i = 0; i < itemIds.length; i++) {
                    itemIds[i] = itemCursor.getLong(PodDBAdapter.KEY_ID_INDEX);
                    itemCursor.moveToNext();
                }
                itemCursor.close();
                adapter.setFeedItemRead(FeedItem.PLAYED, itemIds);
                adapter.close();

                EventDistributor.getInstance().sendUnreadItemsUpdateBroadcast();
            }
        });
    }

    /**
     * Stores new feeds in the database and, if enabled, reports the additions to gpodder.net.
     * Also notifies Android's backup framework that app data changed.
     */
    static Future<?> addNewFeed(final Context context, final Feed... feeds) {
        return dbExec.submit(new Runnable() {
            @Override
            public void run() {
                final PodDBAdapter adapter = new PodDBAdapter(context);
                adapter.open();
                adapter.setCompleteFeed(feeds);
                adapter.close();

                if (ClientConfig.gpodnetCallbacks.gpodnetEnabled()) {
                    for (Feed feed : feeds) {
                        GpodnetPreferences.addAddedFeed(feed.getDownload_url());
                    }
                }

                BackupManager backupManager = new BackupManager(context);
                backupManager.dataChanged();
            }
        });
    }

    /**
     * Persists complete Feed objects (including their components) in the database.
     */
    static Future<?> setCompleteFeed(final Context context, final Feed... feeds) {
        return dbExec.submit(new Runnable() {
            @Override
            public void run() {
                PodDBAdapter adapter = new PodDBAdapter(context);
                adapter.open();
                adapter.setCompleteFeed(feeds);
                adapter.close();
            }
        });
    }

    /**
     * Saves a FeedMedia object in the database. This method will save all attributes of the FeedMedia object. The
     * contents of FeedComponent-attributes (e.g. the FeedMedia's 'item'-attribute) will not be saved.
     *
     * @param context A context that is used for opening a database connection.
     * @param media   The FeedMedia object.
     */
    public static Future<?> setFeedMedia(final Context context,
                                         final FeedMedia media) {
        return dbExec.submit(new Runnable() {
            @Override
            public void run() {
                PodDBAdapter adapter = new PodDBAdapter(context);
                adapter.open();
                adapter.setMedia(media);
                adapter.close();
            }
        });
    }

    /**
     * Saves the 'position' and 'duration' attributes of a FeedMedia object
     *
     * @param context A context that is used for opening a database connection.
     * @param media   The FeedMedia object.
     */
    public static Future<?> setFeedMediaPlaybackInformation(final Context context, final FeedMedia media) {
        return dbExec.submit(new Runnable() {
            @Override
            public void run() {
                PodDBAdapter adapter = new PodDBAdapter(context);
                adapter.open();
                adapter.setFeedMediaPlaybackInformation(media);
                adapter.close();
            }
        });
    }

    /**
     * Saves a FeedItem object in the database. This method will save all attributes of the FeedItem object including
     * the content of FeedComponent-attributes.
     *
     * @param context A context that is used for opening a database connection.
     * @param item    The FeedItem object.
     */
    public static Future<?> setFeedItem(final Context context,
                                        final FeedItem item) {
        return dbExec.submit(new Runnable() {
            @Override
            public void run() {
                PodDBAdapter adapter = new PodDBAdapter(context);
                adapter.open();
                adapter.setSingleFeedItem(item);
                adapter.close();
            }
        });
    }

    /**
     * Saves a FeedImage object in the database. This method will save all attributes of the FeedImage object. The
     * contents of FeedComponent-attributes (e.g. the FeedImages's 'feed'-attribute) will not be saved.
     *
     * @param context A context that is used for opening a database connection.
     * @param image   The FeedImage object.
     */
    public static Future<?> setFeedImage(final Context context,
                                         final FeedImage image) {
        return dbExec.submit(new Runnable() {
            @Override
            public void run() {
                PodDBAdapter adapter = new PodDBAdapter(context);
                adapter.open();
                adapter.setImage(image);
                adapter.close();
            }
        });
    }

    /**
     * Updates download URLs of feeds from a given Map. The key of the Map is the original URL of the feed
     * and the value is the updated URL
     */
    public static Future<?> updateFeedDownloadURLs(final Context context, final Map<String, String> urls) {
        return dbExec.submit(new Runnable() {
            @Override
            public void run() {
                PodDBAdapter adapter = new PodDBAdapter(context);
                adapter.open();
                for (String key : urls.keySet()) {
                    if (BuildConfig.DEBUG)
                        Log.d(TAG, "Replacing URL " + key + " with url " + urls.get(key));
                    adapter.setFeedDownloadUrl(key, urls.get(key));
                }
                adapter.close();
            }
        });
    }

    /**
     * Saves a FeedPreferences object in the database. The Feed ID of the FeedPreferences-object MUST NOT be 0.
     *
     * @param context     Used for opening a database connection.
     * @param preferences The FeedPreferences object.
     */
    public static Future<?> setFeedPreferences(final Context context, final FeedPreferences preferences) {
        return dbExec.submit(new Runnable() {
            @Override
            public void run() {
                PodDBAdapter adapter = new PodDBAdapter(context);
                adapter.open();
                adapter.setFeedPreferences(preferences);
                adapter.close();
                EventDistributor.getInstance().sendFeedUpdateBroadcast();
            }
        });
    }

    /** Returns true if the given list contains a FeedItem with the given ID (linear scan). */
    private static boolean itemListContains(List<FeedItem> items, long itemId) {
        for (FeedItem item : items) {
            if (item.getId() == itemId) {
                return true;
            }
        }
        return false;
    }

    /**
     * Saves the FlattrStatus of a FeedItem object in the database.
     *
     * @param context                A context that is used for opening a database connection.
     * @param item                   The FeedItem whose FlattrStatus should be saved.
     * @param startFlattrClickWorker true if FlattrClickWorker should be started after the FlattrStatus has been saved
     */
    public static Future<?> setFeedItemFlattrStatus(final Context context,
                                                    final FeedItem item,
                                                    final boolean startFlattrClickWorker) {
        return dbExec.submit(new Runnable() {
            @Override
            public void run() {
                PodDBAdapter adapter = new PodDBAdapter(context);
                adapter.open();
                adapter.setFeedItemFlattrStatus(item);
                adapter.close();
                if (startFlattrClickWorker) {
                    new FlattrClickWorker(context).executeAsync();
                }
            }
        });
    }

    /**
     * Saves the FlattrStatus of a Feed object in the database.
     *
     * @param startFlattrClickWorker true if FlattrClickWorker should be started after the FlattrStatus has been saved
     */
    private static Future<?> setFeedFlattrStatus(final Context context,
                                                 final Feed feed,
                                                 final boolean startFlattrClickWorker) {
        return dbExec.submit(new Runnable() {
            @Override
            public void run() {
                PodDBAdapter adapter = new PodDBAdapter(context);
                adapter.open();
                adapter.setFeedFlattrStatus(feed);
                adapter.close();
                if (startFlattrClickWorker) {
                    new FlattrClickWorker(context).executeAsync();
                }
            }
        });
    }

    /**
     * Saves if a feed's last update failed
     *
     * @param lastUpdateFailed true if last update failed
     */
    public static Future<?> setFeedLastUpdateFailed(final Context context,
                                                    final long feedId,
                                                    final boolean lastUpdateFailed) {
        return dbExec.submit(new Runnable() {
            @Override
            public void run() {
                PodDBAdapter adapter = new PodDBAdapter(context);
                adapter.open();
                adapter.setFeedLastUpdateFailed(feedId, lastUpdateFailed);
                adapter.close();
            }
        });
    }

    /**
     * format an url for querying the database
     * (strip a trailing '/' and apply percent-encoding; returns "" if UTF-8 encoding
     * is unavailable, which in practice never happens on Android)
     */
    private static String formatURIForQuery(String uri) {
        try {
            return URLEncoder.encode(uri.endsWith("/") ? uri.substring(0, uri.length() - 1) : uri, "UTF-8");
        } catch (UnsupportedEncodingException e) {
            Log.e(TAG, e.getMessage());
            return "";
        }
    }

    /**
     * Set flattr status of the passed thing (either a FeedItem or a Feed)
     *
     * @param context
     * @param thing
     * @param startFlattrClickWorker true if FlattrClickWorker should be started after the FlattrStatus has been saved
     * @return a Future for the DB update, or null for SimpleFlattrThing/unknown types
     *         (they have no database backing)
     */
    public static Future<?> setFlattredStatus(Context context, FlattrThing thing, boolean startFlattrClickWorker) {
        // must propagate this to back db
        if (thing instanceof FeedItem)
            return setFeedItemFlattrStatus(context, (FeedItem) thing, startFlattrClickWorker);
        else if (thing instanceof Feed)
            return setFeedFlattrStatus(context, (Feed) thing, startFlattrClickWorker);
        else if (thing instanceof SimpleFlattrThing) {
        } // SimpleFlattrThings are generated on the fly and do not have DB backing
        else
            Log.e(TAG, "flattrQueue processing - thing is neither FeedItem nor Feed nor SimpleFlattrThing");

        return null;
    }

    /**
     * Reset flattr status to unflattrd for all items
     */
    public static Future<?> clearAllFlattrStatus(final Context context) {
        Log.d(TAG, "clearAllFlattrStatus()");
        return dbExec.submit(new Runnable() {
            @Override
            public void run() {
                PodDBAdapter adapter = new PodDBAdapter(context);
                adapter.open();
                adapter.clearAllFlattrStatus();
                adapter.close();
            }
        });
    }

    /**
     * Set flattr status of the feeds/feeditems in flattrList to flattred at the given timestamp,
     * where the information has been retrieved from the flattr API
     */
    public static Future<?> setFlattredStatus(final Context context, final List<Flattr> flattrList) {
        Log.d(TAG, "setFlattredStatus to status retrieved from flattr api running with " + flattrList.size() + " items");
        // clear flattr status in db
        clearAllFlattrStatus(context);

        // submit list with flattred things having normalized URLs to db
        // (runs after the clear above because dbExec is single-threaded)
        return dbExec.submit(new Runnable() {
            @Override
            public void run() {
                PodDBAdapter adapter = new PodDBAdapter(context);
                adapter.open();
                for (Flattr flattr : flattrList) {
                    adapter.setItemFlattrStatus(formatURIForQuery(flattr.getThing().getUrl()),
                            new FlattrStatus(flattr.getCreated().getTime()));
                }
                adapter.close();
            }
        });
    }

    /**
     * Sort the FeedItems in the queue with the given Comparator.
     *
     * @param context         A context that is used for opening a database connection.
     * @param comparator      FeedItem comparator
     * @param broadcastUpdate true if this operation should trigger a QueueUpdateBroadcast. This option should be set to
     *                        false if the caller wants to avoid unexpected updates of the GUI.
     */
    public static Future<?> sortQueue(final Context context, final Comparator<FeedItem> comparator,
                                      final boolean broadcastUpdate) {
        return dbExec.submit(new Runnable() {
            @Override
            public void run() {
                final PodDBAdapter adapter = new PodDBAdapter(context);
                adapter.open();
                final List<FeedItem> queue = DBReader.getQueue(context, adapter);

                if (queue != null) {
                    Collections.sort(queue, comparator);
                    adapter.setQueue(queue);
                    if (broadcastUpdate) {
                        EventBus.getDefault().post(new QueueEvent(QueueEvent.Action.SORTED));
                    }
                } else {
                    Log.e(TAG, "sortQueue: Could not load queue");
                }
                adapter.close();
            }
        });
    }

    /**
     * Sets the 'auto_download'-attribute of specific FeedItem.
     *
     * @param context  A context that is used for opening a database connection.
     * @param feedItem FeedItem.
     */
    public static Future<?> setFeedItemAutoDownload(final Context context, final FeedItem feedItem,
                                                    final boolean autoDownload) {
        Log.d(TAG, "FeedItem[id=" + feedItem.getId() + "] SET auto_download " + autoDownload);
        return dbExec.submit(new Runnable() {
            @Override
            public void run() {
                final PodDBAdapter adapter = new PodDBAdapter(context);
                adapter.open();
                adapter.setFeedItemAutoDownload(feedItem, autoDownload);
                adapter.close();

                EventDistributor.getInstance().sendUnreadItemsUpdateBroadcast();
            }
        });
    }

    /**
     * Set filter of the feed
     *
     * @param context      Used for opening a database connection.
     * @param feedId       The feed's ID
     * @param filterValues Values that represent properties to filter by
     */
    public static Future<?> setFeedItemsFilter(final Context context, final long feedId,
                                               final List<String> filterValues) {
        Log.d(TAG, "setFeedFilter");
        return dbExec.submit(new Runnable() {
            @Override
            public void run() {
                PodDBAdapter adapter = new PodDBAdapter(context);
                adapter.open();
                adapter.setFeedItemFilter(feedId, filterValues);
                adapter.close();
                EventBus.getDefault().post(new FeedEvent(FeedEvent.Action.FILTER_CHANGED, feedId));
            }
        });
    }
}
package com.zimbra.qa.selenium.projects.ajax.tests.conversation.quickreply;

import java.util.List;

import org.testng.annotations.Test;

import com.zimbra.qa.selenium.framework.items.MailItem;
import com.zimbra.qa.selenium.framework.ui.*;
import com.zimbra.qa.selenium.framework.util.*;
import com.zimbra.qa.selenium.projects.ajax.core.PrefGroupMailByConversationTest;
import com.zimbra.qa.selenium.projects.ajax.ui.mail.*;

/**
 * Tests the "quick reply" Forward action on a conversation in conversation view.
 *
 * Each test provisions fresh accounts, sends one or more messages to the active
 * account via SOAP, opens the resulting conversation in the UI, quick-forwards a
 * specific message in the conversation to a destination account, and then verifies
 * via SOAP that only the destination account(s) received the forward.
 */
public class QuickForward extends PrefGroupMailByConversationTest {

    public QuickForward() {
        logger.info("New "+ QuickForward.class.getCanonicalName());
    }

    @Test( description = "Quick Reply (Forward) a conversation (1 message, 1 recipient)",
            groups = { "smoke" })
    public void QuickForward_01() throws HarnessException {

        ZimbraAccount destination = new ZimbraAccount();
        destination.provision();
        destination.authenticate();

        ZimbraAccount account1 = new ZimbraAccount();
        account1.provision();
        account1.authenticate();

        // Create the message data to be sent
        String subject = "subject" + ZimbraSeleniumProperties.getUniqueString();
        String content = "content" + ZimbraSeleniumProperties.getUniqueString();
        String forward = "quickforward" + ZimbraSeleniumProperties.getUniqueString();

        account1.soapSend(
                "<SendMsgRequest xmlns='urn:zimbraMail'>" +
                    "<m>" +
                        "<e t='t' a='"+ app.zGetActiveAccount().EmailAddress +"'/>" +
                        "<su>"+ subject +"</su>" +
                        "<mp ct='text/plain'>" +
                            "<content>"+ content +"</content>" +
                        "</mp>" +
                    "</m>" +
                "</SendMsgRequest>");

        // Click Get Mail button
        app.zPageMail.zToolbarPressButton(Button.B_GETMAIL);

        // Select the conversation
        DisplayConversation display = (DisplayConversation)app.zPageMail.zListItem(Action.A_LEFTCLICK, subject);

        // Get the list of messages in the conversation
        List<DisplayConversationMessage> messages = display.zListGetMessages();

        // Quick Forward the first (and only) message
        FormMailNew form = (FormMailNew)messages.get(0).zPressButton(Button.B_QUICK_REPLY_FORWARD);
        form.zFillField(FormMailNew.Field.To, destination.EmailAddress);
        form.zFillField(FormMailNew.Field.Body, forward);
        form.zToolbarPressButton(Button.B_SEND);

        MailItem mailItem;

        // Verify message is received by the destination
        mailItem = MailItem.importFromSOAP(destination,
                "subject:("+ subject +") from:("+ app.zGetActiveAccount().EmailAddress +")");
        ZAssert.assertNotNull(mailItem, "Verify the message is received by the destination");

        // Verify message is not received by the sender
        mailItem = MailItem.importFromSOAP(account1,
                "subject:("+ subject +") from:("+ app.zGetActiveAccount().EmailAddress +")");
        ZAssert.assertNull(mailItem, "Verify the message is not received by the original sender");

    }

    @Test( description = "Quick Reply (forward) a conversation (1 message, 2 recipients)",
            groups = { "functional" })
    public void QuickForward_02() throws HarnessException {

        ZimbraAccount destination1 = new ZimbraAccount();
        destination1.provision();
        destination1.authenticate();

        ZimbraAccount destination2 = new ZimbraAccount();
        destination2.provision();
        destination2.authenticate();

        ZimbraAccount account1 = new ZimbraAccount();
        account1.provision();
        account1.authenticate();

        ZimbraAccount account2 = new ZimbraAccount();
        account2.provision();
        account2.authenticate();

        // Create the message data to be sent
        String subject = "subject" + ZimbraSeleniumProperties.getUniqueString();
        String content = "content" + ZimbraSeleniumProperties.getUniqueString();
        String forward = "quickforward" + ZimbraSeleniumProperties.getUniqueString();

        account1.soapSend(
                "<SendMsgRequest xmlns='urn:zimbraMail'>" +
                    "<m>" +
                        "<e t='t' a='"+ account2.EmailAddress +"'/>" +
                        "<e t='t' a='"+ app.zGetActiveAccount().EmailAddress +"'/>" +
                        "<su>"+ subject +"</su>" +
                        "<mp ct='text/plain'>" +
                            "<content>"+ content +"</content>" +
                        "</mp>" +
                    "</m>" +
                "</SendMsgRequest>");

        // Click Get Mail button
        app.zPageMail.zToolbarPressButton(Button.B_GETMAIL);

        // Select the conversation
        DisplayConversation display = (DisplayConversation)app.zPageMail.zListItem(Action.A_LEFTCLICK, subject);

        // Get the list of messages in the conversation
        List<DisplayConversationMessage> messages = display.zListGetMessages();

        // Quick Forward to two destination accounts
        FormMailNew form = (FormMailNew)messages.get(0).zPressButton(Button.B_QUICK_REPLY_FORWARD);
        form.zFillField(FormMailNew.Field.To, destination1.EmailAddress + ";" + destination2.EmailAddress);
        form.zFillField(FormMailNew.Field.Body, forward);
        form.zToolbarPressButton(Button.B_SEND);

        MailItem mailItem;

        // Verify message is received by both destinations
        mailItem = MailItem.importFromSOAP(destination1,
                "subject:("+ subject +") from:("+ app.zGetActiveAccount().EmailAddress +")");
        ZAssert.assertNotNull(mailItem, "Verify the message is received by destination1");

        mailItem = MailItem.importFromSOAP(destination2,
                "subject:("+ subject +") from:("+ app.zGetActiveAccount().EmailAddress +")");
        ZAssert.assertNotNull(mailItem, "Verify the message is received by destination2");

        // Verify message is not received by the original sender or recipient
        mailItem = MailItem.importFromSOAP(account1,
                "subject:("+ subject +") from:("+ app.zGetActiveAccount().EmailAddress +")");
        ZAssert.assertNull(mailItem, "Verify the message is not received by account1");

        mailItem = MailItem.importFromSOAP(account2,
                "subject:("+ subject +") from:("+ app.zGetActiveAccount().EmailAddress +")");
        ZAssert.assertNull(mailItem, "Verify the message is not received by account2");

    }

    @Test( description = "Quick Reply (forward) a conversation (1 message, 1 recipient, 1 CC, 1 BCC)",
            groups = { "functional" })
    public void QuickForward_03() throws HarnessException {

        ZimbraAccount destination = new ZimbraAccount();
        destination.provision();
        destination.authenticate();

        ZimbraAccount account1 = new ZimbraAccount();
        account1.provision();
        account1.authenticate();

        ZimbraAccount account2 = new ZimbraAccount();
        account2.provision();
        account2.authenticate();

        ZimbraAccount account3 = new ZimbraAccount();
        account3.provision();
        account3.authenticate();

        ZimbraAccount account4 = new ZimbraAccount();
        account4.provision();
        account4.authenticate();

        // Create the message data to be sent
        String subject = "subject" + ZimbraSeleniumProperties.getUniqueString();
        String content = "content" + ZimbraSeleniumProperties.getUniqueString();
        String forward = "quickforward" + ZimbraSeleniumProperties.getUniqueString();

        // Original message: To account2, CC account3, BCC account4 and the active account
        account1.soapSend(
                "<SendMsgRequest xmlns='urn:zimbraMail'>" +
                    "<m>" +
                        "<e t='t' a='"+ account2.EmailAddress +"'/>" +
                        "<e t='c' a='"+ account3.EmailAddress +"'/>" +
                        "<e t='b' a='"+ account4.EmailAddress +"'/>" +
                        "<e t='b' a='"+ app.zGetActiveAccount().EmailAddress +"'/>" +
                        "<su>"+ subject +"</su>" +
                        "<mp ct='text/plain'>" +
                            "<content>"+ content +"</content>" +
                        "</mp>" +
                    "</m>" +
                "</SendMsgRequest>");

        // Click Get Mail button
        app.zPageMail.zToolbarPressButton(Button.B_GETMAIL);

        // Select the conversation
        DisplayConversation display = (DisplayConversation)app.zPageMail.zListItem(Action.A_LEFTCLICK, subject);

        // Get the list of messages in the conversation
        List<DisplayConversationMessage> messages = display.zListGetMessages();

        // Quick Forward
        FormMailNew form = (FormMailNew)messages.get(0).zPressButton(Button.B_QUICK_REPLY_FORWARD);
        form.zFillField(FormMailNew.Field.To, destination.EmailAddress);
        form.zFillField(FormMailNew.Field.Body, forward);
        form.zToolbarPressButton(Button.B_SEND);

        MailItem mailItem;

        // Verify message is received by the destination
        mailItem = MailItem.importFromSOAP(destination,
                "subject:("+ subject +") from:("+ app.zGetActiveAccount().EmailAddress +")");
        ZAssert.assertNotNull(mailItem, "Verify the message is received by the destination");

        // Verify message is not received by the sender nor the original To/CC/BCC recipients
        mailItem = MailItem.importFromSOAP(account1,
                "subject:("+ subject +") from:("+ app.zGetActiveAccount().EmailAddress +")");
        ZAssert.assertNull(mailItem, "Verify the message is not received by account1");

        mailItem = MailItem.importFromSOAP(account2,
                "subject:("+ subject +") from:("+ app.zGetActiveAccount().EmailAddress +")");
        ZAssert.assertNull(mailItem, "Verify the message is not received by account2");

        mailItem = MailItem.importFromSOAP(account3,
                "subject:("+ subject +") from:("+ app.zGetActiveAccount().EmailAddress +")");
        ZAssert.assertNull(mailItem, "Verify the message is not received by account3");

        mailItem = MailItem.importFromSOAP(account4,
                "subject:("+ subject +") from:("+ app.zGetActiveAccount().EmailAddress +")");
        ZAssert.assertNull(mailItem, "Verify the message is not received by account4");

    }

    @Test( description = "Quick Forward to a 3 message conversation - first message",
            groups = { "functional" })
    public void QuickForward_10() throws HarnessException {

        ZimbraAccount destination = new ZimbraAccount();
        destination.provision();
        destination.authenticate();

        ZimbraAccount account1 = new ZimbraAccount();
        account1.provision();
        account1.authenticate();

        ZimbraAccount account2 = new ZimbraAccount();
        account2.provision();
        account2.authenticate();

        ZimbraAccount account3 = new ZimbraAccount();
        account3.provision();
        account3.authenticate();

        // Create the message data to be sent
        String subject = "subject" + ZimbraSeleniumProperties.getUniqueString();
        String content1 = "onecontent" + ZimbraSeleniumProperties.getUniqueString();
        String content2 = "twocontent" + ZimbraSeleniumProperties.getUniqueString();
        String content3 = "threecontent" + ZimbraSeleniumProperties.getUniqueString();
        String forward = "quickforward" + ZimbraSeleniumProperties.getUniqueString();

        // Three messages with the same "RE:" subject form a single conversation
        account1.soapSend(
                "<SendMsgRequest xmlns='urn:zimbraMail'>" +
                    "<m>" +
                        "<e t='t' a='"+ app.zGetActiveAccount().EmailAddress +"'/>" +
                        "<su>RE: "+ subject +"</su>" +
                        "<mp ct='text/plain'>" +
                            "<content>"+ content1 +"</content>" +
                        "</mp>" +
                    "</m>" +
                "</SendMsgRequest>");

        account2.soapSend(
                "<SendMsgRequest xmlns='urn:zimbraMail'>" +
                    "<m>" +
                        "<e t='t' a='"+ app.zGetActiveAccount().EmailAddress +"'/>" +
                        "<su>RE: "+ subject +"</su>" +
                        "<mp ct='text/plain'>" +
                            "<content>"+ content2 +"</content>" +
                        "</mp>" +
                    "</m>" +
                "</SendMsgRequest>");

        account3.soapSend(
                "<SendMsgRequest xmlns='urn:zimbraMail'>" +
                    "<m>" +
                        "<e t='t' a='"+ app.zGetActiveAccount().EmailAddress +"'/>" +
                        "<su>RE: "+ subject +"</su>" +
                        "<mp ct='text/plain'>" +
                            "<content>"+ content3 +"</content>" +
                        "</mp>" +
                    "</m>" +
                "</SendMsgRequest>");

        // Click Get Mail button
        app.zPageMail.zToolbarPressButton(Button.B_GETMAIL);

        // Select the conversation
        DisplayConversation display = (DisplayConversation)app.zPageMail.zListItem(Action.A_LEFTCLICK, subject);

        // Get the list of messages in the conversation
        List<DisplayConversationMessage> messages = display.zListGetMessages();

        // Quick Forward the first message in the conversation
        FormMailNew form = (FormMailNew)messages.get(0).zPressButton(Button.B_QUICK_REPLY_FORWARD);
        form.zFillField(FormMailNew.Field.To, destination.EmailAddress);
        form.zFillField(FormMailNew.Field.Body, forward);
        form.zToolbarPressButton(Button.B_SEND);

        MailItem mailitem;

        // Verify message is received by destination
        mailitem = MailItem.importFromSOAP(destination,
                "subject:("+ subject +") from:("+ app.zGetActiveAccount().EmailAddress +")");
        ZAssert.assertNotNull(mailitem, "Verify message is received by the destination");

        // Verify message is not received by account1, account2, nor account3
        mailitem = MailItem.importFromSOAP(account3,
                "subject:("+ subject +") from:("+ app.zGetActiveAccount().EmailAddress +")");
        ZAssert.assertNull(mailitem, "Verify message is not received by account3");

        mailitem = MailItem.importFromSOAP(account2,
                "subject:("+ subject +") from:("+ app.zGetActiveAccount().EmailAddress +")");
        ZAssert.assertNull(mailitem, "Verify message is not received by account2");

        mailitem = MailItem.importFromSOAP(account1,
                "subject:("+ subject +") from:("+ app.zGetActiveAccount().EmailAddress +")");
        ZAssert.assertNull(mailitem, "Verify message is not received by account1");

    }

    @Test( description = "Quick Forward to a 3 message conversation - middle message",
            groups = { "functional" })
    public void QuickForward_11() throws HarnessException {

        ZimbraAccount destination = new ZimbraAccount();
        destination.provision();
        destination.authenticate();

        ZimbraAccount account1 = new ZimbraAccount();
        account1.provision();
        account1.authenticate();

        ZimbraAccount account2 = new ZimbraAccount();
        account2.provision();
        account2.authenticate();

        ZimbraAccount account3 = new ZimbraAccount();
        account3.provision();
        account3.authenticate();

        // Create the message data to be sent
        String subject = "subject" + ZimbraSeleniumProperties.getUniqueString();
        String content1 = "onecontent" + ZimbraSeleniumProperties.getUniqueString();
        String content2 = "twocontent" + ZimbraSeleniumProperties.getUniqueString();
        String content3 = "threecontent" + ZimbraSeleniumProperties.getUniqueString();
        String forward = "quickforward" + ZimbraSeleniumProperties.getUniqueString();

        account1.soapSend(
                "<SendMsgRequest xmlns='urn:zimbraMail'>" +
                    "<m>" +
                        "<e t='t' a='"+ app.zGetActiveAccount().EmailAddress +"'/>" +
                        "<su>RE: "+ subject +"</su>" +
                        "<mp ct='text/plain'>" +
                            "<content>"+ content1 +"</content>" +
                        "</mp>" +
                    "</m>" +
                "</SendMsgRequest>");

        account2.soapSend(
                "<SendMsgRequest xmlns='urn:zimbraMail'>" +
                    "<m>" +
                        "<e t='t' a='"+ app.zGetActiveAccount().EmailAddress +"'/>" +
                        "<su>RE: "+ subject +"</su>" +
                        "<mp ct='text/plain'>" +
                            "<content>"+ content2 +"</content>" +
                        "</mp>" +
                    "</m>" +
                "</SendMsgRequest>");

        account3.soapSend(
                "<SendMsgRequest xmlns='urn:zimbraMail'>" +
                    "<m>" +
                        "<e t='t' a='"+ app.zGetActiveAccount().EmailAddress +"'/>" +
                        "<su>RE: "+ subject +"</su>" +
                        "<mp ct='text/plain'>" +
                            "<content>"+ content3 +"</content>" +
                        "</mp>" +
                    "</m>" +
                "</SendMsgRequest>");

        // Click Get Mail button
        app.zPageMail.zToolbarPressButton(Button.B_GETMAIL);

        // Select the conversation
        DisplayConversation display = (DisplayConversation)app.zPageMail.zListItem(Action.A_LEFTCLICK, subject);

        // Get the list of messages in the conversation
        List<DisplayConversationMessage> messages = display.zListGetMessages();

        // Quick Forward the MIDDLE message (was get(0), which duplicated QuickForward_10)
        FormMailNew form = (FormMailNew)messages.get(1).zPressButton(Button.B_QUICK_REPLY_FORWARD);
        form.zFillField(FormMailNew.Field.To, destination.EmailAddress);
        form.zFillField(FormMailNew.Field.Body, forward);
        form.zToolbarPressButton(Button.B_SEND);

        MailItem mailitem;

        // Verify message is received by destination
        mailitem = MailItem.importFromSOAP(destination,
                "subject:("+ subject +") from:("+ app.zGetActiveAccount().EmailAddress +")");
        ZAssert.assertNotNull(mailitem, "Verify message is received by the destination");

        // Verify message is not received by account1, account2, nor account3
        mailitem = MailItem.importFromSOAP(account3,
                "subject:("+ subject +") from:("+ app.zGetActiveAccount().EmailAddress +")");
        ZAssert.assertNull(mailitem, "Verify message is not received by account3");

        mailitem = MailItem.importFromSOAP(account2,
                "subject:("+ subject +") from:("+ app.zGetActiveAccount().EmailAddress +")");
        ZAssert.assertNull(mailitem, "Verify message is not received by account2");

        mailitem = MailItem.importFromSOAP(account1,
                "subject:("+ subject +") from:("+ app.zGetActiveAccount().EmailAddress +")");
        ZAssert.assertNull(mailitem, "Verify message is not received by account1");

    }

    @Test( description = "Quick Forward to a 3 message conversation - last message",
            groups = { "functional" })
    public void QuickForward_12() throws HarnessException {

        ZimbraAccount destination = new ZimbraAccount();
        destination.provision();
        destination.authenticate();

        ZimbraAccount account1 = new ZimbraAccount();
        account1.provision();
        account1.authenticate();

        ZimbraAccount account2 = new ZimbraAccount();
        account2.provision();
        account2.authenticate();

        ZimbraAccount account3 = new ZimbraAccount();
        account3.provision();
        account3.authenticate();

        // Create the message data to be sent
        String subject = "subject" + ZimbraSeleniumProperties.getUniqueString();
        String content1 = "onecontent" + ZimbraSeleniumProperties.getUniqueString();
        String content2 = "twocontent" + ZimbraSeleniumProperties.getUniqueString();
        String content3 = "threecontent" + ZimbraSeleniumProperties.getUniqueString();
        String forward = "quickforward" + ZimbraSeleniumProperties.getUniqueString();

        account1.soapSend(
                "<SendMsgRequest xmlns='urn:zimbraMail'>" +
                    "<m>" +
                        "<e t='t' a='"+ app.zGetActiveAccount().EmailAddress +"'/>" +
                        "<su>RE: "+ subject +"</su>" +
                        "<mp ct='text/plain'>" +
                            "<content>"+ content1 +"</content>" +
                        "</mp>" +
                    "</m>" +
                "</SendMsgRequest>");

        account2.soapSend(
                "<SendMsgRequest xmlns='urn:zimbraMail'>" +
                    "<m>" +
                        "<e t='t' a='"+ app.zGetActiveAccount().EmailAddress +"'/>" +
                        "<su>RE: "+ subject +"</su>" +
                        "<mp ct='text/plain'>" +
                            "<content>"+ content2 +"</content>" +
                        "</mp>" +
                    "</m>" +
                "</SendMsgRequest>");

        account3.soapSend(
                "<SendMsgRequest xmlns='urn:zimbraMail'>" +
                    "<m>" +
                        "<e t='t' a='"+ app.zGetActiveAccount().EmailAddress +"'/>" +
                        "<su>RE: "+ subject +"</su>" +
                        "<mp ct='text/plain'>" +
                            "<content>"+ content3 +"</content>" +
                        "</mp>" +
                    "</m>" +
                "</SendMsgRequest>");

        // Click Get Mail button
        app.zPageMail.zToolbarPressButton(Button.B_GETMAIL);

        // Select the conversation
        DisplayConversation display = (DisplayConversation)app.zPageMail.zListItem(Action.A_LEFTCLICK, subject);

        // Get the list of messages in the conversation
        List<DisplayConversationMessage> messages = display.zListGetMessages();

        // Quick Forward the LAST message (was get(0), which duplicated QuickForward_10)
        FormMailNew form = (FormMailNew)messages.get(messages.size() - 1).zPressButton(Button.B_QUICK_REPLY_FORWARD);
        form.zFillField(FormMailNew.Field.To, destination.EmailAddress);
        form.zFillField(FormMailNew.Field.Body, forward);
        form.zToolbarPressButton(Button.B_SEND);

        MailItem mailitem;

        // Verify message is received by destination
        mailitem = MailItem.importFromSOAP(destination,
                "subject:("+ subject +") from:("+ app.zGetActiveAccount().EmailAddress +")");
        ZAssert.assertNotNull(mailitem, "Verify message is received by the destination");

        // Verify message is not received by account1, account2, nor account3
        mailitem = MailItem.importFromSOAP(account3,
                "subject:("+ subject +") from:("+ app.zGetActiveAccount().EmailAddress +")");
        ZAssert.assertNull(mailitem, "Verify message is not received by account3");

        mailitem = MailItem.importFromSOAP(account2,
                "subject:("+ subject +") from:("+ app.zGetActiveAccount().EmailAddress +")");
        ZAssert.assertNull(mailitem, "Verify message is not received by account2");

        mailitem = MailItem.importFromSOAP(account1,
                "subject:("+ subject +") from:("+ app.zGetActiveAccount().EmailAddress +")");
        ZAssert.assertNull(mailitem, "Verify message is not received by account1");

    }

}
package org.broadinstitute.hellbender.tools.spark.sv.discovery.inference; import com.google.common.annotations.VisibleForTesting; import htsjdk.variant.variantcontext.Allele; import htsjdk.variant.variantcontext.VariantContext; import htsjdk.variant.variantcontext.VariantContextBuilder; import htsjdk.variant.vcf.VCFConstants; import org.apache.commons.lang3.math.NumberUtils; import org.apache.spark.api.java.JavaRDD; import org.apache.spark.broadcast.Broadcast; import org.broadinstitute.hellbender.engine.BasicReference; import org.broadinstitute.hellbender.engine.spark.datasources.ReferenceMultiSparkSource; import org.broadinstitute.hellbender.tools.spark.sv.discovery.SimpleSVType; import org.broadinstitute.hellbender.tools.spark.sv.discovery.SvDiscoverFromLocalAssemblyContigAlignmentsSpark; import org.broadinstitute.hellbender.tools.spark.sv.discovery.SvDiscoveryInputMetaData; import org.broadinstitute.hellbender.tools.spark.sv.discovery.alignment.AlignedContig; import org.broadinstitute.hellbender.tools.spark.sv.discovery.alignment.AssemblyContigWithFineTunedAlignments; import org.broadinstitute.hellbender.tools.spark.sv.utils.SVInterval; import org.broadinstitute.hellbender.tools.spark.sv.utils.SVUtils; import org.broadinstitute.hellbender.utils.SimpleInterval; import org.broadinstitute.hellbender.utils.read.GATKRead; import scala.Tuple2; import scala.Tuple3; import java.io.Serializable; import java.util.*; import java.util.stream.Collectors; import java.util.stream.IntStream; import java.util.stream.Stream; import static org.broadinstitute.hellbender.tools.spark.sv.StructuralVariationDiscoveryArgumentCollection.STRUCTURAL_VARIANT_SIZE_LOWER_BOUND; import static org.broadinstitute.hellbender.tools.spark.sv.utils.GATKSVVCFConstants.*; /** * For extracting simple variants from input GATK-SV complex variants. 
 *
 * Some explanation on several concepts:
 *
 * <p>
 * Anchor ref base:
 * anchor base is defined per-VCF spec (see 1.4.1#REF version 4.2), that is, for DEL and INS variants
 * the reference base at the position pointed to by POS, basically:
 * for DEL, the reference bases immediately following POS are deleted (up to and including the END base),
 * for INS, the sequence annotated in INSSEQ is inserted immediately after POS.
 * </p>
 *
 * <p>
 * "Fat" insertion:
 * they exist because sometimes we have micro deletions surrounding the insertion breakpoint,
 * so here the strategy is to report them as "fat", i.e. the anchor base and deleted bases are reported in REF;
 * they are "fat" in the sense that, compared to simple insertions, more than a single anchor ref base is reported.
 * </p>
 *
 * <p>
 * It is also assumed that POS and END of the input complex {@link VariantContext} are the boundaries
 * of the bases where the REF and ALT alleles share similarity; in other words,
 * immediately after POS and before END is where the REF and ALT alleles differ, and the two paths merge at POS/END.
* </p> */ public abstract class SegmentedCpxVariantSimpleVariantExtractor implements Serializable { private static final long serialVersionUID = 1L; private static int EVENT_SIZE_THRESHOLD = STRUCTURAL_VARIANT_SIZE_LOWER_BOUND - 1; private static final String CPX_DERIVED_POSTFIX_STRING = "CPX_DERIVED"; private static String makeID(final String typeName, final String chr, final int start, final int stop) { return typeName + INTERVAL_VARIANT_ID_FIELD_SEPARATOR + chr + INTERVAL_VARIANT_ID_FIELD_SEPARATOR + start + INTERVAL_VARIANT_ID_FIELD_SEPARATOR + stop + INTERVAL_VARIANT_ID_FIELD_SEPARATOR + CPX_DERIVED_POSTFIX_STRING; } public static final class ExtractedSimpleVariants { private final List<VariantContext> reInterpretZeroOrOneSegmentCalls; private final List<VariantContext> reInterpretMultiSegmentsCalls; public ExtractedSimpleVariants(final List<VariantContext> reInterpretZeroOrOneSegmentCalls, final List<VariantContext> reInterpretMultiSegmentsCalls) { this.reInterpretZeroOrOneSegmentCalls = reInterpretZeroOrOneSegmentCalls; this.reInterpretMultiSegmentsCalls = reInterpretMultiSegmentsCalls; } public List<VariantContext> getReInterpretZeroOrOneSegmentCalls() { return reInterpretZeroOrOneSegmentCalls; } public List<VariantContext> getReInterpretMultiSegmentsCalls() { return reInterpretMultiSegmentsCalls; } public List<VariantContext> getMergedReinterpretedCalls() { final ArrayList<VariantContext> merged = new ArrayList<>(reInterpretZeroOrOneSegmentCalls); merged.addAll(reInterpretMultiSegmentsCalls); return merged; } } // main interface to user code public static ExtractedSimpleVariants extract(final JavaRDD<VariantContext> complexVariants, final SvDiscoveryInputMetaData svDiscoveryInputMetaData, final JavaRDD<GATKRead> assemblyRawAlignments) { final Broadcast<ReferenceMultiSparkSource> referenceBroadcast = svDiscoveryInputMetaData.getReferenceData().getReferenceBroadcast(); // still does an in-efficient 2-pass on the input RDD: 1 pass for zero- and one-segment 
calls, the other for multi-segment calls // that was due to restriction from how multi-segment calls are to be re-interpreted final ZeroAndOneSegmentCpxVariantExtractor zeroAndOneSegmentCpxVariantExtractor = new ZeroAndOneSegmentCpxVariantExtractor(); final JavaRDD<VariantContext> zeroOrOneSegmentComplexVariants = complexVariants .filter(vc -> SVUtils.getAttributeAsStringList(vc, CPX_SV_REF_SEGMENTS).size() < 2) .cache(); final List<VariantContext> reInterpretedZeroAndOneSegmentCalls = zeroOrOneSegmentComplexVariants .flatMap(vc -> zeroAndOneSegmentCpxVariantExtractor.extract(vc, referenceBroadcast.getValue()).iterator()) .collect(); zeroOrOneSegmentComplexVariants.unpersist(false); final JavaRDD<VariantContext> multiSegmentCalls = complexVariants.filter(vc -> SVUtils.getAttributeAsStringList(vc, CPX_SV_REF_SEGMENTS).size() > 1) .cache(); final MultiSegmentsCpxVariantExtractor multiSegmentsCpxVariantExtractor = new MultiSegmentsCpxVariantExtractor(); final List<VariantContext> sourceWithLessAnnotations = multiSegmentCalls .flatMap(vc -> multiSegmentsCpxVariantExtractor.extract(vc, referenceBroadcast.getValue()).iterator()).collect(); final List<VariantContext> sourceWithMoreAnnotations = reInterpretMultiSegmentComplexVarThroughAlignmentPairIteration(multiSegmentCalls, svDiscoveryInputMetaData, assemblyRawAlignments); final List<VariantContext> reInterpretMultiSegmentsCalls = removeDuplicates(sourceWithLessAnnotations, sourceWithMoreAnnotations); multiSegmentCalls.unpersist(false); return new ExtractedSimpleVariants(reInterpretedZeroAndOneSegmentCalls, reInterpretMultiSegmentsCalls); } //================================================================================================================== public static final class RelevantAttributes implements Serializable { private static final long serialVersionUID = 1L; private final String id; private final List<SimpleInterval> referenceSegments; private final List<String> altArrangements; public 
RelevantAttributes(final VariantContext multiSegmentComplexVar) {
            id = multiSegmentComplexVar.getID();
            referenceSegments = SVUtils.getAttributeAsStringList(multiSegmentComplexVar, CPX_SV_REF_SEGMENTS)
                    .stream().map(SimpleInterval::new).collect(Collectors.toList());
            altArrangements = SVUtils.getAttributeAsStringList(multiSegmentComplexVar, CPX_EVENT_ALT_ARRANGEMENTS);
        }
    }

    /**
     * Send relevant contigs for re-interpretation via the pair-iteration way of scanning the alignments for interpretation.
     *
     * Re-interpret CPX vcf records whose
     * {@link org.broadinstitute.hellbender.tools.spark.sv.utils.GATKSVVCFConstants#CPX_SV_REF_SEGMENTS}
     * has more than one entry, aka "multi-segment" calls.
     *
     * Exists basically to extract insertions, because
     * deletions and inversions are relatively easy to be extracted by
     * {@link org.broadinstitute.hellbender.tools.spark.sv.discovery.inference.SegmentedCpxVariantSimpleVariantExtractor.MultiSegmentsCpxVariantExtractor}
     *
     * @return the {@link SimpleSVType}-d variants extracted from the input that are consistent with the description in the complex variants
     */
    public static List<VariantContext> reInterpretMultiSegmentComplexVarThroughAlignmentPairIteration(
            final JavaRDD<VariantContext> multiSegmentCalls,
            final SvDiscoveryInputMetaData svDiscoveryInputMetaData,
            final JavaRDD<GATKRead> assemblyRawAlignments) {

        // Map each assembly contig name to the attributes of the CPX variant(s) it supports.
        final Map<String, RelevantAttributes> contigNameToCpxVariantAttributes = multiSegmentCalls
                .flatMapToPair(complex -> {
                    final RelevantAttributes relevantAttributes = new RelevantAttributes(complex);
                    return SVUtils.getAttributeAsStringList(complex, CONTIG_NAMES).stream()
                            .map(name -> new Tuple2<>(name, relevantAttributes))
                            .iterator();
                })
                .collectAsMap();

        // resend the relevant contigs through the pair-iteration-ed path
        final Set<String> relevantContigs = new HashSet<>( contigNameToCpxVariantAttributes.keySet() );
        final JavaRDD<GATKRead> relevantAlignments = assemblyRawAlignments.filter(read -> relevantContigs.contains(read.getName()));
        final JavaRDD<AlignedContig> analysisReadyContigs =
                SvDiscoverFromLocalAssemblyContigAlignmentsSpark
                        .preprocess(svDiscoveryInputMetaData, relevantAlignments)
                        .getContigsWithSignatureClassifiedAsComplex()
                        .map(AssemblyContigWithFineTunedAlignments::getSourceContig);

        List<VariantContext> pairIterationReInterpreted = ContigChimericAlignmentIterativeInterpreter
                .discoverVariantsFromChimeras(svDiscoveryInputMetaData, analysisReadyContigs);

        // Keep only calls consistent with their source CPX variant; tag survivors with the CPX event ID(s),
        // then normalize short DUPs to INS and break replacement DELs into fat-INS or INS+DEL.
        final Broadcast<ReferenceMultiSparkSource> referenceBroadcast = svDiscoveryInputMetaData.getReferenceData().getReferenceBroadcast();
        return pairIterationReInterpreted.stream()
                .map(vc -> {
                    final List<String> consistentComplexVariantIDs =
                            SVUtils.getAttributeAsStringList(vc, CONTIG_NAMES).stream()
                                    .map(contigNameToCpxVariantAttributes::get)
                                    .filter(attributes -> isConsistentWithCPX(vc, attributes))
                                    .map(attributes -> attributes.id)
                                    .collect(Collectors.toList());
                    if ( consistentComplexVariantIDs.isEmpty()) {
                        return null;
                    } else {
                        return new VariantContextBuilder(vc)
                                .id(vc.getID() + INTERVAL_VARIANT_ID_FIELD_SEPARATOR
                                        + CPX_DERIVED_POSTFIX_STRING)
                                .attribute(CPX_EVENT_KEY,
                                        String.join(VCFConstants.INFO_FIELD_ARRAY_SEPARATOR, consistentComplexVariantIDs))
                                .make();
                    }
                })
                .filter(Objects::nonNull)
                .map(SegmentedCpxVariantSimpleVariantExtractor::postProcessConvertShortDupToIns)
                .flatMap(simple -> postProcessConvertReplacementToFatInsOrInsAndDel(simple, referenceBroadcast.getValue()))
                .collect(Collectors.toList());
    }

    // Same consistency filtering + post-processing as above, but driven by a pre-built
    // contig-name-to-attributes map and an explicit reference.
    public static List<VariantContext> filterForConsistency( final List<VariantContext> variants,
                                                             final Map<String, RelevantAttributes> contigNameToCpxVariantAttributes,
                                                             final BasicReference reference ) {
        return variants.stream()
                .map(vc -> {
                    final List<String> consistentComplexVariantIDs =
                            SVUtils.getAttributeAsStringList(vc, CONTIG_NAMES).stream()
                                    .map(contigNameToCpxVariantAttributes::get)
                                    .filter(attributes -> isConsistentWithCPX(vc, attributes))
                                    .map(attributes -> attributes.id)
                                    .collect(Collectors.toList());
                    if (
consistentComplexVariantIDs.isEmpty()) {
                        return null;
                    } else {
                        return new VariantContextBuilder(vc)
                                .id(vc.getID() + INTERVAL_VARIANT_ID_FIELD_SEPARATOR
                                        + CPX_DERIVED_POSTFIX_STRING)
                                .attribute(CPX_EVENT_KEY,
                                        String.join(VCFConstants.INFO_FIELD_ARRAY_SEPARATOR, consistentComplexVariantIDs))
                                .make();
                    }
                })
                .filter(Objects::nonNull)
                .map(SegmentedCpxVariantSimpleVariantExtractor::postProcessConvertShortDupToIns)
                .flatMap(simple -> postProcessConvertReplacementToFatInsOrInsAndDel(simple, reference))
                .collect(Collectors.toList());
    }

    /**
     * Convert short, i.e. duplicated range is < 50 bp, duplication call to insertion call.
     */
    @VisibleForTesting
    static VariantContext postProcessConvertShortDupToIns(final VariantContext simple) {
        final String type = simple.getAttributeAsString(SVTYPE, "");
        if ( type.equals(SimpleSVType.SupportedType.DUP.name()) ) {
            final SimpleInterval duplicatedRegion = new SimpleInterval(simple.getAttributeAsString(DUP_REPEAT_UNIT_REF_SPAN, ""));
            if (duplicatedRegion.size() > EVENT_SIZE_THRESHOLD) {
                // Large enough to stand as a DUP — leave untouched.
                return simple;
            } else {
                // Re-type the record as INS, keeping the same reference allele.
                return new VariantContextBuilder(simple)
                        .alleles(Arrays.asList(simple.getReference(), altSymbAlleleIns))
                        .rmAttribute(SVTYPE)
                        .attribute(SVTYPE, SimpleSVType.SupportedType.INS.name())
                        .make();
            }
        } else
            return simple;
    }

    /**
     * Pair-iteration way of extracting simple variants reports replacement calls as a single DEL with
     * inserted sequence annotations.
     * This utility breaks that into:
     * when the inserted sequence is long enough, an extra insertion call
     * when the deleted range is not long enough, replace with fat insertion.
     */
    @VisibleForTesting
    static Stream<VariantContext> postProcessConvertReplacementToFatInsOrInsAndDel(final VariantContext simple,
                                                                                   final BasicReference reference) {
        final String type = simple.getAttributeAsString(SVTYPE, "");
        if ( type.equals(SimpleSVType.SupportedType.DEL.name()) ) {
            // SVLEN is negative for deletions; negate to get the deleted length.
            final int deletionLen = - simple.getAttributeAsInt(SVLEN, 0);
            final int insLen = simple.getAttributeAsInt(INSERTED_SEQUENCE_LENGTH, 0);
            if (insLen > EVENT_SIZE_THRESHOLD && deletionLen > EVENT_SIZE_THRESHOLD) {
                // case 1: insertion and deletion, linked
                final Map<String, Object> attributes = new HashMap<>( simple.getAttributes() );
                attributes.remove(INSERTED_SEQUENCE_MAPPINGS);
                attributes.remove(SVLEN);
                attributes.remove(SVTYPE);

                VariantContextBuilder newInsertion = makeInsertion(simple.getContig(), simple.getStart(), simple.getStart(), insLen, simple.getReference());
                attributes.forEach(newInsertion::attribute);
                newInsertion.rmAttribute(HOMOLOGY).rmAttribute(HOMOLOGY_LENGTH);
                newInsertion.rmAttribute(VCFConstants.END_KEY).attribute(VCFConstants.END_KEY, simple.getStart());

                VariantContextBuilder newDeletion = makeDeletion(new SimpleInterval(simple.getContig(), simple.getStart(), simple.getEnd()), simple.getReference());
                attributes.forEach(newDeletion::attribute);
                newDeletion.rmAttribute(INSERTED_SEQUENCE).rmAttribute(INSERTED_SEQUENCE_LENGTH).rmAttribute(SEQ_ALT_HAPLOTYPE);

                // cross linking
                newInsertion.attribute(LINK, makeID(SimpleSVType.SupportedType.DEL.name(), simple.getContig(), simple.getStart(), simple.getEnd()));
                newDeletion.attribute(LINK, makeID(SimpleSVType.SupportedType.INS.name(), simple.getContig(), simple.getStart(), simple.getStart()));

                return Stream.of(newDeletion.make(), newInsertion.make());
            } else if (insLen > EVENT_SIZE_THRESHOLD && deletionLen <= EVENT_SIZE_THRESHOLD) {
                // case 2: insertion with micro deletion
                String fatInsertionID = simple.getID().replace("DEL", "INS");
                final Map<String, Object> attributes = new HashMap<>( simple.getAttributes() );
                attributes.remove(INSERTED_SEQUENCE_MAPPINGS);
                attributes.remove(HOMOLOGY_LENGTH);
                attributes.remove(HOMOLOGY);
                attributes.remove(SVLEN);
                attributes.remove(SVTYPE);
                // "Fat" insertion: REF carries the anchor base plus the micro-deleted bases.
                byte[] referenceBases = reference.getBases(new SimpleInterval(simple.getContig(), simple.getStart(), simple.getEnd()));
                VariantContextBuilder fatInsertion = makeInsertion(simple.getContig(), simple.getStart(), simple.getEnd(), insLen,
                        Allele.create(referenceBases, true));
                attributes.forEach(fatInsertion::attribute);
                fatInsertion.id(fatInsertionID);
                return Stream.of(fatInsertion.make());
            } else if (insLen <= EVENT_SIZE_THRESHOLD && deletionLen > EVENT_SIZE_THRESHOLD) {
                // case 3:deletion with micro insertion
                return Stream.of(simple);
            } else { // case 4: neither is large enough, rare but possible
                return Stream.empty();
            }
        } else
            return Stream.of(simple);
    }

    // TODO: 3/26/18 here we check consistency only for DEL calls, and reject all INV calls (they will be extracted via MultiSegmentsCpxVariantExtractor), and INS consistency check is difficult
    /**
     * @param simple        simple variant derived from pair-iteration logic that is to be checked
     * @param attributes    source CPX variant attributes
     */
    @VisibleForTesting
    static boolean isConsistentWithCPX(final VariantContext simple,
                                       final RelevantAttributes attributes) {

        final String typeString = simple.getAttributeAsString(SVTYPE, "");

        if ( typeString.equals(SimpleSVType.SupportedType.DEL.name()) ) {
            final List<SimpleInterval> refSegments = attributes.referenceSegments;
            final List<String> altArrangement = attributes.altArrangements;

            final Tuple3<Set<SimpleInterval>, Set<Integer>, List<Integer>> missingAndPresentAndInvertedSegments =
                    getMissingAndPresentAndInvertedSegments(refSegments, altArrangement);
            final Set<SimpleInterval> missingSegments = missingAndPresentAndInvertedSegments._1();

            return deletionConsistencyCheck(simple, missingSegments);
        } else if ( typeString.equals(SimpleSVType.SupportedType.INV.name()) ) {
            // Inversions from pair-iteration are always rejected here; they are extracted
            // by MultiSegmentsCpxVariantExtractor instead (see TODO above).
            return false;
        } else
            return true;
    }

    @VisibleForTesting
static boolean deletionConsistencyCheck(final VariantContext simple, final Set<SimpleInterval> missingSegments) { if (missingSegments.isEmpty()) return false; final SimpleInterval deletedRange = new SimpleInterval(simple.getContig(), simple.getStart() + 1, simple.getEnd()); // dummy number for chr to be used in constructing SVInterval, since 2 input AI's both map to the same chr by this point final int dummyChr = 0; final SVInterval intervalOne = new SVInterval(dummyChr, deletedRange.getStart() - 1, deletedRange.getEnd()); for (final SimpleInterval missing : missingSegments) { if ( ! missing.overlaps(deletedRange) ) return false; final SVInterval intervalTwo = new SVInterval(dummyChr, missing.getStart() - 1, missing.getEnd()); // allow 1-base fuzziness from either end if ( Math.abs(missing.size() - deletedRange.size()) > 2 ) return false; if( 2 >= Math.abs( Math.min(missing.size(), deletedRange.size()) - intervalTwo.overlapLen(intervalOne) ) ){ return true; } } return false; } /** * Exist for equals() and hashCode() */ private static final class AnnotatedInterval { private final VariantContext sourceVC; // NOTE: omitted in equals() and hashCode() on purpose final SimpleInterval interval; final String id; final String type; final int svlen; final List<Allele> alleles; private AnnotatedInterval(final VariantContext vc) { sourceVC = vc; interval = new SimpleInterval( vc.getContig(), vc.getStart(), vc.getEnd()); id = vc.getID(); type = vc.getAttributeAsString(SVTYPE, ""); svlen = vc.getAttributeAsInt(SVLEN, 0); alleles = vc.getAlleles(); } @Override public boolean equals(final Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; final AnnotatedInterval interval1 = (AnnotatedInterval) o; if (svlen != interval1.svlen) return false; if (!interval.equals(interval1.interval)) return false; if (!id.equals(interval1.id)) return false; if (!type.equals(interval1.type)) return false; return alleles.equals(interval1.alleles); } 
@Override public int hashCode() { int result = interval.hashCode(); result = 31 * result + id.hashCode(); result = 31 * result + type.hashCode(); result = 31 * result + svlen; result = 31 * result + alleles.hashCode(); return result; } } /** * For constructing a map from {@link AnnotatedInterval} to source complex variant IDs and * their associated assembly contig names. */ private static Map<AnnotatedInterval, Tuple2<TreeSet<String>, TreeSet<String>>> getAnnotatedIntervalToSourceCpxIDsAndContigNames(final List<VariantContext> extractedSimpleVariants) { // TODO: 5/11/18 this is suboptimal: // a round trip to AnnotatedInterval because some CPX variants themselves are duplicated, // i.e. their alt seq, extracted from different assembly contigs, only differ slightly. return extractedSimpleVariants.stream().map(AnnotatedInterval::new).collect(Collectors.toCollection(HashSet::new)) .stream().map(ai -> ai.sourceVC) .collect(Collectors.toMap(AnnotatedInterval::new, simpleVC -> { final TreeSet<String> complexEvents = new TreeSet<>(SVUtils.getAttributeAsStringList(simpleVC, CPX_EVENT_KEY)); final TreeSet<String> sourceCtgNames = new TreeSet<>(SVUtils.getAttributeAsStringList(simpleVC, CONTIG_NAMES)); return new Tuple2<>(complexEvents, sourceCtgNames); }) ); // hashMap is good enough for us } /** * Exist because the two ways to re-interpret simple variants via * {@link MultiSegmentsCpxVariantExtractor} * and via * {@link #reInterpretMultiSegmentComplexVarThroughAlignmentPairIteration(JavaRDD, SvDiscoveryInputMetaData, JavaRDD)} * could give essentially the same variants. 
*/ @VisibleForTesting public static List<VariantContext> removeDuplicates(final List<VariantContext> sourceWithLessAnnotations, final List<VariantContext> sourceWithMoreAnnotations) { final Map<AnnotatedInterval, Tuple2<TreeSet<String>, TreeSet<String>>> rangeToAnnotationsFromSourceWithLessAnnotations = getAnnotatedIntervalToSourceCpxIDsAndContigNames(sourceWithLessAnnotations); final Map<AnnotatedInterval, Tuple2<TreeSet<String>, TreeSet<String>>> rangeToAnnotationsFromSourceWithMoreAnnotations = getAnnotatedIntervalToSourceCpxIDsAndContigNames(sourceWithMoreAnnotations); final List<VariantContext> result = new ArrayList<>(sourceWithMoreAnnotations.size() + sourceWithLessAnnotations.size()); for (final Map.Entry<AnnotatedInterval, Tuple2<TreeSet<String>, TreeSet<String>>> entry: rangeToAnnotationsFromSourceWithMoreAnnotations.entrySet()) { final AnnotatedInterval interval = entry.getKey(); final Tuple2<TreeSet<String>, TreeSet<String>> sourceAttributes = entry.getValue(); final Tuple2<TreeSet<String>, TreeSet<String>> anotherSourceAttributes = rangeToAnnotationsFromSourceWithLessAnnotations.get(interval); if (anotherSourceAttributes == null) { // variant unique to one source result.add( interval.sourceVC ); } else { // found duplicate, merge annotations final TreeSet<String> sourceCpxIDs = sourceAttributes._1; final TreeSet<String> sourceCtgNames = sourceAttributes._2; sourceCpxIDs.addAll(anotherSourceAttributes._1); sourceCtgNames.addAll(anotherSourceAttributes._2); final VariantContextBuilder variant = new VariantContextBuilder(interval.sourceVC) .rmAttribute(CPX_EVENT_KEY) .attribute(CPX_EVENT_KEY, String.join(VCFConstants.INFO_FIELD_ARRAY_SEPARATOR, sourceCpxIDs)) .rmAttribute(CONTIG_NAMES) .attribute(CONTIG_NAMES, String.join(VCFConstants.INFO_FIELD_ARRAY_SEPARATOR, sourceCtgNames)); result.add( variant.make()); rangeToAnnotationsFromSourceWithLessAnnotations.remove(interval); // remove from the other source } } // now anotherSource has only unique records 
rangeToAnnotationsFromSourceWithLessAnnotations.keySet().forEach(interval -> result.add(interval.sourceVC)); return result; } //================================================================================================================== abstract List<VariantContext> extract(final VariantContext complexVC, final BasicReference reference); @VisibleForTesting public static final class ZeroAndOneSegmentCpxVariantExtractor extends SegmentedCpxVariantSimpleVariantExtractor { private static final long serialVersionUID = 1L; /** * Depending on how the ref segment is present in alt arrangement (if at all), logic as follows (order is important): * <ul> * <li> * if ref segment appear inverted and large enough * <ul> * <li> INV call is warranted </li> * <li> INS call(s) before and after the INV, if inserted sequence long enough </li> * </ul> * </li> * * <li> * otherwise if ref segment is present as-is, i.e. no deletion call can be made, * make insertion calls when possible * </li> * <li> * otherwise * <ul> * <li> if the segment is large enough, make a DEL call, and insertion calls when possible </li> * <li> otherwise a single fat INS call</li> * </ul> * </li> * </ul> * * <p> * Note that the above logic has a bias towards getting INV calls, because * when the (large enough) reference segment appears both as-is and inverted, * the above logic will emit at least an INV call, * whereas the (inverted) duplication(s) could also be reported as an DUP call as well, but... 
* </p> */ @Override public List<VariantContext> extract(final VariantContext complexVC, final BasicReference reference) { final List<String> segments = SVUtils.getAttributeAsStringList(complexVC, CPX_SV_REF_SEGMENTS); if (segments.isEmpty()) return whenZeroSegments(complexVC, reference); final SimpleInterval refSegment = new SimpleInterval(segments.get(0)); final List<String> altArrangement = SVUtils.getAttributeAsStringList(complexVC, CPX_EVENT_ALT_ARRANGEMENTS); final int altSeqLength = complexVC.getAttributeAsString(SEQ_ALT_HAPLOTYPE, "").length(); final List<VariantContextBuilder> result = new ArrayList<>(); final int asIsAppearanceIdx = altArrangement.indexOf("1"); final int invertedAppearanceIdx = altArrangement.indexOf("-1"); if (invertedAppearanceIdx != -1 && refSegment.size() > EVENT_SIZE_THRESHOLD) { // inversion call whenInversionIsWarranted(refSegment, invertedAppearanceIdx, altArrangement, reference, result); } else if (asIsAppearanceIdx != -1) { // no inverted appearance or appear inverted but not large enough, and in the mean time appear as-is, so no deletion whenNoDeletionIsAllowed(refSegment, asIsAppearanceIdx, altArrangement, altSeqLength, reference, result); } else { // no as-is appearance && (inverted appearance might present not not large enough) whenNoInvAndNoAsIsAppearance(refSegment, altSeqLength, reference, result); } final String sourceID = complexVC.getID(); final List<String> evidenceContigs = SVUtils.getAttributeAsStringList(complexVC, CONTIG_NAMES); final List<String> mappingQualities = SVUtils.getAttributeAsStringList(complexVC, MAPPING_QUALITIES); final int maxAlignLength = complexVC.getAttributeAsInt(MAX_ALIGN_LENGTH, 0); return result.stream() .map(vc -> vc.attribute(CPX_EVENT_KEY, sourceID).attribute(CONTIG_NAMES, evidenceContigs) .attribute(MAPPING_QUALITIES, mappingQualities) .attribute(MAX_ALIGN_LENGTH, maxAlignLength).make()) .collect(Collectors.toList()); } private List<VariantContext> whenZeroSegments(final VariantContext 
complexVC, final BasicReference reference) { final Allele anchorBaseRefAllele = getAnchorBaseRefAllele(complexVC.getContig(), complexVC.getStart(), reference); final int altSeqLength = complexVC.getAttributeAsString(SEQ_ALT_HAPLOTYPE, "").length() - 2; final List<String> mappingQualities = SVUtils.getAttributeAsStringList(complexVC, MAPPING_QUALITIES); final int maxAlignLength = complexVC.getAttributeAsInt(MAX_ALIGN_LENGTH, 0); final VariantContext insertion = makeInsertion(complexVC.getContig(), complexVC.getStart(), complexVC.getStart(), altSeqLength, anchorBaseRefAllele) .attribute(CPX_EVENT_KEY, complexVC.getID()) .attribute(CONTIG_NAMES, complexVC.getAttribute(CONTIG_NAMES)) .attribute(MAPPING_QUALITIES, mappingQualities) .attribute(MAX_ALIGN_LENGTH, maxAlignLength) .make(); return Collections.singletonList(insertion); } private static void whenInversionIsWarranted(final SimpleInterval refSegment, final int invertedAppearanceIdx, final List<String> altArrangement, final BasicReference reference, final List<VariantContextBuilder> result) { final Allele anchorBaseRefAllele = getAnchorBaseRefAllele(refSegment.getContig(), refSegment.getStart(), reference); result.add( makeInversion(refSegment, anchorBaseRefAllele) ); // further check if alt seq length is long enough to trigger an insertion as well, // but guard against case smallIns1 + INV + smallIns2, in theory one could annotate the inversion // with micro-insertions if that's the case, but we try to have minimal annotations here final Allele anchorBaseRefAlleleFront = getAnchorBaseRefAllele(refSegment.getContig(), refSegment.getStart() - 1, reference); final Allele anchorBaseRefAlleleRear = getAnchorBaseRefAllele(refSegment.getContig(), refSegment.getEnd(), reference); extractFrontAndRearInsertions(refSegment, invertedAppearanceIdx, altArrangement, anchorBaseRefAlleleFront, anchorBaseRefAlleleRear, result); } private static void whenNoDeletionIsAllowed( final SimpleInterval refSegment, final int 
asIsAppearanceIdx, final List<String> altArrangement, final int altSeqLength, final BasicReference reference, final List<VariantContextBuilder> result) { final int segmentSize = refSegment.size(); if (altSeqLength - segmentSize > EVENT_SIZE_THRESHOLD ) { // long enough net gain to trigger insertion calls // distinguish between cases {"1", ....}, {....., "1"}, and {....., "1", ....} to know where to place the insertion final Allele anchorBaseRefAlleleFront = getAnchorBaseRefAllele(refSegment.getContig(), refSegment.getStart() - 1, reference); final Allele anchorBaseRefAlleleRear = getAnchorBaseRefAllele(refSegment.getContig(), refSegment.getEnd(), reference); if ( altArrangement.get(altArrangement.size() - 1).equals("1") ) { // {....., "1"} -> front insertion final VariantContextBuilder frontIns = SegmentedCpxVariantSimpleVariantExtractor.makeInsertion(refSegment.getContig(), refSegment.getStart() - 1, refSegment.getStart() - 1, altSeqLength - segmentSize, anchorBaseRefAlleleFront); result.add(frontIns); } else if ( altArrangement.get(0).equals("1") ) { // {"1", ....} -> rear insertion final VariantContextBuilder rearIns = SegmentedCpxVariantSimpleVariantExtractor.makeInsertion(refSegment.getContig(), refSegment.getEnd(), refSegment.getEnd(), altSeqLength - segmentSize, anchorBaseRefAlleleFront); result.add(rearIns); } else { // {....., "1", ....} -> collect new insertion length before and after extractFrontAndRearInsertions(refSegment, asIsAppearanceIdx, altArrangement, anchorBaseRefAlleleFront, anchorBaseRefAlleleRear, result); } } } private static void whenNoInvAndNoAsIsAppearance( final SimpleInterval refSegment, final int altSeqLength, final BasicReference reference, final List<VariantContextBuilder> result) { if ( refSegment.size() > EVENT_SIZE_THRESHOLD ) { // a deletion call must be present final Allele anchorBaseRefAlleleFront = getAnchorBaseRefAllele(refSegment.getContig(), refSegment.getStart(), reference); // need left shift because the segment 
boundaries are shared by REF and ALT result.add( makeDeletion(new SimpleInterval(refSegment.getContig(), refSegment.getStart(), refSegment.getEnd() - 1), anchorBaseRefAlleleFront) ); // if the replacing sequence is long enough to trigger an insertion as well if (altSeqLength - 2 > EVENT_SIZE_THRESHOLD) { result.add(makeInsertion(refSegment.getContig(), refSegment.getStart(), refSegment.getStart(), altSeqLength, anchorBaseRefAlleleFront)); } } else if ( altSeqLength - 2 > EVENT_SIZE_THRESHOLD ){ // ref segment not long enough to merit an INV or DEL, so a fat INS, if size is enough final Allele fatInsertionRefAllele = Allele.create(reference.getBases(new SimpleInterval(refSegment.getContig(), refSegment.getStart(), refSegment.getEnd() - 1)), true); result.add( makeInsertion(refSegment.getContig(), refSegment.getStart(), refSegment.getEnd() - 1, altSeqLength - refSegment.size(), fatInsertionRefAllele) ); } } private static void extractFrontAndRearInsertions(final SimpleInterval refSegment, final int segmentIdx, final List<String> altArrangement, final Allele anchorBaseRefAlleleFront, final Allele anchorBaseRefAlleleRear, final List<VariantContextBuilder> result) { final List<Integer> segmentLen = Collections.singletonList(refSegment.size()); final SimpleInterval frontInsPos = SVUtils.makeOneBpInterval(refSegment.getContig(), refSegment.getStart() - 1); final VariantContextBuilder frontIns = getInsFromOneEnd(true, segmentIdx, frontInsPos, anchorBaseRefAlleleFront, segmentLen, altArrangement, true); if (frontIns != null) result.add(frontIns); final SimpleInterval rearInsPos = SVUtils.makeOneBpInterval(refSegment.getContig(), refSegment.getEnd()); final VariantContextBuilder rearIns = getInsFromOneEnd(false, segmentIdx, rearInsPos, anchorBaseRefAlleleRear, segmentLen, altArrangement, true); if (rearIns != null) result.add(rearIns); } } @VisibleForTesting public static final class MultiSegmentsCpxVariantExtractor extends SegmentedCpxVariantSimpleVariantExtractor { private 
static final long serialVersionUID = 1L; @Override public List<VariantContext> extract(final VariantContext complexVC, final BasicReference reference) { final List<SimpleInterval> refSegments = SVUtils.getAttributeAsStringList(complexVC, CPX_SV_REF_SEGMENTS).stream() .map(SimpleInterval::new) .collect(Collectors.toList()); final List<String> altArrangement = SVUtils.getAttributeAsStringList(complexVC, CPX_EVENT_ALT_ARRANGEMENTS); final Tuple3<Set<SimpleInterval>, Set<Integer>, List<Integer>> missingAndPresentAndInvertedSegments = getMissingAndPresentAndInvertedSegments(refSegments, altArrangement); final Set<SimpleInterval> missingSegments = missingAndPresentAndInvertedSegments._1(); final Set<Integer> presentSegments = missingAndPresentAndInvertedSegments._2(); final List<Integer> invertedSegments = missingAndPresentAndInvertedSegments._3(); final List<VariantContextBuilder> result = new ArrayList<>(); // if affected ref sequence found as is (trusting the aligner), then only output front and/or rear insertions final int idx = findAllSegments(altArrangement, refSegments.size()); if ( idx >= 0 ) { whenAllSegmentsAppearAsIs(complexVC, reference, refSegments, altArrangement, result, idx); } else { // inversions if (!invertedSegments.isEmpty()) { extractInversions(reference, refSegments, presentSegments, invertedSegments, result); } // deletions if (!missingSegments.isEmpty()) { extractDeletions(reference, missingSegments, result); } // head and tail insertions only extractFrontAndRearInsertions(complexVC, refSegments, altArrangement, reference, result); } final String sourceID = complexVC.getID(); final List<String> evidenceContigs = SVUtils.getAttributeAsStringList(complexVC, CONTIG_NAMES); final List<String> mappingQualities = SVUtils.getAttributeAsStringList(complexVC, MAPPING_QUALITIES); final int maxAlignLength = complexVC.getAttributeAsInt(MAX_ALIGN_LENGTH, 0); return result.stream() .map(vc -> vc.attribute(CPX_EVENT_KEY, sourceID).attribute(CONTIG_NAMES, 
evidenceContigs) .attribute(MAPPING_QUALITIES, mappingQualities) .attribute(MAX_ALIGN_LENGTH, maxAlignLength).make()) .collect(Collectors.toList()); } /** * Given {@code altArrangement} and count of segments, return the index in {@code altArrangement} * pointing to "1" where all segments contiguously appear after that, i.e. the affected reference region * appear as is (with other insertions, duplications, etc at either end) according to {@code altArrangement} * * Example: * ......, 1, 2, 3, 4, ..... * with 4 segments, * and the index of the "1" is 2, * then this function returns 2. * but if the altArrangement is * ......, 1, 2, 3, , ..... * the function returns -1 because not all segments appear as-is in the description. */ @VisibleForTesting static int findAllSegments(final List<String> altArrangement, final int segmentCount) { int idx = -1; int currentlyLookingForSegment = segmentCount; final String segmentCountString = String.valueOf(segmentCount); for (int i = altArrangement.size() - 1; i >= 0 ; --i) { // reversely because we want to follow left-justify convention final String description = altArrangement.get(i); if ( description.equals( String.valueOf(currentlyLookingForSegment) ) ) { if (currentlyLookingForSegment == 1) return i; --currentlyLookingForSegment; } else { currentlyLookingForSegment = description.equals( segmentCountString ) ? segmentCount - 1 : segmentCount; idx = -1; } } return idx; } private static void whenAllSegmentsAppearAsIs(final VariantContext complexVC, final BasicReference reference, final List<SimpleInterval> refSegments, final List<String> altArrangement, final List<VariantContextBuilder> result, final int idx) { final List<Integer> refSegmentLengths = refSegments.stream().map(SimpleInterval::size).collect(Collectors.toList()); if ( idx != 0 ) { // e.g. 
4 segments, and alt arrangement is ......, 1,2,3,4, there could be (that is, if long enough) front insertion final SimpleInterval insertionPos = new SimpleInterval(complexVC.getContig(), complexVC.getStart() - 1, complexVC.getStart() - 1); final Allele anchorBaseRefAlleleFront = getAnchorBaseRefAllele(insertionPos.getContig(), insertionPos.getStart(), reference); final VariantContextBuilder frontIns = getInsFromOneEnd(true, idx, insertionPos, anchorBaseRefAlleleFront, refSegmentLengths, altArrangement, true); if (frontIns != null) result.add(frontIns); } if ( idx + refSegments.size() - 1 < altArrangement.size() - 1 ) { // e.g. there's more after 1,2,3,4,..., there could be (that is, if long enough) front insertion final SimpleInterval insertionPos = new SimpleInterval(complexVC.getContig(), complexVC.getEnd(), complexVC.getEnd()); final byte[] refBases = reference.getBases(insertionPos); final Allele anchorBaseRefAlleleRear = Allele.create(refBases, true); final VariantContextBuilder rearIns = getInsFromOneEnd(false, idx + refSegments.size() - 1, insertionPos, anchorBaseRefAlleleRear, refSegmentLengths, altArrangement, true); if (rearIns != null) result.add(rearIns); } } private void extractInversions( final BasicReference reference, final List<SimpleInterval> refSegmentIntervals, final Set<Integer> presentSegments, final List<Integer> invertedSegments, final List<VariantContextBuilder> result) { final List<VariantContextBuilder> inversions = invertedSegments.stream() // large enough; in addition, if both as-is and inverted versions exist, treat as insertions instead of inversions: unlike 1-segment calls, where we don't have consistency problems .filter(i -> refSegmentIntervals.get(i - 1).size() > EVENT_SIZE_THRESHOLD && (!presentSegments.contains(i))) .map(i -> { final SimpleInterval invertedSegment = refSegmentIntervals.get(i - 1); final byte[] ref = reference.getBases(SVUtils.makeOneBpInterval(invertedSegment.getContig(), invertedSegment.getStart())); final 
Allele refAllele = Allele.create(ref, true); return makeInversion(invertedSegment, refAllele); }) .collect(Collectors.toList()); result.addAll(inversions); } private void extractDeletions( final BasicReference reference, final Set<SimpleInterval> missingSegments, final List<VariantContextBuilder> result) { final List<VariantContextBuilder> deletions = compactifyMissingSegments(missingSegments).stream() .filter(gone -> gone.size() > EVENT_SIZE_THRESHOLD) // large enough .map(gone -> { final byte[] ref = reference.getBases(SVUtils.makeOneBpInterval(gone.getContig(), gone.getStart())); final Allele refAllele = Allele.create(ref, true); return makeDeletion(new SimpleInterval(gone.getContig(), gone.getStart(), gone.getEnd() - 1), refAllele); }) .collect(Collectors.toList()); result.addAll(deletions); } /** * Compactify missingSegments for case when two neighboring segments are both gone, to avoid cases when * 1) neither segment is large enough * 2) calling two small deletions while one should call a big deletion */ @VisibleForTesting static List<SimpleInterval> compactifyMissingSegments(final Set<SimpleInterval> missingSegments) { if (missingSegments.size() == 1) return Collections.singletonList(missingSegments.iterator().next()); // first sort final List<SimpleInterval> sortedMissingSegments = missingSegments.stream() .sorted(Comparator.comparing(SimpleInterval::getStart)) // two segments will NEVER have the same start or overlap on more than one base .collect(Collectors.toList()); final List<SimpleInterval> result = new ArrayList<>(missingSegments.size()); Iterator<SimpleInterval> iterator = sortedMissingSegments.iterator(); SimpleInterval current = iterator.next(); while (iterator.hasNext()) { SimpleInterval next = iterator.next(); if (current.overlapsWithMargin(next, 1)) { current = new SimpleInterval(current.getContig(), current.getStart(), next.getEnd()); } else { result.add(current); current = next; } } result.add(current); return result; } private void 
extractFrontAndRearInsertions(final VariantContext complexVC, final List<SimpleInterval> refSegmentIntervals, final List<String> altArrangement, final BasicReference reference, final List<VariantContextBuilder> result) { final List<Integer> refSegmentLengths = refSegmentIntervals.stream().map(SimpleInterval::size).collect(Collectors.toList()); // index pointing to first appearance of ref segment (inverted or not) in altArrangement, from either side int firstRefSegmentIdx = 0; // first front for (final String description : altArrangement) { if ( descriptionIndicatesInsertion(description)) { ++firstRefSegmentIdx; } else { break; } } if (firstRefSegmentIdx > 0) { final SimpleInterval startAndStop = SVUtils.makeOneBpInterval(complexVC.getContig(), complexVC.getStart()); final Allele anchorBaseRefAlleleFront = Allele.create(reference.getBases(startAndStop), true); final VariantContextBuilder frontIns = getInsFromOneEnd(true, firstRefSegmentIdx, startAndStop, anchorBaseRefAlleleFront, refSegmentLengths, altArrangement, true); if (frontIns != null) result.add( frontIns ); } firstRefSegmentIdx = altArrangement.size() - 1; // then end for (int i = altArrangement.size() - 1; i > -1 ; --i) { if ( descriptionIndicatesInsertion(altArrangement.get(i))) { --firstRefSegmentIdx; } else { break; } } if (firstRefSegmentIdx != altArrangement.size() - 1) { final int pos = complexVC.getEnd(); final SimpleInterval insertionPos = SVUtils.makeOneBpInterval(complexVC.getContig(), pos); final Allele anchorBaseRefAlleleRear = Allele.create(reference.getBases(insertionPos), true); final VariantContextBuilder rearIns = getInsFromOneEnd(false, firstRefSegmentIdx, insertionPos, anchorBaseRefAlleleRear, refSegmentLengths, altArrangement, true); if (rearIns != null) result.add( rearIns ); } } @VisibleForTesting static boolean descriptionIndicatesInsertion(final String description) { if (description.startsWith(CpxVariantCanonicalRepresentation.UNMAPPED_INSERTION)) return true; return 
!NumberUtils.isCreatable(description); // "(-)?[0-9]+" is describing segments, we don't count them as insertions } } //================================================================================================================== /** * Reason for requesting increment by 1 via {@code shouldIncreaseInsLenByOne}: * when getting insertion length from either end, * there could be, but not always, a one-bp overlap between the head alignment and * the next alignment that continues the flow (which is not necessarily the 2nd alignment); * so when there's such 1-bp overlap, the insertion length should count this 1-bp overlap. * todo: currently all known calling code provide {@code true}, which is technically wrong, but we need alignment information for tell when to provide true/false * * @return {@code null} if the inserted sequence from the requested end is not over {@link #EVENT_SIZE_THRESHOLD} */ @VisibleForTesting static VariantContextBuilder getInsFromOneEnd(final boolean fromFront, final int idxFirstMatch, final SimpleInterval insertionStartAndStop, final Allele anchorBaseRefAllele, final List<Integer> refSegmentLengths, final List<String> altArrangement, final boolean shouldIncreaseInsLenByOne) { int insLen = 0; if (fromFront) { for (int i = 0; i < idxFirstMatch; ++i) { insLen += getInsLen( altArrangement.get(i), refSegmentLengths ); } } else { for (int i = idxFirstMatch + 1; i < altArrangement.size(); ++i) { insLen += getInsLen( altArrangement.get(i), refSegmentLengths ); } } if (shouldIncreaseInsLenByOne) ++insLen; if (insLen > EVENT_SIZE_THRESHOLD) return makeInsertion(insertionStartAndStop.getContig(), insertionStartAndStop.getStart(), insertionStartAndStop.getEnd(), insLen, anchorBaseRefAllele); else return null; } @VisibleForTesting static int getInsLen(final String description, final List<Integer> refSegmentLengths) { if (description.startsWith(CpxVariantCanonicalRepresentation.UNMAPPED_INSERTION)) { return 
Integer.valueOf(description.substring(CpxVariantCanonicalRepresentation.UNMAPPED_INSERTION.length() + 1)); } else if ( NumberUtils.isCreatable(description) ){ final int offset = description.startsWith("-") ? 1 : 0; return refSegmentLengths.get( Integer.valueOf(description.substring(offset)) - 1); } else { final int offset = description.startsWith("-") ? 1 : 0; return new SimpleInterval(description.substring(offset)).size(); } } /** * Retrieves from the provide {@code complexVC}, reference segments described in * {@link org.broadinstitute.hellbender.tools.spark.sv.utils.GATKSVVCFConstants#CPX_SV_REF_SEGMENTS}, that are * a) absent * b) present as is, i.e. not inverted * c) inverted */ @VisibleForTesting static Tuple3<Set<SimpleInterval>, Set<Integer>, List<Integer>> getMissingAndPresentAndInvertedSegments(final List<SimpleInterval> refSegments, final List<String> altArrangements ) { final List<Integer> invertedSegments = new ArrayList<>(); final Set<Integer> presentSegments = new TreeSet<>(); altArrangements .forEach(s -> { if ( s.startsWith("-") && ( !s.contains(":") )) { // some segment inverted invertedSegments.add( Integer.valueOf(s.substring(1)) ); } if ( !s.contains(":") && !s.startsWith(CpxVariantCanonicalRepresentation.UNMAPPED_INSERTION) && !s.startsWith("-") ) { // a ref segment, but not inverted presentSegments.add(Integer.valueOf(s)); } }); final Set<SimpleInterval> missingSegments = IntStream.rangeClosed(1, refSegments.size()).boxed() .filter(i -> !presentSegments.contains(i) && !invertedSegments.contains(i)) .map(i -> refSegments.get(i-1)) .collect(Collectors.toSet()); return new Tuple3<>(missingSegments, presentSegments, invertedSegments); } // boiler-plate code block ========================================================================================= private static Allele getAnchorBaseRefAllele(final String chr, final int pos, final BasicReference reference) { return Allele.create(reference.getBases(SVUtils.makeOneBpInterval(chr, pos)), true); } 
private static final Allele altSymbAlleleDel = Allele.create(SimpleSVType.createBracketedSymbAlleleString(SYMB_ALT_STRING_DEL)); private static final Allele altSymbAlleleIns = Allele.create(SimpleSVType.createBracketedSymbAlleleString(SYMB_ALT_STRING_INS)); private static final Allele altSymbAlleleInv = Allele.create(SimpleSVType.createBracketedSymbAlleleString(SYMB_ALT_STRING_INV)); /** * Note that {@code delRange} is expected to be pre-process to VCF spec compatible, * e.g. if chr1:101-200 is deleted, then {@code delRange} should be chr1:100-200 * @param delRange */ @VisibleForTesting static VariantContextBuilder makeDeletion(final SimpleInterval delRange, final Allele refAllele) { return new VariantContextBuilder() .chr(delRange.getContig()).start(delRange.getStart()).stop(delRange.getEnd()) .alleles(Arrays.asList(refAllele, altSymbAlleleDel)) .id(makeID(SimpleSVType.SupportedType.DEL.name(), delRange.getContig(), delRange.getStart(), delRange.getEnd())) .attribute(VCFConstants.END_KEY, delRange.getEnd()) .attribute(SVLEN, - delRange.size() + 1) .attribute(SVTYPE, SimpleSVType.SupportedType.DEL.name()); } @VisibleForTesting static VariantContextBuilder makeInsertion(final String chr, final int pos, final int end, final int svLen, final Allele refAllele) { return new VariantContextBuilder().chr(chr).start(pos).stop(end) .alleles(Arrays.asList(refAllele, altSymbAlleleIns)) .id(makeID(SimpleSVType.SupportedType.INS.name(), chr, pos, end)) .attribute(VCFConstants.END_KEY, end) .attribute(SVLEN, svLen) .attribute(SVTYPE, SimpleSVType.SupportedType.INS.name()); } @VisibleForTesting static VariantContextBuilder makeInversion(final SimpleInterval invertedRegion, final Allele refAllele) { return new VariantContextBuilder() .chr(invertedRegion.getContig()).start(invertedRegion.getStart() - 1).stop(invertedRegion.getEnd()) // TODO: 5/2/18 VCF spec doesn't requst left shift by 1 for inversion POS .alleles(Arrays.asList(refAllele, altSymbAlleleInv)) 
.id(makeID(SimpleSVType.SupportedType.INV.name(), invertedRegion.getContig(), invertedRegion.getStart() - 1, invertedRegion.getEnd())) .attribute(VCFConstants.END_KEY, invertedRegion.getEnd()) .attribute(SVLEN, 0) // TODO: 5/2/18 this is following VCF spec, .attribute(SVTYPE, SimpleSVType.SupportedType.INV.name()); } }
package org.bouncycastle.crypto.tls; /** * RFC 2246 A.5 */ public class CipherSuite { public static final int TLS_NULL_WITH_NULL_NULL = 0x0000; public static final int TLS_RSA_WITH_NULL_MD5 = 0x0001; public static final int TLS_RSA_WITH_NULL_SHA = 0x0002; public static final int TLS_RSA_EXPORT_WITH_RC4_40_MD5 = 0x0003; public static final int TLS_RSA_WITH_RC4_128_MD5 = 0x0004; public static final int TLS_RSA_WITH_RC4_128_SHA = 0x0005; public static final int TLS_RSA_EXPORT_WITH_RC2_CBC_40_MD5 = 0x0006; public static final int TLS_RSA_WITH_IDEA_CBC_SHA = 0x0007; public static final int TLS_RSA_EXPORT_WITH_DES40_CBC_SHA = 0x0008; public static final int TLS_RSA_WITH_DES_CBC_SHA = 0x0009; public static final int TLS_RSA_WITH_3DES_EDE_CBC_SHA = 0x000A; public static final int TLS_DH_DSS_EXPORT_WITH_DES40_CBC_SHA = 0x000B; public static final int TLS_DH_DSS_WITH_DES_CBC_SHA = 0x000C; public static final int TLS_DH_DSS_WITH_3DES_EDE_CBC_SHA = 0x000D; public static final int TLS_DH_RSA_EXPORT_WITH_DES40_CBC_SHA = 0x000E; public static final int TLS_DH_RSA_WITH_DES_CBC_SHA = 0x000F; public static final int TLS_DH_RSA_WITH_3DES_EDE_CBC_SHA = 0x0010; public static final int TLS_DHE_DSS_EXPORT_WITH_DES40_CBC_SHA = 0x0011; public static final int TLS_DHE_DSS_WITH_DES_CBC_SHA = 0x0012; public static final int TLS_DHE_DSS_WITH_3DES_EDE_CBC_SHA = 0x0013; public static final int TLS_DHE_RSA_EXPORT_WITH_DES40_CBC_SHA = 0x0014; public static final int TLS_DHE_RSA_WITH_DES_CBC_SHA = 0x0015; public static final int TLS_DHE_RSA_WITH_3DES_EDE_CBC_SHA = 0x0016; public static final int TLS_DH_anon_EXPORT_WITH_RC4_40_MD5 = 0x0017; public static final int TLS_DH_anon_WITH_RC4_128_MD5 = 0x0018; public static final int TLS_DH_anon_EXPORT_WITH_DES40_CBC_SHA = 0x0019; public static final int TLS_DH_anon_WITH_DES_CBC_SHA = 0x001A; public static final int TLS_DH_anon_WITH_3DES_EDE_CBC_SHA = 0x001B; /* * Note: The cipher suite values { 0x00, 0x1C } and { 0x00, 0x1D } are reserved to avoid * 
collision with Fortezza-based cipher suites in SSL 3. */ /* * RFC 3268 */ public static final int TLS_RSA_WITH_AES_128_CBC_SHA = 0x002F; public static final int TLS_DH_DSS_WITH_AES_128_CBC_SHA = 0x0030; public static final int TLS_DH_RSA_WITH_AES_128_CBC_SHA = 0x0031; public static final int TLS_DHE_DSS_WITH_AES_128_CBC_SHA = 0x0032; public static final int TLS_DHE_RSA_WITH_AES_128_CBC_SHA = 0x0033; public static final int TLS_DH_anon_WITH_AES_128_CBC_SHA = 0x0034; public static final int TLS_RSA_WITH_AES_256_CBC_SHA = 0x0035; public static final int TLS_DH_DSS_WITH_AES_256_CBC_SHA = 0x0036; public static final int TLS_DH_RSA_WITH_AES_256_CBC_SHA = 0x0037; public static final int TLS_DHE_DSS_WITH_AES_256_CBC_SHA = 0x0038; public static final int TLS_DHE_RSA_WITH_AES_256_CBC_SHA = 0x0039; public static final int TLS_DH_anon_WITH_AES_256_CBC_SHA = 0x003A; /* * RFC 5932 */ public static final int TLS_RSA_WITH_CAMELLIA_128_CBC_SHA = 0x0041; public static final int TLS_DH_DSS_WITH_CAMELLIA_128_CBC_SHA = 0x0042; public static final int TLS_DH_RSA_WITH_CAMELLIA_128_CBC_SHA = 0x0043; public static final int TLS_DHE_DSS_WITH_CAMELLIA_128_CBC_SHA = 0x0044; public static final int TLS_DHE_RSA_WITH_CAMELLIA_128_CBC_SHA = 0x0045; public static final int TLS_DH_anon_WITH_CAMELLIA_128_CBC_SHA = 0x0046; public static final int TLS_RSA_WITH_CAMELLIA_256_CBC_SHA = 0x0084; public static final int TLS_DH_DSS_WITH_CAMELLIA_256_CBC_SHA = 0x0085; public static final int TLS_DH_RSA_WITH_CAMELLIA_256_CBC_SHA = 0x0086; public static final int TLS_DHE_DSS_WITH_CAMELLIA_256_CBC_SHA = 0x0087; public static final int TLS_DHE_RSA_WITH_CAMELLIA_256_CBC_SHA = 0x0088; public static final int TLS_DH_anon_WITH_CAMELLIA_256_CBC_SHA = 0x0089; public static final int TLS_RSA_WITH_CAMELLIA_128_CBC_SHA256 = 0x00BA; public static final int TLS_DH_DSS_WITH_CAMELLIA_128_CBC_SHA256 = 0x00BB; public static final int TLS_DH_RSA_WITH_CAMELLIA_128_CBC_SHA256 = 0x00BC; public static final int 
TLS_DHE_DSS_WITH_CAMELLIA_128_CBC_SHA256 = 0x00BD; public static final int TLS_DHE_RSA_WITH_CAMELLIA_128_CBC_SHA256 = 0x00BE; public static final int TLS_DH_anon_WITH_CAMELLIA_128_CBC_SHA256 = 0x00BF; public static final int TLS_RSA_WITH_CAMELLIA_256_CBC_SHA256 = 0x00C0; public static final int TLS_DH_DSS_WITH_CAMELLIA_256_CBC_SHA256 = 0x00C1; public static final int TLS_DH_RSA_WITH_CAMELLIA_256_CBC_SHA256 = 0x00C2; public static final int TLS_DHE_DSS_WITH_CAMELLIA_256_CBC_SHA256 = 0x00C3; public static final int TLS_DHE_RSA_WITH_CAMELLIA_256_CBC_SHA256 = 0x00C4; public static final int TLS_DH_anon_WITH_CAMELLIA_256_CBC_SHA256 = 0x00C5; /* * RFC 4162 */ public static final int TLS_RSA_WITH_SEED_CBC_SHA = 0x0096; public static final int TLS_DH_DSS_WITH_SEED_CBC_SHA = 0x0097; public static final int TLS_DH_RSA_WITH_SEED_CBC_SHA = 0x0098; public static final int TLS_DHE_DSS_WITH_SEED_CBC_SHA = 0x0099; public static final int TLS_DHE_RSA_WITH_SEED_CBC_SHA = 0x009A; public static final int TLS_DH_anon_WITH_SEED_CBC_SHA = 0x009B; /* * RFC 4279 */ public static final int TLS_PSK_WITH_RC4_128_SHA = 0x008A; public static final int TLS_PSK_WITH_3DES_EDE_CBC_SHA = 0x008B; public static final int TLS_PSK_WITH_AES_128_CBC_SHA = 0x008C; public static final int TLS_PSK_WITH_AES_256_CBC_SHA = 0x008D; public static final int TLS_DHE_PSK_WITH_RC4_128_SHA = 0x008E; public static final int TLS_DHE_PSK_WITH_3DES_EDE_CBC_SHA = 0x008F; public static final int TLS_DHE_PSK_WITH_AES_128_CBC_SHA = 0x0090; public static final int TLS_DHE_PSK_WITH_AES_256_CBC_SHA = 0x0091; public static final int TLS_RSA_PSK_WITH_RC4_128_SHA = 0x0092; public static final int TLS_RSA_PSK_WITH_3DES_EDE_CBC_SHA = 0x0093; public static final int TLS_RSA_PSK_WITH_AES_128_CBC_SHA = 0x0094; public static final int TLS_RSA_PSK_WITH_AES_256_CBC_SHA = 0x0095; /* * RFC 4492 */ public static final int TLS_ECDH_ECDSA_WITH_NULL_SHA = 0xC001; public static final int TLS_ECDH_ECDSA_WITH_RC4_128_SHA = 0xC002; public static 
final int TLS_ECDH_ECDSA_WITH_3DES_EDE_CBC_SHA = 0xC003; public static final int TLS_ECDH_ECDSA_WITH_AES_128_CBC_SHA = 0xC004; public static final int TLS_ECDH_ECDSA_WITH_AES_256_CBC_SHA = 0xC005; public static final int TLS_ECDHE_ECDSA_WITH_NULL_SHA = 0xC006; public static final int TLS_ECDHE_ECDSA_WITH_RC4_128_SHA = 0xC007; public static final int TLS_ECDHE_ECDSA_WITH_3DES_EDE_CBC_SHA = 0xC008; public static final int TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA = 0xC009; public static final int TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA = 0xC00A; public static final int TLS_ECDH_RSA_WITH_NULL_SHA = 0xC00B; public static final int TLS_ECDH_RSA_WITH_RC4_128_SHA = 0xC00C; public static final int TLS_ECDH_RSA_WITH_3DES_EDE_CBC_SHA = 0xC00D; public static final int TLS_ECDH_RSA_WITH_AES_128_CBC_SHA = 0xC00E; public static final int TLS_ECDH_RSA_WITH_AES_256_CBC_SHA = 0xC00F; public static final int TLS_ECDHE_RSA_WITH_NULL_SHA = 0xC010; public static final int TLS_ECDHE_RSA_WITH_RC4_128_SHA = 0xC011; public static final int TLS_ECDHE_RSA_WITH_3DES_EDE_CBC_SHA = 0xC012; public static final int TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA = 0xC013; public static final int TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA = 0xC014; public static final int TLS_ECDH_anon_WITH_NULL_SHA = 0xC015; public static final int TLS_ECDH_anon_WITH_RC4_128_SHA = 0xC016; public static final int TLS_ECDH_anon_WITH_3DES_EDE_CBC_SHA = 0xC017; public static final int TLS_ECDH_anon_WITH_AES_128_CBC_SHA = 0xC018; public static final int TLS_ECDH_anon_WITH_AES_256_CBC_SHA = 0xC019; /* * RFC 4785 */ public static final int TLS_PSK_WITH_NULL_SHA = 0x002C; public static final int TLS_DHE_PSK_WITH_NULL_SHA = 0x002D; public static final int TLS_RSA_PSK_WITH_NULL_SHA = 0x002E; /* * RFC 5054 */ public static final int TLS_SRP_SHA_WITH_3DES_EDE_CBC_SHA = 0xC01A; public static final int TLS_SRP_SHA_RSA_WITH_3DES_EDE_CBC_SHA = 0xC01B; public static final int TLS_SRP_SHA_DSS_WITH_3DES_EDE_CBC_SHA = 0xC01C; public static final int 
TLS_SRP_SHA_WITH_AES_128_CBC_SHA = 0xC01D; public static final int TLS_SRP_SHA_RSA_WITH_AES_128_CBC_SHA = 0xC01E; public static final int TLS_SRP_SHA_DSS_WITH_AES_128_CBC_SHA = 0xC01F; public static final int TLS_SRP_SHA_WITH_AES_256_CBC_SHA = 0xC020; public static final int TLS_SRP_SHA_RSA_WITH_AES_256_CBC_SHA = 0xC021; public static final int TLS_SRP_SHA_DSS_WITH_AES_256_CBC_SHA = 0xC022; /* * RFC 5246 */ public static final int TLS_RSA_WITH_NULL_SHA256 = 0x003B; public static final int TLS_RSA_WITH_AES_128_CBC_SHA256 = 0x003C; public static final int TLS_RSA_WITH_AES_256_CBC_SHA256 = 0x003D; public static final int TLS_DH_DSS_WITH_AES_128_CBC_SHA256 = 0x003E; public static final int TLS_DH_RSA_WITH_AES_128_CBC_SHA256 = 0x003F; public static final int TLS_DHE_DSS_WITH_AES_128_CBC_SHA256 = 0x0040; public static final int TLS_DHE_RSA_WITH_AES_128_CBC_SHA256 = 0x0067; public static final int TLS_DH_DSS_WITH_AES_256_CBC_SHA256 = 0x0068; public static final int TLS_DH_RSA_WITH_AES_256_CBC_SHA256 = 0x0069; public static final int TLS_DHE_DSS_WITH_AES_256_CBC_SHA256 = 0x006A; public static final int TLS_DHE_RSA_WITH_AES_256_CBC_SHA256 = 0x006B; public static final int TLS_DH_anon_WITH_AES_128_CBC_SHA256 = 0x006C; public static final int TLS_DH_anon_WITH_AES_256_CBC_SHA256 = 0x006D; /* * RFC 5288 */ public static final int TLS_RSA_WITH_AES_128_GCM_SHA256 = 0x009C; public static final int TLS_RSA_WITH_AES_256_GCM_SHA384 = 0x009D; public static final int TLS_DHE_RSA_WITH_AES_128_GCM_SHA256 = 0x009E; public static final int TLS_DHE_RSA_WITH_AES_256_GCM_SHA384 = 0x009F; public static final int TLS_DH_RSA_WITH_AES_128_GCM_SHA256 = 0x00A0; public static final int TLS_DH_RSA_WITH_AES_256_GCM_SHA384 = 0x00A1; public static final int TLS_DHE_DSS_WITH_AES_128_GCM_SHA256 = 0x00A2; public static final int TLS_DHE_DSS_WITH_AES_256_GCM_SHA384 = 0x00A3; public static final int TLS_DH_DSS_WITH_AES_128_GCM_SHA256 = 0x00A4; public static final int TLS_DH_DSS_WITH_AES_256_GCM_SHA384 = 
0x00A5; public static final int TLS_DH_anon_WITH_AES_128_GCM_SHA256 = 0x00A6; public static final int TLS_DH_anon_WITH_AES_256_GCM_SHA384 = 0x00A7; /* * RFC 5289 */ public static final int TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256 = 0xC023; public static final int TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384 = 0xC024; public static final int TLS_ECDH_ECDSA_WITH_AES_128_CBC_SHA256 = 0xC025; public static final int TLS_ECDH_ECDSA_WITH_AES_256_CBC_SHA384 = 0xC026; public static final int TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256 = 0xC027; public static final int TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384 = 0xC028; public static final int TLS_ECDH_RSA_WITH_AES_128_CBC_SHA256 = 0xC029; public static final int TLS_ECDH_RSA_WITH_AES_256_CBC_SHA384 = 0xC02A; public static final int TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256 = 0xC02B; public static final int TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384 = 0xC02C; public static final int TLS_ECDH_ECDSA_WITH_AES_128_GCM_SHA256 = 0xC02D; public static final int TLS_ECDH_ECDSA_WITH_AES_256_GCM_SHA384 = 0xC02E; public static final int TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256 = 0xC02F; public static final int TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384 = 0xC030; public static final int TLS_ECDH_RSA_WITH_AES_128_GCM_SHA256 = 0xC031; public static final int TLS_ECDH_RSA_WITH_AES_256_GCM_SHA384 = 0xC032; /* * RFC 5487 */ public static final int TLS_PSK_WITH_AES_128_GCM_SHA256 = 0x00A8; public static final int TLS_PSK_WITH_AES_256_GCM_SHA384 = 0x00A9; public static final int TLS_DHE_PSK_WITH_AES_128_GCM_SHA256 = 0x00AA; public static final int TLS_DHE_PSK_WITH_AES_256_GCM_SHA384 = 0x00AB; public static final int TLS_RSA_PSK_WITH_AES_128_GCM_SHA256 = 0x00AC; public static final int TLS_RSA_PSK_WITH_AES_256_GCM_SHA384 = 0x00AD; public static final int TLS_PSK_WITH_AES_128_CBC_SHA256 = 0x00AE; public static final int TLS_PSK_WITH_AES_256_CBC_SHA384 = 0x00AF; public static final int TLS_PSK_WITH_NULL_SHA256 = 0x00B0; public static final int TLS_PSK_WITH_NULL_SHA384 = 
0x00B1; public static final int TLS_DHE_PSK_WITH_AES_128_CBC_SHA256 = 0x00B2; public static final int TLS_DHE_PSK_WITH_AES_256_CBC_SHA384 = 0x00B3; public static final int TLS_DHE_PSK_WITH_NULL_SHA256 = 0x00B4; public static final int TLS_DHE_PSK_WITH_NULL_SHA384 = 0x00B5; public static final int TLS_RSA_PSK_WITH_AES_128_CBC_SHA256 = 0x00B6; public static final int TLS_RSA_PSK_WITH_AES_256_CBC_SHA384 = 0x00B7; public static final int TLS_RSA_PSK_WITH_NULL_SHA256 = 0x00B8; public static final int TLS_RSA_PSK_WITH_NULL_SHA384 = 0x00B9; /* * RFC 5489 */ public static final int TLS_ECDHE_PSK_WITH_RC4_128_SHA = 0xC033; public static final int TLS_ECDHE_PSK_WITH_3DES_EDE_CBC_SHA = 0xC034; public static final int TLS_ECDHE_PSK_WITH_AES_128_CBC_SHA = 0xC035; public static final int TLS_ECDHE_PSK_WITH_AES_256_CBC_SHA = 0xC036; public static final int TLS_ECDHE_PSK_WITH_AES_128_CBC_SHA256 = 0xC037; public static final int TLS_ECDHE_PSK_WITH_AES_256_CBC_SHA384 = 0xC038; public static final int TLS_ECDHE_PSK_WITH_NULL_SHA = 0xC039; public static final int TLS_ECDHE_PSK_WITH_NULL_SHA256 = 0xC03A; public static final int TLS_ECDHE_PSK_WITH_NULL_SHA384 = 0xC03B; /* * RFC 5746 */ public static final int TLS_EMPTY_RENEGOTIATION_INFO_SCSV = 0x00FF; /* * RFC 6367 */ public static final int TLS_ECDHE_ECDSA_WITH_CAMELLIA_128_CBC_SHA256 = 0xC072; public static final int TLS_ECDHE_ECDSA_WITH_CAMELLIA_256_CBC_SHA384 = 0xC073; public static final int TLS_ECDH_ECDSA_WITH_CAMELLIA_128_CBC_SHA256 = 0xC074; public static final int TLS_ECDH_ECDSA_WITH_CAMELLIA_256_CBC_SHA384 = 0xC075; public static final int TLS_ECDHE_RSA_WITH_CAMELLIA_128_CBC_SHA256 = 0xC076; public static final int TLS_ECDHE_RSA_WITH_CAMELLIA_256_CBC_SHA384 = 0xC077; public static final int TLS_ECDH_RSA_WITH_CAMELLIA_128_CBC_SHA256 = 0xC078; public static final int TLS_ECDH_RSA_WITH_CAMELLIA_256_CBC_SHA384 = 0xC079; public static final int TLS_RSA_WITH_CAMELLIA_128_GCM_SHA256 = 0xC07A; public static final int 
TLS_RSA_WITH_CAMELLIA_256_GCM_SHA384 = 0xC07B; public static final int TLS_DHE_RSA_WITH_CAMELLIA_128_GCM_SHA256 = 0xC07C; public static final int TLS_DHE_RSA_WITH_CAMELLIA_256_GCM_SHA384 = 0xC07D; public static final int TLS_DH_RSA_WITH_CAMELLIA_128_GCM_SHA256 = 0xC07E; public static final int TLS_DH_RSA_WITH_CAMELLIA_256_GCM_SHA384 = 0xC07F; public static final int TLS_DHE_DSS_WITH_CAMELLIA_128_GCM_SHA256 = 0xC080; public static final int TLS_DHE_DSS_WITH_CAMELLIA_256_GCM_SHA384 = 0xC081; public static final int TLS_DH_DSS_WITH_CAMELLIA_128_GCM_SHA256 = 0xC082; public static final int TLS_DH_DSS_WITH_CAMELLIA_256_GCM_SHA384 = 0xC083; public static final int TLS_DH_anon_WITH_CAMELLIA_128_GCM_SHA256 = 0xC084; public static final int TLS_DH_anon_WITH_CAMELLIA_256_GCM_SHA384 = 0xC085; public static final int TLS_ECDHE_ECDSA_WITH_CAMELLIA_128_GCM_SHA256 = 0xC086; public static final int TLS_ECDHE_ECDSA_WITH_CAMELLIA_256_GCM_SHA384 = 0xC087; public static final int TLS_ECDH_ECDSA_WITH_CAMELLIA_128_GCM_SHA256 = 0xC088; public static final int TLS_ECDH_ECDSA_WITH_CAMELLIA_256_GCM_SHA384 = 0xC089; public static final int TLS_ECDHE_RSA_WITH_CAMELLIA_128_GCM_SHA256 = 0xC08A; public static final int TLS_ECDHE_RSA_WITH_CAMELLIA_256_GCM_SHA384 = 0xC08B; public static final int TLS_ECDH_RSA_WITH_CAMELLIA_128_GCM_SHA256 = 0xC08C; public static final int TLS_ECDH_RSA_WITH_CAMELLIA_256_GCM_SHA384 = 0xC08D; public static final int TLS_PSK_WITH_CAMELLIA_128_GCM_SHA256 = 0xC08E; public static final int TLS_PSK_WITH_CAMELLIA_256_GCM_SHA384 = 0xC08F; public static final int TLS_DHE_PSK_WITH_CAMELLIA_128_GCM_SHA256 = 0xC090; public static final int TLS_DHE_PSK_WITH_CAMELLIA_256_GCM_SHA384 = 0xC091; public static final int TLS_RSA_PSK_WITH_CAMELLIA_128_GCM_SHA256 = 0xC092; public static final int TLS_RSA_PSK_WITH_CAMELLIA_256_GCM_SHA384 = 0xC093; public static final int TLS_PSK_WITH_CAMELLIA_128_CBC_SHA256 = 0xC094; public static final int TLS_PSK_WITH_CAMELLIA_256_CBC_SHA384 = 0xC095; 
public static final int TLS_DHE_PSK_WITH_CAMELLIA_128_CBC_SHA256 = 0xC096; public static final int TLS_DHE_PSK_WITH_CAMELLIA_256_CBC_SHA384 = 0xC097; public static final int TLS_RSA_PSK_WITH_CAMELLIA_128_CBC_SHA256 = 0xC098; public static final int TLS_RSA_PSK_WITH_CAMELLIA_256_CBC_SHA384 = 0xC099; public static final int TLS_ECDHE_PSK_WITH_CAMELLIA_128_CBC_SHA256 = 0xC09A; public static final int TLS_ECDHE_PSK_WITH_CAMELLIA_256_CBC_SHA384 = 0xC09B; /* * RFC 6655 */ public static final int TLS_RSA_WITH_AES_128_CCM = 0xC09C; public static final int TLS_RSA_WITH_AES_256_CCM = 0xC09D; public static final int TLS_DHE_RSA_WITH_AES_128_CCM = 0xC09E; public static final int TLS_DHE_RSA_WITH_AES_256_CCM = 0xC09F; public static final int TLS_RSA_WITH_AES_128_CCM_8 = 0xC0A0; public static final int TLS_RSA_WITH_AES_256_CCM_8 = 0xC0A1; public static final int TLS_DHE_RSA_WITH_AES_128_CCM_8 = 0xC0A2; public static final int TLS_DHE_RSA_WITH_AES_256_CCM_8 = 0xC0A3; public static final int TLS_PSK_WITH_AES_128_CCM = 0xC0A4; public static final int TLS_PSK_WITH_AES_256_CCM = 0xC0A5; public static final int TLS_DHE_PSK_WITH_AES_128_CCM = 0xC0A6; public static final int TLS_DHE_PSK_WITH_AES_256_CCM = 0xC0A7; public static final int TLS_PSK_WITH_AES_128_CCM_8 = 0xC0A8; public static final int TLS_PSK_WITH_AES_256_CCM_8 = 0xC0A9; public static final int TLS_PSK_DHE_WITH_AES_128_CCM_8 = 0xC0AA; public static final int TLS_PSK_DHE_WITH_AES_256_CCM_8 = 0xC0AB; /* * draft-agl-tls-chacha20poly1305-04 */ public static final int TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256 = 0xCC13; public static final int TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256 = 0xCC14; public static final int TLS_DHE_RSA_WITH_CHACHA20_POLY1305_SHA256 = 0xCC15; /* * draft-josefsson-salsa20-tls-04 */ public static final int TLS_RSA_WITH_ESTREAM_SALSA20_SHA1 = 0xE410; public static final int TLS_RSA_WITH_SALSA20_SHA1 = 0xE411; public static final int TLS_ECDHE_RSA_WITH_ESTREAM_SALSA20_SHA1 = 0xE412; public static final 
int TLS_ECDHE_RSA_WITH_SALSA20_SHA1 = 0xE413; public static final int TLS_ECDHE_ECDSA_WITH_ESTREAM_SALSA20_SHA1 = 0xE414; public static final int TLS_ECDHE_ECDSA_WITH_SALSA20_SHA1 = 0xE415; public static final int TLS_PSK_WITH_ESTREAM_SALSA20_SHA1 = 0xE416; public static final int TLS_PSK_WITH_SALSA20_SHA1 = 0xE417; public static final int TLS_ECDHE_PSK_WITH_ESTREAM_SALSA20_SHA1 = 0xE418; public static final int TLS_ECDHE_PSK_WITH_SALSA20_SHA1 = 0xE419; public static final int TLS_RSA_PSK_WITH_ESTREAM_SALSA20_SHA1 = 0xE41A; public static final int TLS_RSA_PSK_WITH_SALSA20_SHA1 = 0xE41B; public static final int TLS_DHE_PSK_WITH_ESTREAM_SALSA20_SHA1 = 0xE41C; public static final int TLS_DHE_PSK_WITH_SALSA20_SHA1 = 0xE41D; public static final int TLS_DHE_RSA_WITH_ESTREAM_SALSA20_SHA1 = 0xE41E; public static final int TLS_DHE_RSA_WITH_SALSA20_SHA1 = 0xE41F; /* * draft-ietf-tls-downgrade-scsv-00 */ public static final int TLS_FALLBACK_SCSV = 0x5600; public static boolean isSCSV(int cipherSuite) { switch (cipherSuite) { case TLS_EMPTY_RENEGOTIATION_INFO_SCSV: case TLS_FALLBACK_SCSV: return true; default: return false; } } }
package edu.hm.hafner.analysis.parser;

import java.util.Iterator;

import org.junit.jupiter.api.Test;

import edu.hm.hafner.analysis.AbstractParserTest;
import edu.hm.hafner.analysis.Issue;
import edu.hm.hafner.analysis.Report;
import edu.hm.hafner.analysis.Severity;
import edu.hm.hafner.analysis.assertions.SoftAssertions;

import static edu.hm.hafner.analysis.assertions.Assertions.*;

/**
 * Tests the class {@link GccParser}.
 *
 * @author Ullrich Hafner
 * @author Raphael Furch
 */
class GccParserTest extends AbstractParserTest {
    // Local aliases so every assertion refers to categories the same way.
    private static final String GCC_ERROR = GccParser.GCC_ERROR;
    private static final String GCC_WARNING = "GCC warning";

    protected GccParserTest() {
        super("gcc.txt");
    }

    /**
     * Checks that a false positive is not reported anymore.
     *
     * @see <a href="https://issues.jenkins-ci.org/browse/JENKINS-34141">Issue 34141</a>
     */
    @Test
    void issue34141() {
        Report warnings = parse("issue34141.txt");

        assertThat(warnings).isEmpty();
    }

    /**
     * Verifies that the message contains escaped XML characters.
     *
     * @see <a href="https://issues.jenkins-ci.org/browse/JENKINS-17309">Issue 17309</a>
     */
    @Test
    void issue17309() {
        Report warnings = parse("issue17309.txt");

        assertThat(warnings).hasSize(1);
        try (SoftAssertions softly = new SoftAssertions()) {
            softly.assertThat(warnings.get(0))
                    .hasLineStart(4)
                    .hasLineEnd(4)
                    .hasMessage("dereferencing pointer &apos;&lt;anonymous&gt;&apos; does break strict-aliasing rules")
                    .hasFileName("foo.cc")
                    .hasCategory(GCC_ERROR)
                    .hasSeverity(Severity.WARNING_HIGH);
        }
    }

    /**
     * Parses a file with one warning that are started by ant.
     *
     * @see <a href="https://issues.jenkins-ci.org/browse/JENKINS-9926">Issue 9926</a>
     */
    @Test
    void issue9926() {
        Report warnings = parse("issue9926.txt");

        assertThat(warnings).hasSize(1);
        try (SoftAssertions softly = new SoftAssertions()) {
            softly.assertThat(warnings.get(0))
                    .hasLineStart(52)
                    .hasLineEnd(52)
                    .hasMessage("large integer implicitly truncated to unsigned type")
                    .hasFileName("src/test_simple_sgs_message.cxx")
                    .hasCategory(GCC_WARNING)
                    .hasSeverity(Severity.WARNING_NORMAL);
        }
    }

    /**
     * Verifies all eight issues of the default file {@code gcc.txt}.
     */
    @Override
    protected void assertThatIssuesArePresent(final Report report, final SoftAssertions softly) {
        softly.assertThat(report).hasSize(8);

        Iterator<Issue> iterator = report.iterator();
        softly.assertThat(iterator.next())
                .hasLineStart(451)
                .hasLineEnd(451)
                .hasMessage("`void yyunput(int, char*)&apos; defined but not used")
                .hasFileName("testhist.l")
                .hasCategory(GCC_WARNING)
                .hasSeverity(Severity.WARNING_NORMAL);
        softly.assertThat(iterator.next())
                .hasLineStart(73)
                .hasLineEnd(73)
                .hasMessage("implicit typename is deprecated, please see the documentation for details")
                .hasFileName("/u1/drjohn/bfdist/packages/RegrTest/V00-03-01/RgtAddressLineScan.cc")
                .hasCategory(GCC_ERROR)
                .hasSeverity(Severity.WARNING_HIGH);
        softly.assertThat(iterator.next())
                .hasLineStart(4)
                .hasLineEnd(4)
                .hasMessage("foo.h: No such file or directory")
                .hasFileName("foo.cc")
                .hasCategory(GCC_ERROR)
                .hasSeverity(Severity.WARNING_HIGH);
        softly.assertThat(iterator.next())
                .hasLineStart(0)
                .hasLineEnd(0)
                .hasMessage("undefined reference to &apos;missing_symbol&apos;")
                .hasFileName("foo.so")
                .hasCategory(GCC_ERROR)
                .hasSeverity(Severity.WARNING_HIGH);
        softly.assertThat(iterator.next())
                .hasLineStart(678)
                .hasLineEnd(678)
                .hasMessage("missing initializer for member sigaltstack::ss_sp")
                .hasFileName("../../lib/linux-i686/include/boost/test/impl/execution_monitor.ipp")
                .hasCategory(GCC_WARNING)
                .hasSeverity(Severity.WARNING_NORMAL);
        softly.assertThat(iterator.next())
                .hasLineStart(678)
                .hasLineEnd(678)
                .hasMessage("missing initializer for member sigaltstack::ss_flags")
                .hasFileName("../../lib/linux-i686/include/boost/test/impl/execution_monitor.ipp")
                .hasCategory(GCC_WARNING)
                .hasSeverity(Severity.WARNING_NORMAL);
        softly.assertThat(iterator.next())
                .hasLineStart(678)
                .hasLineEnd(678)
                .hasMessage("missing initializer for member sigaltstack::ss_size")
                .hasFileName("../../lib/linux-i686/include/boost/test/impl/execution_monitor.ipp")
                .hasCategory(GCC_WARNING)
                .hasSeverity(Severity.WARNING_NORMAL);
        softly.assertThat(iterator.next())
                .hasLineStart(52)
                .hasLineEnd(52)
                .hasMessage("large integer implicitly truncated to unsigned type")
                .hasFileName("src/test_simple_sgs_message.cxx")
                .hasCategory(GCC_WARNING)
                .hasSeverity(Severity.WARNING_NORMAL);
    }

    /**
     * Parses a warning log with 2 new GCC warnings.
     *
     * @see <a href="https://issues.jenkins-ci.org/browse/JENKINS-3897">Issue 3897</a>
     */
    @Test
    void issue3897and3898() {
        Report warnings = parse("issue3897.txt");

        assertThat(warnings).hasSize(3);
        Iterator<Issue> iterator = warnings.iterator();
        try (SoftAssertions softly = new SoftAssertions()) {
            softly.assertThat(iterator.next())
                    .hasLineStart(12)
                    .hasLineEnd(12)
                    .hasMessage("file.h: No such file or directory")
                    .hasFileName("/dir1/dir2/file.c")
                    .hasCategory(GCC_ERROR)
                    .hasSeverity(Severity.WARNING_HIGH);
            softly.assertThat(iterator.next())
                    .hasLineStart(233)
                    .hasLineEnd(233)
                    .hasMessage("undefined reference to `MyInterface::getValue() const&apos;")
                    .hasFileName("/dir1/dir3/file.cpp")
                    .hasCategory(GCC_ERROR)
                    .hasSeverity(Severity.WARNING_HIGH);
            softly.assertThat(iterator.next())
                    .hasLineStart(20)
                    .hasLineEnd(20)
                    .hasMessage("invalid preprocessing directive #incldue")
                    .hasFileName("/dir1/dir2/file.cpp")
                    .hasCategory(GCC_ERROR)
                    .hasSeverity(Severity.WARNING_HIGH);
        }
    }

    /**
     * Parses a warning log with 2 GCC warnings, one of them a note.
     *
     * @see <a href="https://issues.jenkins-ci.org/browse/JENKINS-4712">Issue 4712</a>
     */
    @Test
    void issue4712() {
        Report warnings = parse("issue4712.txt");

        assertThat(warnings).hasSize(2);
        Iterator<Issue> iterator = warnings.iterator();
        try (SoftAssertions softly = new SoftAssertions()) {
            softly.assertThat(iterator.next())
                    .hasLineStart(352)
                    .hasLineEnd(352)
                    .hasMessage("&apos;s2.mepSector2::lubrications&apos; may be used")
                    .hasFileName("main/mep.cpp")
                    .hasCategory(GCC_WARNING)
                    .hasSeverity(Severity.WARNING_NORMAL);
            softly.assertThat(iterator.next())
                    .hasLineStart(1477)
                    .hasLineEnd(1477)
                    .hasMessage("&apos;s2.mepSector2::lubrications&apos; was declared here")
                    .hasFileName("main/mep.cpp")
                    .hasCategory("GCC note")
                    .hasSeverity(Severity.WARNING_LOW);
        }
    }

    /**
     * Parses a warning log with a ClearCase command line that should not be parsed as a warning.
     *
     * @see <a href="https://issues.jenkins-ci.org/browse/JENKINS-4712">Issue 4712</a>
     */
    @Test
    void issue4700() {
        Report warnings = parse("issue4700.txt");

        assertThat(warnings).isEmpty();
    }

    /**
     * Parses a warning log with [exec] prefix.
     *
     * @see <a href="https://issues.jenkins-ci.org/browse/JENKINS-4707">Issue 4707</a>
     */
    @Test
    void issue4707() {
        Report warnings = parse("issue4707.txt");

        assertThat(warnings).hasSize(11).hasDuplicatesSize(11);
        try (SoftAssertions softly = new SoftAssertions()) {
            softly.assertThat(warnings.get(0))
                    .hasLineStart(1128)
                    .hasLineEnd(1128)
                    .hasMessage("NULL used in arithmetic")
                    .hasFileName(
                            "/Users/rthomson/hudson/jobs/Bryce7-MacWarnings/workspace/bryce7/src/Bryce/Plugins/3DSExport/3DSExport.cpp")
                    .hasCategory(GCC_WARNING)
                    .hasSeverity(Severity.WARNING_NORMAL);
        }
    }

    /**
     * Parses a linker error.
     *
     * @see <a href="https://issues.jenkins-ci.org/browse/JENKINS-4010">Issue 4010</a>
     */
    @Test
    void issue4010() {
        Report warnings = parse("issue4010.txt");

        assertThat(warnings).hasSize(1);
        try (SoftAssertions softly = new SoftAssertions()) {
            softly.assertThat(warnings.get(0))
                    .hasLineStart(0)
                    .hasLineEnd(0)
                    .hasMessage("cannot find -lMyLib")
                    .hasFileName("MyLib")
                    .hasCategory(GccParser.LINKER_ERROR)
                    .hasSeverity(Severity.WARNING_HIGH);
        }
    }

    /**
     * Parses a warning log with 6 new objective C warnings.
     *
     * @see <a href="https://issues.jenkins-ci.org/browse/JENKINS-4274">Issue 4274</a>
     */
    @Test
    void issue4274() {
        Report warnings = parse("issue4274.txt");

        assertThat(warnings).hasSize(4);
        Iterator<Issue> iterator = warnings.iterator();
        try (SoftAssertions softly = new SoftAssertions()) {
            softly.assertThat(iterator.next())
                    .hasLineStart(638)
                    .hasLineEnd(638)
                    .hasMessage("local declaration of &quot;command&quot; hides instance variable")
                    .hasFileName("folder1/file1.m")
                    .hasCategory(GCC_WARNING)
                    .hasSeverity(Severity.WARNING_NORMAL);
            softly.assertThat(iterator.next())
                    .hasLineStart(640)
                    .hasLineEnd(640)
                    .hasMessage("instance variable &quot;command&quot; accessed in class method")
                    .hasFileName("folder1/file1.m")
                    .hasCategory(GCC_WARNING)
                    .hasSeverity(Severity.WARNING_NORMAL);
            softly.assertThat(iterator.next())
                    .hasLineStart(47)
                    .hasLineEnd(47)
                    .hasMessage("&quot;oldGeb&quot; might be used uninitialized in this function")
                    .hasFileName("file1.m")
                    .hasCategory(GCC_WARNING)
                    .hasSeverity(Severity.WARNING_NORMAL);
            softly.assertThat(iterator.next())
                    .hasLineStart(640)
                    .hasLineEnd(640)
                    .hasMessage("local declaration of &quot;command&quot; hides instance variable")
                    .hasFileName("file1.m")
                    .hasCategory(GCC_WARNING)
                    .hasSeverity(Severity.WARNING_NORMAL);
        }
    }

    /**
     * Parses a file with one warning and matching warning that will be excluded afterwards.
     *
     * @see <a href="https://issues.jenkins-ci.org/browse/JENKINS-4260">Issue 4260</a>
     */
    @Test
    void issue4260() {
        Report warnings = parse("issue4260.txt");

        assertThat(warnings).hasSize(1);
    }

    @Override
    protected GccParser createParser() {
        return new GccParser();
    }
}
package com.github.bogdanlivadariu.jenkins.reporting.junit; import hudson.Extension; import hudson.FilePath; import hudson.Launcher; import hudson.model.Action; import hudson.model.BuildListener; import hudson.model.Result; import hudson.model.AbstractBuild; import hudson.model.AbstractProject; import hudson.model.Computer; import hudson.slaves.SlaveComputer; import hudson.tasks.BuildStepDescriptor; import hudson.tasks.BuildStepMonitor; import hudson.tasks.Publisher; import hudson.tasks.Recorder; import hudson.util.FormValidation; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.List; import javax.servlet.ServletException; import org.apache.tools.ant.DirectoryScanner; import org.kohsuke.stapler.AncestorInPath; import org.kohsuke.stapler.DataBoundConstructor; import org.kohsuke.stapler.QueryParameter; import com.github.bogdanlivadariu.reporting.junit.builder.JUnitReportBuilder; @SuppressWarnings("unchecked") public class JUnitTestReportPublisher extends Recorder { private static final String DEFAULT_FILE_INCLUDE_PATTERN = "**/*.xml"; private final String jsonReportDirectory; private final String fileIncludePattern; private final String fileExcludePattern; private final boolean markAsUnstable; private final boolean copyHTMLInWorkspace; @DataBoundConstructor public JUnitTestReportPublisher(String jsonReportDirectory, String fileIncludePattern, String fileExcludePattern, boolean markAsUnstable, boolean copyHTMLInWorkspace) { this.jsonReportDirectory = jsonReportDirectory; this.fileIncludePattern = fileIncludePattern; this.fileExcludePattern = fileExcludePattern; this.markAsUnstable = markAsUnstable; this.copyHTMLInWorkspace = copyHTMLInWorkspace; } public String getJsonReportDirectory() { return jsonReportDirectory; } public String getFileIncludePattern() { return fileIncludePattern; } public String getFileExcludePattern() { return fileExcludePattern; } public boolean isMarkAsUnstable() { return markAsUnstable; } public 
boolean isCopyHTMLInWorkspace() { return copyHTMLInWorkspace; } private String[] findJsonFiles(File targetDirectory, String fileIncludePattern, String fileExcludePattern) { DirectoryScanner scanner = new DirectoryScanner(); if (fileIncludePattern == null || fileIncludePattern.isEmpty()) { scanner.setIncludes(new String[] {DEFAULT_FILE_INCLUDE_PATTERN}); } else { scanner.setIncludes(new String[] {fileIncludePattern}); } if (fileExcludePattern != null) { scanner.setExcludes(new String[] {fileExcludePattern}); } scanner.setBasedir(targetDirectory); scanner.scan(); return scanner.getIncludedFiles(); } @Override public boolean perform(AbstractBuild build, Launcher launcher, BuildListener listener) throws IOException, InterruptedException { listener.getLogger().println("[JUnitReportPublisher] Compiling JUnit Html Reports ..."); // source directory (possibly on slave) FilePath workspaceJsonReportDirectory; if (getJsonReportDirectory().isEmpty()) { workspaceJsonReportDirectory = build.getWorkspace(); } else { workspaceJsonReportDirectory = new FilePath(build.getWorkspace(), getJsonReportDirectory()); } // target directory (always on master) File targetBuildDirectory = new File(build.getRootDir(), "junit-reports-with-handlebars"); if (!targetBuildDirectory.exists()) { targetBuildDirectory.mkdirs(); } if (Computer.currentComputer() instanceof SlaveComputer) { listener.getLogger().println( "[JUnit test report builder] Copying XML files from slave: " + workspaceJsonReportDirectory.getRemote() + " to master reports directory: " + targetBuildDirectory); } else { listener.getLogger().println( "[JUnit test report builder] Copying XML files from: " + workspaceJsonReportDirectory.getRemote() + " to reports directory: " + targetBuildDirectory); } File targetBuildJsonDirectory = new File(targetBuildDirectory.getAbsolutePath() + "/xmlData"); if (!targetBuildJsonDirectory.exists()) { targetBuildJsonDirectory.mkdirs(); } String includePattern = (fileIncludePattern == null || 
fileIncludePattern.isEmpty()) ? DEFAULT_FILE_INCLUDE_PATTERN : fileIncludePattern; workspaceJsonReportDirectory.copyRecursiveTo(includePattern, new FilePath( targetBuildJsonDirectory)); // generate the reports from the targetBuildDirectory Result result = Result.NOT_BUILT; String[] jsonReportFiles = findJsonFiles(targetBuildJsonDirectory, getFileIncludePattern(), getFileExcludePattern()); if (jsonReportFiles.length > 0) { listener.getLogger().println( String.format("[JUnitReportPublisher] Found %d xml files.", jsonReportFiles.length)); int jsonIndex = 0; for (String jsonReportFile : jsonReportFiles) { listener.getLogger().println( "[JUnit test report builder] " + jsonIndex + ". Found a xml file: " + jsonReportFile); jsonIndex++; } listener.getLogger().println("[JUnit test report builder] Generating HTML reports"); try { List<String> fullJsonPaths = new ArrayList<String>(); // reportBuilder.generateReports(); for (String fi : jsonReportFiles) { fullJsonPaths.add(targetBuildJsonDirectory + "/" + fi); } for (String ss : fullPathToXmlFiles(jsonReportFiles, targetBuildJsonDirectory)) { listener.getLogger().println("processing: " + ss); } JUnitReportBuilder rep = new JUnitReportBuilder(fullPathToXmlFiles(jsonReportFiles, targetBuildJsonDirectory), targetBuildDirectory.getAbsolutePath()); boolean featuresResult = rep.writeReportsOnDisk(); if (featuresResult) { result = Result.SUCCESS; } else { result = isMarkAsUnstable() ? 
Result.UNSTABLE : Result.FAILURE; } // finally copy to workspace, if needed if (isCopyHTMLInWorkspace()) { FilePath workspaceCopyDirectory = new FilePath(build.getWorkspace(), "junit-reports-with-handlebars"); if (workspaceCopyDirectory.exists()) { workspaceCopyDirectory.deleteRecursive(); } listener.getLogger().println( "[JUnit test report builder] Copying report to workspace directory: " + workspaceCopyDirectory.toURI()); new FilePath(targetBuildDirectory).copyRecursiveTo("**/*.html", workspaceCopyDirectory); } } catch (Exception e) { result = Result.FAILURE; listener.getLogger().println( "[JUnit test report builder] there was an error generating the reports: " + e); for (StackTraceElement error : e.getStackTrace()) { listener.getLogger().println(error); } } } else { result = Result.SUCCESS; listener.getLogger().println( "[JUnit test report builder] xml path for the reports might be wrong, " + targetBuildDirectory); } build.addAction(new JunitTestReportBuildAction(build)); build.setResult(result); return true; } private List<String> fullPathToXmlFiles(String[] xmlFiles, File targetBuildDirectory) { List<String> fullPathList = new ArrayList<String>(); for (String file : xmlFiles) { fullPathList.add(new File(targetBuildDirectory, file).getAbsolutePath()); } return fullPathList; } @Override public Action getProjectAction(AbstractProject< ? , ? > project) { return new JUnitTestReportProjectAction(project); } @Extension public static class DescriptorImpl extends BuildStepDescriptor<Publisher> { @Override public String getDisplayName() { return "Publish JUnit reports generated with handlebars"; } // Performs on-the-fly validation on the file mask wildcard. public FormValidation doCheck(@AncestorInPath AbstractProject project, @QueryParameter String value) throws IOException, ServletException { FilePath ws = project.getSomeWorkspace(); return ws != null ? ws.validateRelativeDirectory(value) : FormValidation.ok(); } @Override public boolean isApplicable(Class< ? 
extends AbstractProject> jobType) { return true; } } public BuildStepMonitor getRequiredMonitorService() { return BuildStepMonitor.NONE; } }
package com.cassandra.utility.method1;

import com.datastax.driver.core.*;
import com.google.common.reflect.TypeToken;

import java.math.BigDecimal;
import java.math.BigInteger;
import java.net.InetAddress;
import java.nio.ByteBuffer;
import java.util.*;

/**
 * Null-object implementation of the Cassandra driver {@link Row} interface.
 * Every accessor returns a neutral value (null for references, 0 for numeric
 * primitives, false for booleans) and no method has side effects.
 *
 * NOTE(review): presumably used as a terminal/sentinel marker in a row-processing
 * pipeline (e.g. a poison pill in a queue) — confirm against the callers.
 *
 * Created by siddharth on 8/9/16.
 */
public class RowTerminal implements Row {
    // --- row metadata ------------------------------------------------------
    @Override public ColumnDefinitions getColumnDefinitions() { return null; }
    @Override public Token getToken(int i) { return null; }
    @Override public Token getToken(String s) { return null; }
    @Override public Token getPartitionKeyToken() { return null; }

    // --- accessors by column index -----------------------------------------
    @Override public boolean isNull(int i) { return false; }
    @Override public boolean getBool(int i) { return false; }
    @Override public byte getByte(int i) { return 0; }
    @Override public short getShort(int i) { return 0; }
    @Override public int getInt(int i) { return 0; }
    @Override public long getLong(int i) { return 0; }
    @Override public Date getTimestamp(int i) { return null; }
    @Override public LocalDate getDate(int i) { return null; }
    @Override public long getTime(int i) { return 0; }
    @Override public float getFloat(int i) { return 0; }
    @Override public double getDouble(int i) { return 0; }
    @Override public ByteBuffer getBytesUnsafe(int i) { return null; }
    @Override public ByteBuffer getBytes(int i) { return null; }
    @Override public String getString(int i) { return null; }
    @Override public BigInteger getVarint(int i) { return null; }
    @Override public BigDecimal getDecimal(int i) { return null; }
    @Override public UUID getUUID(int i) { return null; }
    @Override public InetAddress getInet(int i) { return null; }
    @Override public <T> List<T> getList(int i, Class<T> aClass) { return null; }
    @Override public <T> List<T> getList(int i, TypeToken<T> typeToken) { return null; }
    @Override public <T> Set<T> getSet(int i, Class<T> aClass) { return null; }
    @Override public <T> Set<T> getSet(int i, TypeToken<T> typeToken) { return null; }
    @Override public <K, V> Map<K, V> getMap(int i, Class<K> aClass, Class<V> aClass1) { return null; }
    @Override public <K, V> Map<K, V> getMap(int i, TypeToken<K> typeToken, TypeToken<V> typeToken1) { return null; }
    @Override public UDTValue getUDTValue(int i) { return null; }
    @Override public TupleValue getTupleValue(int i) { return null; }
    @Override public Object getObject(int i) { return null; }
    @Override public <T> T get(int i, Class<T> aClass) { return null; }
    @Override public <T> T get(int i, TypeToken<T> typeToken) { return null; }
    @Override public <T> T get(int i, TypeCodec<T> typeCodec) { return null; }

    // --- accessors by column name ------------------------------------------
    @Override public boolean isNull(String s) { return false; }
    @Override public boolean getBool(String s) { return false; }
    @Override public byte getByte(String s) { return 0; }
    @Override public short getShort(String s) { return 0; }
    @Override public int getInt(String s) { return 0; }
    @Override public long getLong(String s) { return 0; }
    @Override public Date getTimestamp(String s) { return null; }
    @Override public LocalDate getDate(String s) { return null; }
    @Override public long getTime(String s) { return 0; }
    @Override public float getFloat(String s) { return 0; }
    @Override public double getDouble(String s) { return 0; }
    @Override public ByteBuffer getBytesUnsafe(String s) { return null; }
    @Override public ByteBuffer getBytes(String s) { return null; }
    @Override public String getString(String s) { return null; }
    @Override public BigInteger getVarint(String s) { return null; }
    @Override public BigDecimal getDecimal(String s) { return null; }
    @Override public UUID getUUID(String s) { return null; }
    @Override public InetAddress getInet(String s) { return null; }
    @Override public <T> List<T> getList(String s, Class<T> aClass) { return null; }
    @Override public <T> List<T> getList(String s, TypeToken<T> typeToken) { return null; }
    @Override public <T> Set<T> getSet(String s, Class<T> aClass) { return null; }
    @Override public <T> Set<T> getSet(String s, TypeToken<T> typeToken) { return null; }
    @Override public <K, V> Map<K, V> getMap(String s, Class<K> aClass, Class<V> aClass1) { return null; }
    @Override public <K, V> Map<K, V> getMap(String s, TypeToken<K> typeToken, TypeToken<V> typeToken1) { return null; }
    @Override public UDTValue getUDTValue(String s) { return null; }
    @Override public TupleValue getTupleValue(String s) { return null; }
    @Override public Object getObject(String s) { return null; }
    @Override public <T> T get(String s, Class<T> aClass) { return null; }
    @Override public <T> T get(String s, TypeToken<T> typeToken) { return null; }
    @Override public <T> T get(String s, TypeCodec<T> typeCodec) { return null; }
}
package org.mindinformatics.gwt.framework.component.users.ui;

import java.util.ArrayList;
import java.util.Set;

import org.mindinformatics.gwt.domeo.client.Domeo;
import org.mindinformatics.gwt.framework.component.users.model.MUserGroup;
import org.mindinformatics.gwt.framework.model.users.IUserGroup;
import org.mindinformatics.gwt.framework.model.users.IUserRole;
import org.mindinformatics.gwt.framework.src.IApplication;
import org.mindinformatics.gwt.framework.src.IContainerPanel;
import org.mindinformatics.gwt.framework.src.IContentPanel;
import org.mindinformatics.gwt.framework.src.IResizable;
import org.mindinformatics.gwt.utils.src.ResourcesUtils;

import com.google.gwt.core.client.GWT;
import com.google.gwt.event.dom.client.ChangeEvent;
import com.google.gwt.event.dom.client.ChangeHandler;
import com.google.gwt.uibinder.client.UiBinder;
import com.google.gwt.uibinder.client.UiField;
import com.google.gwt.user.client.Window;
import com.google.gwt.user.client.ui.Anchor;
import com.google.gwt.user.client.ui.Composite;
import com.google.gwt.user.client.ui.FlowPanel;
import com.google.gwt.user.client.ui.HorizontalPanel;
import com.google.gwt.user.client.ui.Image;
import com.google.gwt.user.client.ui.Label;
import com.google.gwt.user.client.ui.ListBox;
import com.google.gwt.user.client.ui.SimplePanel;
import com.google.gwt.user.client.ui.TabLayoutPanel;
import com.google.gwt.user.client.ui.VerticalPanel;

/**
 * Viewer panel showing the current user's account details (name, email,
 * picture, link) plus the list of groups the user belongs to; selecting a
 * group shows its name, description, permissions and roles.
 *
 * @author Paolo Ciccarese <paolo.ciccarese@gmail.com>
 */
public class UserAccountViewerPanel extends Composite implements IContentPanel, IResizable {

    private static final String TITLE = "Account";

    // UiBinder template binding for this panel.
    interface Binder extends UiBinder<VerticalPanel, UserAccountViewerPanel> { }
    private static final Binder binder = GWT.create(Binder.class);

    //private Resources _resources;
    private IApplication _application;
    private IContainerPanel _containerPanel;
    // Groups of the current user; populated by retrieveUsersGroups(), never null.
    private ArrayList<IUserGroup> _groups;

    // Layout
    @UiField VerticalPanel main;
    @UiField TabLayoutPanel tabToolsPanel;
    @UiField SimplePanel picturePanel;
    @UiField Label titlePanel;
    @UiField Label firstnamePanel;
    @UiField Label middlenamePanel;
    @UiField Label lastnamePanel;
    @UiField Label emailPanel;
    @UiField Anchor userLink;
    @UiField FlowPanel groupsPanel;
    @UiField FlowPanel groupDetails;
    @UiField Label namePanel;
    @UiField Label descriptionPanel;
    @UiField Label permissionPanel;
    @UiField Anchor groupLink;
    @UiField Label rolePanel;
    @UiField FlowPanel infoPanel;
    @UiField HorizontalPanel footerPanel;

    public void setContainer(IContainerPanel containerPanel) {
        _containerPanel = containerPanel;
    }

    public IContainerPanel getContainer() {
        return _containerPanel;
    }

    // NOTE(review): this shadows Widget#getTitle() (the tooltip accessor) with a
    // fixed tab title — presumably intentional for IContentPanel, confirm.
    public String getTitle() {
        return TITLE;
    }

    // ------------------------------------------------------------------------
    // CREATION OF ANNOTATIONS OF VARIOUS KIND
    // ------------------------------------------------------------------------

    /**
     * Builds the panel, populates the user fields from the agent manager and
     * fills the group list box for the current user.
     */
    public UserAccountViewerPanel(IApplication application) {
        _application = application;
        //_resources = resources;
        //_listPanel = new LogListPanel(_application);

        // Create layout
        initWidget(binder.createAndBindUi(this));
        this.setWidth((Window.getClientWidth() - 140) + "px");

        // if(_application.getAgentManager().getUserPerson().getPicture()!=null) {
        //     picturePanel.add(ResourcesUtils.getImage(_application.getLogger(), _application.getAgentManager().getUserPerson().getPicture(),
        //         Domeo.resources.unknownPersonIcon()));
        // } else {
        picturePanel.add(new Image(Domeo.resources.unknownPersonIcon()));
        //}
        titlePanel.setText(_application.getAgentManager().getUserPerson().getTitle());
        firstnamePanel.setText(_application.getAgentManager().getUserPerson().getFirstName());
        middlenamePanel.setText(_application.getAgentManager().getUserPerson().getMiddleName());
        lastnamePanel.setText(_application.getAgentManager().getUserPerson().getLastName());
        emailPanel.setText(_application.getAgentManager().getUserPerson().getEmail());
        userLink.setText("More info");
        userLink.setTarget("_blank");
        userLink.setHref(_application.getAgentManager().getUserPerson().getUri());

        final ListBox groupsList = new ListBox();
        groupsList.setWidth("260px");
        groupsList.addChangeHandler(new ChangeHandler() {
            @Override
            public void onChange(ChangeEvent event) {
                int selectedIndex = groupsList.getSelectedIndex();
                displayGroupInfo(selectedIndex);
            }
        });
        retrieveUsersGroups();
        groupsList.setVisibleItemCount(10);
        int counter = 0;
        for(IUserGroup group: _groups) {
            // NOTE(review): on the first iteration this selects index 0 and shows
            // the group details BEFORE the first item is added to the list box; the
            // setSelectedIndex(0) here therefore acts on an empty list. The details
            // display still works (it reads _groups, not the list box). Looks like
            // only displayGroupInfo(0) is needed here — confirm before changing.
            if(counter++ == 0) {
                groupsList.setSelectedIndex(0);
                displayGroupInfo(0);
            }
            groupsList.addItem(group.getDescription(), group.getUri());
        }
        groupsList.setSelectedIndex(0);
        groupsPanel.add(groupsList);

        // https://developers.google.com/chart/interactive/docs/gallery
        // https://google-developers.appspot.com/chart/interactive/docs/gallery/linechart
        //LineChart pie = new LineChart(createTable(), createOptions());
        //pie.addSelectHandler(createSelectHandler(pie));
        //infoPanel.add(pie);
    }

    /*
    private Options createOptions() {
        Options options = Options.create();
        options.setWidth(400);
        options.setHeight(240);
        //options.set3D(true);
        options.setTitle("My Activity");
        return options;
    }

    private AbstractDataTable createTable() {
        DataTable data = DataTable.create();
        data.addColumn(ColumnType.STRING, "Task");
        data.addColumn(ColumnType.NUMBER, "Hours per Day");
        data.addRows(2);
        data.setValue(0, 0, "Work");
        data.setValue(0, 1, 14);
        data.setValue(1, 0, "Sleep");
        data.setValue(1, 1, 10);
        return data;
    }
    */

    /** Loads the user's groups into {@code _groups}; results in an empty list when none. */
    public void retrieveUsersGroups() {
        Set<IUserGroup> groups = _application.getUserManager().getUsersGroups();
        _groups = new ArrayList<IUserGroup>();
        if(groups!=null) _groups.addAll(groups);
    }

    /** Shows the details of the group at {@code groupIndex} in {@code _groups}. */
    public void displayGroupInfo(Integer groupIndex) {
        showGroupDetails(_groups.get(groupIndex));
    }

    /** Renders name, description, permissions, link and roles of {@code group}. */
    public void showGroupDetails(IUserGroup group) {
        namePanel.setText(group.getName());
        descriptionPanel.setText(group.getDescription());
        // NOTE(review): a write-only group (write without read) falls through to
        // the "Permissions unknown" branch — confirm whether that combination can
        // occur and deserves its own label.
        if(group.isReadPermission() && group.isWritePermission())
            permissionPanel.setText("Permissions: Read and write");
        else if(group.isReadPermission() && !group.isWritePermission())
            permissionPanel.setText("Permissions: Read only");
        else
            permissionPanel.setText("Permissions unknown (!!!)");
        groupLink.setText("More info");
        groupLink.setTarget("_blank");
        groupLink.setHref(group.getGroupLink());

        // Roles are only available on the concrete MUserGroup implementation.
        boolean firstFlag = false;
        StringBuffer sb = new StringBuffer();
        if(group instanceof MUserGroup) {
            for(IUserRole role: ((MUserGroup)group).getRoles()) {
                if(firstFlag) sb.append(", ");
                sb.append(role.getName());
                firstFlag = true;
            }
        }
        rolePanel.setText("Roles: " + sb.toString());
    }

    public void hideFooter() {
        footerPanel.setVisible(false);
    }

    public void showProgressMessage() {

    }

    public void showCompletionMessage() {

    }

    /** Re-fits the panel to the browser window after a resize. */
    @Override
    public void resized() {
        this.setWidth((Window.getClientWidth() - 140) + "px");
        tabToolsPanel.setWidth((Window.getClientWidth() - 130) + "px");
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.activemq.artemis.tests.integration.management; import org.apache.activemq.artemis.api.config.ActiveMQDefaultConfiguration; import org.apache.activemq.artemis.api.core.ActiveMQException; import org.apache.activemq.artemis.api.core.RoutingType; import org.apache.activemq.artemis.api.core.SimpleString; import org.apache.activemq.artemis.api.core.client.ClientConsumer; import org.apache.activemq.artemis.api.core.client.ClientMessage; import org.apache.activemq.artemis.api.core.client.ClientProducer; import org.apache.activemq.artemis.api.core.client.ClientSession; import org.apache.activemq.artemis.api.core.client.ClientSessionFactory; import org.apache.activemq.artemis.api.core.client.ServerLocator; import org.apache.activemq.artemis.api.core.management.ManagementHelper; import org.apache.activemq.artemis.core.client.impl.ClientSessionInternal; import org.apache.activemq.artemis.core.server.ActiveMQServer; import org.apache.activemq.artemis.core.server.ActiveMQServers; import org.apache.activemq.artemis.core.server.plugin.impl.NotificationActiveMQServerPlugin; import org.apache.activemq.artemis.tests.util.ActiveMQTestBase; import org.apache.activemq.artemis.utils.RandomUtil; import 
org.junit.Assert; import org.junit.Before; import org.junit.Test; import static org.apache.activemq.artemis.api.core.management.CoreNotificationType.CONNECTION_CREATED; import static org.apache.activemq.artemis.api.core.management.CoreNotificationType.CONNECTION_DESTROYED; import static org.apache.activemq.artemis.api.core.management.CoreNotificationType.SESSION_CREATED; import static org.apache.activemq.artemis.api.core.management.CoreNotificationType.SESSION_CLOSED; import static org.apache.activemq.artemis.api.core.management.CoreNotificationType.ADDRESS_ADDED; import static org.apache.activemq.artemis.api.core.management.CoreNotificationType.ADDRESS_REMOVED; import static org.apache.activemq.artemis.api.core.management.CoreNotificationType.BINDING_ADDED; import static org.apache.activemq.artemis.api.core.management.CoreNotificationType.BINDING_REMOVED; import static org.apache.activemq.artemis.api.core.management.CoreNotificationType.CONSUMER_CLOSED; import static org.apache.activemq.artemis.api.core.management.CoreNotificationType.CONSUMER_CREATED; import static org.apache.activemq.artemis.api.core.management.CoreNotificationType.MESSAGE_DELIVERED; import static org.apache.activemq.artemis.api.core.management.CoreNotificationType.MESSAGE_EXPIRED; public class NotificationTest extends ActiveMQTestBase { // Constants ----------------------------------------------------- // Attributes ---------------------------------------------------- private ActiveMQServer server; private ClientSession session; private ClientConsumer notifConsumer; private SimpleString notifQueue; private ServerLocator locator; // Static -------------------------------------------------------- // Constructors -------------------------------------------------- // Public -------------------------------------------------------- @Test public void testBINDING_ADDED() throws Exception { SimpleString queue = RandomUtil.randomSimpleString(); SimpleString address = RandomUtil.randomSimpleString(); 
boolean durable = RandomUtil.randomBoolean(); NotificationTest.flush(notifConsumer); session.createQueue(address, queue, durable); //the first message received will be for the address creation ClientMessage[] notifications = NotificationTest.consumeMessages(2, notifConsumer); Assert.assertEquals(BINDING_ADDED.toString(), notifications[1].getObjectProperty(ManagementHelper.HDR_NOTIFICATION_TYPE).toString()); Assert.assertEquals(queue.toString(), notifications[1].getObjectProperty(ManagementHelper.HDR_ROUTING_NAME).toString()); Assert.assertEquals(address.toString(), notifications[1].getObjectProperty(ManagementHelper.HDR_ADDRESS).toString()); session.deleteQueue(queue); } @Test public void testBINDING_ADDEDWithMatchingFilter() throws Exception { SimpleString queue = RandomUtil.randomSimpleString(); SimpleString address = RandomUtil.randomSimpleString(); boolean durable = RandomUtil.randomBoolean(); System.out.println(queue); notifConsumer.close(); notifConsumer = session.createConsumer(notifQueue.toString(), ManagementHelper.HDR_ROUTING_NAME + "= '" + queue + "'"); NotificationTest.flush(notifConsumer); session.createQueue(address, queue, durable); ClientMessage[] notifications = NotificationTest.consumeMessages(1, notifConsumer); Assert.assertEquals(BINDING_ADDED.toString(), notifications[0].getObjectProperty(ManagementHelper.HDR_NOTIFICATION_TYPE).toString()); Assert.assertEquals(queue.toString(), notifications[0].getObjectProperty(ManagementHelper.HDR_ROUTING_NAME).toString()); Assert.assertEquals(address.toString(), notifications[0].getObjectProperty(ManagementHelper.HDR_ADDRESS).toString()); session.deleteQueue(queue); } @Test public void testBINDING_ADDEDWithNonMatchingFilter() throws Exception { SimpleString queue = RandomUtil.randomSimpleString(); SimpleString address = RandomUtil.randomSimpleString(); boolean durable = RandomUtil.randomBoolean(); System.out.println(queue); notifConsumer.close(); notifConsumer = session.createConsumer(notifQueue.toString(), 
ManagementHelper.HDR_ROUTING_NAME + " <> '" + queue + "' AND " + ManagementHelper.HDR_ADDRESS + " <> '" + address + "'"); NotificationTest.flush(notifConsumer); session.createQueue(address, queue, durable); NotificationTest.consumeMessages(0, notifConsumer); session.deleteQueue(queue); } @Test public void testBINDING_REMOVED() throws Exception { SimpleString queue = RandomUtil.randomSimpleString(); SimpleString address = RandomUtil.randomSimpleString(); boolean durable = RandomUtil.randomBoolean(); session.createQueue(address, queue, durable); NotificationTest.flush(notifConsumer); session.deleteQueue(queue); //There will be 2 notifications, first is for binding removal, second is for address removal ClientMessage[] notifications = NotificationTest.consumeMessages(2, notifConsumer); Assert.assertEquals(BINDING_REMOVED.toString(), notifications[0].getObjectProperty(ManagementHelper.HDR_NOTIFICATION_TYPE).toString()); Assert.assertEquals(queue.toString(), notifications[0].getObjectProperty(ManagementHelper.HDR_ROUTING_NAME).toString()); Assert.assertEquals(address.toString(), notifications[0].getObjectProperty(ManagementHelper.HDR_ADDRESS).toString()); } @Test public void testCONSUMER_CREATED() throws Exception { ClientSessionFactory sf = createSessionFactory(locator); ClientSession mySession = sf.createSession("myUser", "myPassword", false, true, true, locator.isPreAcknowledge(), locator.getAckBatchSize()); mySession.start(); SimpleString queue = RandomUtil.randomSimpleString(); SimpleString address = RandomUtil.randomSimpleString(); boolean durable = RandomUtil.randomBoolean(); session.createQueue(address, queue, durable); NotificationTest.flush(notifConsumer); ClientConsumer consumer = mySession.createConsumer(queue); SimpleString consumerName = SimpleString.toSimpleString(((ClientSessionInternal) mySession).getName()); ClientMessage[] notifications = NotificationTest.consumeMessages(1, notifConsumer); Assert.assertEquals(CONSUMER_CREATED.toString(), 
notifications[0].getObjectProperty(ManagementHelper.HDR_NOTIFICATION_TYPE).toString()); Assert.assertEquals(queue.toString(), notifications[0].getObjectProperty(ManagementHelper.HDR_ROUTING_NAME).toString()); Assert.assertEquals(address.toString(), notifications[0].getObjectProperty(ManagementHelper.HDR_ADDRESS).toString()); Assert.assertEquals(1, notifications[0].getObjectProperty(ManagementHelper.HDR_CONSUMER_COUNT)); Assert.assertEquals(SimpleString.toSimpleString("myUser"), notifications[0].getSimpleStringProperty(ManagementHelper.HDR_USER)); Assert.assertEquals(null, notifications[0].getSimpleStringProperty(ManagementHelper.HDR_VALIDATED_USER)); Assert.assertEquals(SimpleString.toSimpleString("invm:0"), notifications[0].getSimpleStringProperty(ManagementHelper.HDR_REMOTE_ADDRESS)); Assert.assertEquals(consumerName, notifications[0].getSimpleStringProperty(ManagementHelper.HDR_SESSION_NAME)); Assert.assertEquals(SimpleString.toSimpleString("unavailable"), notifications[0].getSimpleStringProperty(ManagementHelper.HDR_CERT_SUBJECT_DN)); consumer.close(); session.deleteQueue(queue); } @Test public void testCONSUMER_CLOSED() throws Exception { ClientSessionFactory sf = createSessionFactory(locator); ClientSession mySession = sf.createSession("myUser", "myPassword", false, true, true, locator.isPreAcknowledge(), locator.getAckBatchSize()); mySession.start(); SimpleString queue = RandomUtil.randomSimpleString(); SimpleString address = RandomUtil.randomSimpleString(); boolean durable = RandomUtil.randomBoolean(); mySession.createQueue(address, queue, durable); ClientConsumer consumer = mySession.createConsumer(queue); SimpleString sessionName = SimpleString.toSimpleString(((ClientSessionInternal) mySession).getName()); NotificationTest.flush(notifConsumer); consumer.close(); ClientMessage[] notifications = NotificationTest.consumeMessages(1, notifConsumer); Assert.assertEquals(CONSUMER_CLOSED.toString(), 
// NOTE(review): the assertions below are the tail end of a test method whose
// opening statements fall outside this chunk; the code is reproduced unchanged.
                          notifications[0].getObjectProperty(ManagementHelper.HDR_NOTIFICATION_TYPE).toString());
      Assert.assertEquals(queue.toString(), notifications[0].getObjectProperty(ManagementHelper.HDR_ROUTING_NAME).toString());
      Assert.assertEquals(address.toString(), notifications[0].getObjectProperty(ManagementHelper.HDR_ADDRESS).toString());
      Assert.assertEquals(0, notifications[0].getObjectProperty(ManagementHelper.HDR_CONSUMER_COUNT));
      Assert.assertEquals(SimpleString.toSimpleString("myUser"), notifications[0].getSimpleStringProperty(ManagementHelper.HDR_USER));
      Assert.assertEquals(SimpleString.toSimpleString("invm:0"), notifications[0].getSimpleStringProperty(ManagementHelper.HDR_REMOTE_ADDRESS));
      Assert.assertEquals(sessionName, notifications[0].getSimpleStringProperty(ManagementHelper.HDR_SESSION_NAME));

      session.deleteQueue(queue);
   }

   /**
    * Creating an address must emit exactly one ADDRESS_ADDED notification carrying
    * the routing type and the address name.
    */
   @Test
   public void testAddressAdded() throws Exception {
      SimpleString address = RandomUtil.randomSimpleString();

      // Drain any pending notifications so the consume below sees only ours.
      NotificationTest.flush(notifConsumer);

      session.createAddress(address, RoutingType.ANYCAST, true);

      ClientMessage[] notifications = NotificationTest.consumeMessages(1, notifConsumer);
      Assert.assertEquals(ADDRESS_ADDED.toString(), notifications[0].getObjectProperty(ManagementHelper.HDR_NOTIFICATION_TYPE).toString());
      Assert.assertEquals(RoutingType.ANYCAST.getType(), notifications[0].getObjectProperty(ManagementHelper.HDR_ROUTING_TYPE));
      Assert.assertEquals(address.toString(), notifications[0].getObjectProperty(ManagementHelper.HDR_ADDRESS).toString());
   }

   /**
    * Removing an existing address must emit exactly one ADDRESS_REMOVED
    * notification carrying the routing type and the address name.
    */
   @Test
   public void testAddressRemoved() throws Exception {
      SimpleString address = RandomUtil.randomSimpleString();
      session.createAddress(address, RoutingType.ANYCAST, true);
      // Flush AFTER creating the address so the ADDRESS_ADDED event is discarded.
      NotificationTest.flush(notifConsumer);

      server.getPostOffice().removeAddressInfo(address);

      ClientMessage[] notifications = NotificationTest.consumeMessages(1, notifConsumer);
      Assert.assertEquals(ADDRESS_REMOVED.toString(), notifications[0].getObjectProperty(ManagementHelper.HDR_NOTIFICATION_TYPE).toString());
      Assert.assertEquals(RoutingType.ANYCAST.getType(), notifications[0].getObjectProperty(ManagementHelper.HDR_ROUTING_TYPE));
      Assert.assertEquals(address.toString(), notifications[0].getObjectProperty(ManagementHelper.HDR_ADDRESS).toString());
   }

   /**
    * Opening a new connection+session must emit CONNECTION_CREATED followed by
    * SESSION_CREATED; closing them must emit SESSION_CLOSED followed by
    * CONNECTION_DESTROYED, with the same connection name in both create/destroy
    * events.
    */
   @Test
   public void testConnectionCreatedAndDestroyed() throws Exception {
      NotificationTest.flush(notifConsumer);

      ClientSessionFactory sf = createSessionFactory(locator);
      ClientSession mySession = sf.createSession("myUser", "myPassword", false, true, true, locator.isPreAcknowledge(), locator.getAckBatchSize());
      mySession.start();

      ClientMessage[] notifications = NotificationTest.consumeMessages(2, notifConsumer);

      Assert.assertEquals(CONNECTION_CREATED.toString(), notifications[0].getObjectProperty(ManagementHelper.HDR_NOTIFICATION_TYPE).toString());
      Assert.assertNotNull(notifications[0].getObjectProperty(ManagementHelper.HDR_CONNECTION_NAME));
      // Remembered so we can verify the destroy event refers to the same connection.
      final String connectionId = notifications[0].getObjectProperty(ManagementHelper.HDR_CONNECTION_NAME).toString();

      Assert.assertEquals(SESSION_CREATED.toString(), notifications[1].getObjectProperty(ManagementHelper.HDR_NOTIFICATION_TYPE).toString());
      Assert.assertNotNull(notifications[1].getObjectProperty(ManagementHelper.HDR_CONNECTION_NAME));
      Assert.assertNotNull(notifications[1].getObjectProperty(ManagementHelper.HDR_SESSION_NAME));
      Assert.assertEquals(SimpleString.toSimpleString("myUser"), notifications[1].getObjectProperty(ManagementHelper.HDR_USER));

      NotificationTest.flush(notifConsumer);

      mySession.close();
      sf.close();

      notifications = NotificationTest.consumeMessages(2, notifConsumer);

      Assert.assertEquals(SESSION_CLOSED.toString(), notifications[0].getObjectProperty(ManagementHelper.HDR_NOTIFICATION_TYPE).toString());
      Assert.assertNotNull(notifications[0].getObjectProperty(ManagementHelper.HDR_CONNECTION_NAME));
      Assert.assertNotNull(notifications[0].getObjectProperty(ManagementHelper.HDR_SESSION_NAME));
      Assert.assertEquals(SimpleString.toSimpleString("myUser"), notifications[0].getObjectProperty(ManagementHelper.HDR_USER));

      Assert.assertEquals(CONNECTION_DESTROYED.toString(), notifications[1].getObjectProperty(ManagementHelper.HDR_NOTIFICATION_TYPE).toString());
      Assert.assertNotNull(notifications[1].getObjectProperty(ManagementHelper.HDR_CONNECTION_NAME));
      Assert.assertEquals(connectionId, notifications[1].getObjectProperty(ManagementHelper.HDR_CONNECTION_NAME).toString());
   }

   /**
    * Delivering a message to a consumer must emit a MESSAGE_DELIVERED
    * notification identifying the message, the consumer and the destination.
    */
   @Test
   public void testMessageDelivered() throws Exception {
      ClientSessionFactory sf = createSessionFactory(locator);
      ClientSession mySession = sf.createSession("myUser", "myPassword", false, true, true, locator.isPreAcknowledge(), locator.getAckBatchSize());
      mySession.start();

      SimpleString queue = RandomUtil.randomSimpleString();
      SimpleString address = RandomUtil.randomSimpleString();
      boolean durable = RandomUtil.randomBoolean();

      session.createQueue(address, queue, durable);
      ClientConsumer consumer = mySession.createConsumer(queue);
      ClientProducer producer = mySession.createProducer(address);

      NotificationTest.flush(notifConsumer);

      ClientMessage msg = session.createMessage(false);
      msg.putStringProperty("someKey", "someValue");
      producer.send(msg);
      consumer.receive(1000);

      ClientMessage[] notifications = NotificationTest.consumeMessages(1, notifConsumer);
      Assert.assertEquals(MESSAGE_DELIVERED.toString(), notifications[0].getObjectProperty(ManagementHelper.HDR_NOTIFICATION_TYPE).toString());
      Assert.assertNotNull(notifications[0].getObjectProperty(ManagementHelper.HDR_MESSAGE_ID));
      Assert.assertNotNull(notifications[0].getObjectProperty(ManagementHelper.HDR_CONSUMER_NAME));
      Assert.assertEquals(address, notifications[0].getObjectProperty(ManagementHelper.HDR_ADDRESS));
      Assert.assertEquals(queue, notifications[0].getObjectProperty(ManagementHelper.HDR_ROUTING_NAME));
      Assert.assertEquals(RoutingType.MULTICAST.getType(), notifications[0].getObjectProperty(ManagementHelper.HDR_ROUTING_TYPE));

      consumer.close();
      session.deleteQueue(queue);
   }

   /**
    * A message that expires before it can be delivered must emit a
    * MESSAGE_EXPIRED notification identifying the message and the destination.
    */
   @Test
   public void testMessageExpired() throws Exception {
      ClientSessionFactory sf = createSessionFactory(locator);
      ClientSession mySession = sf.createSession("myUser", "myPassword", false, true, true, locator.isPreAcknowledge(), locator.getAckBatchSize());
      mySession.start();

      SimpleString queue = RandomUtil.randomSimpleString();
      SimpleString address = RandomUtil.randomSimpleString();
      boolean durable = RandomUtil.randomBoolean();

      session.createQueue(address, queue, durable);
      ClientConsumer consumer = mySession.createConsumer(queue);
      ClientProducer producer = mySession.createProducer(address);

      NotificationTest.flush(notifConsumer);

      ClientMessage msg = session.createMessage(false);
      msg.putStringProperty("someKey", "someValue");
      // 1 ms expiration: the sleep below guarantees the message is expired
      // before the receive attempt.
      msg.setExpiration(1);
      producer.send(msg);
      Thread.sleep(500);
      consumer.receive(500);

      ClientMessage[] notifications = NotificationTest.consumeMessages(1, notifConsumer);
      Assert.assertEquals(MESSAGE_EXPIRED.toString(), notifications[0].getObjectProperty(ManagementHelper.HDR_NOTIFICATION_TYPE).toString());
      Assert.assertNotNull(notifications[0].getObjectProperty(ManagementHelper.HDR_MESSAGE_ID));
      Assert.assertEquals(address, notifications[0].getObjectProperty(ManagementHelper.HDR_ADDRESS));
      Assert.assertEquals(queue, notifications[0].getObjectProperty(ManagementHelper.HDR_ROUTING_NAME));
      Assert.assertEquals(RoutingType.MULTICAST.getType(), notifications[0].getObjectProperty(ManagementHelper.HDR_ROUTING_TYPE));

      consumer.close();
      session.deleteQueue(queue);
   }

   // Package protected ---------------------------------------------

   // Protected -----------------------------------------------------

   /**
    * Starts an in-VM broker with a {@code NotificationActiveMQServerPlugin}
    * configured to emit every notification category exercised by this test, and
    * subscribes {@code notifConsumer} to the management notification address.
    */
   @Override
   @Before
   public void setUp() throws Exception {
      super.setUp();

      server = addServer(ActiveMQServers.newActiveMQServer(createDefaultInVMConfig(), false));

      NotificationActiveMQServerPlugin notificationPlugin = new NotificationActiveMQServerPlugin();
      notificationPlugin.setSendAddressNotifications(true);
      notificationPlugin.setSendConnectionNotifications(true);
      notificationPlugin.setSendSessionNotifications(true);
      notificationPlugin.setSendDeliveredNotifications(true);
      notificationPlugin.setSendExpiredNotifications(true);

      server.registerBrokerPlugin(notificationPlugin);
      server.start();

      locator = createInVMNonHALocator();
      ClientSessionFactory sf = createSessionFactory(locator);
      session = sf.createSession(false, true, true);
      session.start();

      notifQueue = RandomUtil.randomSimpleString();

      session.createQueue(ActiveMQDefaultConfiguration.getDefaultManagementNotificationAddress(), notifQueue, null, false);

      notifConsumer = session.createConsumer(notifQueue);
   }

   // Private -------------------------------------------------------

   /**
    * Drains every pending message from the given consumer so that a subsequent
    * {@link #consumeMessages} only sees notifications produced after this call.
    */
   private static void flush(final ClientConsumer notifConsumer) throws ActiveMQException {
      ClientMessage message = null;
      do {
         message = notifConsumer.receive(500);
      } while (message != null);
   }

   /**
    * Receives exactly {@code expected} messages from {@code consumer},
    * acknowledging each, and asserts that no further message is pending.
    * Every received message's properties are printed for diagnostics.
    */
   protected static ClientMessage[] consumeMessages(final int expected, final ClientConsumer consumer) throws Exception {
      ClientMessage[] messages = new ClientMessage[expected];

      ClientMessage m = null;
      for (int i = 0; i < expected; i++) {
         m = consumer.receive(500);
         if (m != null) {
            for (SimpleString key : m.getPropertyNames()) {
               System.out.println(key + "=" + m.getObjectProperty(key));
            }
         }
         Assert.assertNotNull("expected to received " + expected + " messages, got only " + i, m);
         messages[i] = m;
         m.acknowledge();
      }

      // Fail if the broker produced MORE notifications than this test expected.
      m = consumer.receiveImmediate();
      if (m != null) {
         for (SimpleString key : m.getPropertyNames()) {
            System.out.println(key + "=" + m.getObjectProperty(key));
         }
      }
      Assert.assertNull("received one more message than expected (" + expected + ")", m);

      return messages;
   }

   // Inner classes -------------------------------------------------

}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.solr.common.util;

import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;

import org.noggit.CharArr;

import com.google.common.cache.Cache;
import com.google.common.hash.Hashing;

/**
 * Defines a space-efficient serialization/deserialization format for transferring data.
 * <p>
 * JavaBinCodec has built-in support for many commonly used types.  This includes primitive types (boolean, byte,
 * short, double, int, long, float), common Java containers/utilities (Date, Map, Collection, Iterator, String,
 * Object[], byte[]), and frequently used Solr types ({@link NamedList}, {@link SolrDocument},
 * {@link SolrDocumentList}). Each of the above types has a pair of associated methods which read and write
 * that type to a stream.
 * <p>
 * Classes that aren't supported natively can still be serialized/deserialized by providing
 * an {@link JavaBinCodec.ObjectResolver} object that knows how to work with the unsupported class.
 * This allows {@link JavaBinCodec} to be used to marshall/unmarshall arbitrary content.
 * <p>
 * NOTE -- {@link JavaBinCodec} instances cannot be reused for more than one marshall or unmarshall operation.
 */
/*
 * Original version: https://github.com/apache/lucene-solr/blob/64093d6/solr/solrj/src/java/org/apache/solr/common/util/JavaBinCodec.java
 *
 * - Removed Solr-specific methods
 * - Converted internal streams into a regular DataInputStream/DataOutputStream
 * - Converted Solr Cache and hashing methods to use Guava counterparts
 */
@SuppressWarnings({"lgtm[java/dereferenced-value-may-be-null]"})
public class JavaBinCodec {

  // Plain tags: the whole byte is the type; any payload follows separately.
  public static final byte
          NULL = 0,
          BOOL_TRUE = 1,
          BOOL_FALSE = 2,
          BYTE = 3,
          SHORT = 4,
          DOUBLE = 5,
          INT = 6,
          LONG = 7,
          FLOAT = 8,
          DATE = 9,
          MAP = 10,
          SOLRDOC = 11,
          SOLRDOCLST = 12,
          BYTEARR = 13,
          ITERATOR = 14,
          /** this is a special tag signals an end. No value is associated with it */
          END = 15,

          SOLRINPUTDOC = 16,
          MAP_ENTRY_ITER = 17,
          ENUM_FIELD_VALUE = 18,
          MAP_ENTRY = 19,

          // types that combine tag + length (or other info) in a single byte
          // (tag in the top 3 bits, small size/value in the low 5 — see writeTag/readSize)
          TAG_AND_LEN = (byte) (1 << 5),
          STR = (byte) (1 << 5),
          SINT = (byte) (2 << 5),
          SLONG = (byte) (3 << 5),
          ARR = (byte) (4 << 5), //
          ORDERED_MAP = (byte) (5 << 5), // SimpleOrderedMap (a NamedList subclass, and more common)
          NAMED_LST = (byte) (6 << 5), // NamedList
          EXTERN_STRING = (byte) (7 << 5);

  // Strings whose worst-case UTF-8 size fits under this threshold are encoded
  // through a single reusable byte[]; larger ones use a two-pass length computation.
  private static final int MAX_UTF8_SIZE_FOR_ARRAY_GROW_STRATEGY = 65536;

  // Wire-format version byte written/validated at the start of every stream.
  private static byte VERSION = 2;
  private final ObjectResolver resolver;
  protected DataOutputStream daos;
  private StringCache stringCache;
  private WritableDocFields writableDocFields;
  // Guards against reusing one instance for a second marshal/unmarshal (see class javadoc).
  private boolean alreadyMarshalled;
  private boolean alreadyUnmarshalled;

  public JavaBinCodec() {
    resolver = null;
    writableDocFields = null;
  }

  /**
   * Use this to use this as a PushWriter. ensure that close() is called explicitly after use
   *
   * @param os The output stream
   */
  public JavaBinCodec(OutputStream os, ObjectResolver resolver) throws IOException {
    this.resolver = resolver;
    initWrite(os);
  }

  public JavaBinCodec(ObjectResolver resolver) {
    this(resolver, null);
  }

  public JavaBinCodec setWritableDocFields(WritableDocFields writableDocFields) {
    this.writableDocFields = writableDocFields;
    return this;
  }

  public JavaBinCodec(ObjectResolver resolver, StringCache stringCache) {
    this.resolver = resolver;
    this.stringCache = stringCache;
  }

  public ObjectResolver getResolver() {
    return resolver;
  }

  /**
   * Serializes {@code nl} to {@code os} in javabin format (version byte first).
   * The stream is flushed but not closed.
   */
  public void marshal(Object nl, OutputStream os) throws IOException {
    try {
      initWrite(os);
      writeVal(nl);
    } finally {
      alreadyMarshalled = true;
      daos.flush();
    }
  }

  protected void initWrite(OutputStream os) throws IOException {
    assert !alreadyMarshalled;
    init(wrapOutputStream(os));
    daos.writeByte(VERSION);
  }

  private static DataOutputStream wrapOutputStream(OutputStream os) {
    return os instanceof DataOutputStream ? (DataOutputStream) os : new DataOutputStream(os);
  }

  /** expert: sets a new output stream */
  public void init(DataOutputStream os) {
    daos = os;
  }

  // Version byte read from the input stream during initRead.
  byte version;

  /** Deserializes one javabin-encoded value from {@code is}. */
  public Object unmarshal(InputStream is) throws IOException {
    DataInputStream dis = initRead(is);
    return readVal(dis);
  }

  /**
   * Validates the leading version byte and marks this codec as used.
   *
   * @throws IllegalArgumentException if the stream does not start with the expected version
   */
  protected DataInputStream initRead(InputStream is) throws IOException {
    assert !alreadyUnmarshalled;
    DataInputStream dis = wrapInputStream(is);
    version = dis.readByte();
    if (version != VERSION) {
      throw new IllegalArgumentException("Invalid version (expected " + VERSION + ", but " + version + ") or the data in not in 'javabin' format");
    }
    alreadyUnmarshalled = true;
    return dis;
  }

  private static DataInputStream wrapInputStream(InputStream is) {
    return is instanceof DataInputStream ? (DataInputStream) is : new DataInputStream(is);
  }

  /**
   * Writes one value: first tries the natively supported types, then gives an
   * {@link ObjectResolver} a chance, and finally falls back to writing
   * {@code className:toString()} as a plain string.
   */
  public void writeVal(Object val) throws IOException {
    if (writeKnownType(val)) {
      return;
    } else {
      ObjectResolver resolver = null;
      if (val instanceof ObjectResolver) {
        // A value that is itself an ObjectResolver resolves itself.
        resolver = (ObjectResolver) val;
      } else {
        resolver = this.resolver;
      }
      if (resolver != null) {
        Object tmpVal = resolver.resolve(val, this);
        if (tmpVal == null) return; // null means the resolver took care of it fully
        if (writeKnownType(tmpVal)) return;
      }
    }
    // Fallback to do *something*.
    // note: if the user of this codec doesn't want this (e.g. UpdateLog) it can supply an ObjectResolver that does
    // something else like throw an exception.
    writeVal(val.getClass().getName() + ':' + val.toString());
  }

  // Sentinel returned by readVal when the END tag is encountered.
  protected static final Object END_OBJ = new Object();

  // Most recently read tag byte; low bits carry inline size/value for TAG_AND_LEN types.
  protected byte tagByte;

  /** Reads the next tag byte and dispatches to the matching reader. */
  public Object readVal(DataInputStream dis) throws IOException {
    tagByte = dis.readByte();
    return readObject(dis);
  }

  protected Object readObject(DataInputStream dis) throws IOException {
    // if ((tagByte & 0xe0) == 0) {
    // if top 3 bits are clear, this is a normal tag

    // OK, try type + size in single byte
    switch (tagByte >>> 5) {
      case STR >>> 5:
        return readStr(dis);
      case SINT >>> 5:
        return readSmallInt(dis);
      case SLONG >>> 5:
        return readSmallLong(dis);
      case ARR >>> 5:
        return readArray(dis);
      case EXTERN_STRING >>> 5:
        return readExternString(dis);
    }

    switch (tagByte) {
      case NULL:
        return null;
      case DATE:
        return new Date(dis.readLong());
      case INT:
        return dis.readInt();
      case BOOL_TRUE:
        return Boolean.TRUE;
      case BOOL_FALSE:
        return Boolean.FALSE;
      case FLOAT:
        return dis.readFloat();
      case DOUBLE:
        return dis.readDouble();
      case LONG:
        return dis.readLong();
      case BYTE:
        return dis.readByte();
      case SHORT:
        return dis.readShort();
      case MAP:
        return readMap(dis);
      case BYTEARR:
        return readByteArray(dis);
      case ITERATOR:
        return readIterator(dis);
      case END:
        return END_OBJ;
      case MAP_ENTRY:
        return readMapEntry(dis);
      case MAP_ENTRY_ITER:
        return readMapIter(dis);
    }

    throw new RuntimeException("Unknown type " + tagByte);
  }

  /**
   * Attempts to write {@code val} with one of the built-in encoders.
   *
   * @return true if the value was written, false if the caller must handle it
   */
  public boolean writeKnownType(Object val) throws IOException {
    if (writePrimitive(val)) return true;
    if (val instanceof Collection) {
      writeArray((Collection) val);
      return true;
    }
    if (val instanceof Object[]) {
      writeArray((Object[]) val);
      return true;
    }
    if (val instanceof Map) {
      writeMap((Map) val);
      return true;
    }
    if (val instanceof Iterator) {
      writeIterator((Iterator) val);
      return true;
    }
    if (val instanceof Path) {
      writeStr(((Path) val).toAbsolutePath().toString());
      return true;
    }
    if (val instanceof Iterable) {
      writeIterator(((Iterable) val).iterator());
      return true;
    }
    if (val instanceof Map.Entry) {
      writeMapEntry((Map.Entry) val);
      return true;
    }
    if (val instanceof AtomicInteger) {
      writeInt(((AtomicInteger) val).get());
      return true;
    }
    if (val instanceof AtomicLong) {
      writeLong(((AtomicLong) val).get());
      return true;
    }
    if (val instanceof AtomicBoolean) {
      writeBoolean(((AtomicBoolean) val).get());
      return true;
    }
    return false;
  }

  public void writeTag(byte tag) throws IOException {
    daos.writeByte(tag);
  }

  /**
   * Writes a tag together with a size.  For TAG_AND_LEN tags (top 3 bits set)
   * sizes below 0x1f are packed into the tag byte itself; larger sizes set the
   * low 5 bits to 0x1f and append the remainder as a vint.
   */
  public void writeTag(byte tag, int size) throws IOException {
    if ((tag & 0xe0) != 0) {
      if (size < 0x1f) {
        daos.writeByte(tag | size);
      } else {
        daos.writeByte(tag | 0x1f);
        writeVInt(size - 0x1f, daos);
      }
    } else {
      daos.writeByte(tag);
      writeVInt(size, daos);
    }
  }

  public void writeByteArray(byte[] arr, int offset, int len) throws IOException {
    writeTag(BYTEARR, len);
    daos.write(arr, offset, len);
  }

  public byte[] readByteArray(DataInputStream dis) throws IOException {
    byte[] arr = new byte[readVInt(dis)];
    dis.readFully(arr);
    return arr;
  }

  // use this to ignore the writable interface because child docs will ignore the fl flag
  // NOTE(review): original author questioned this design ("is it a good design?")
  private boolean ignoreWritable = false;

  protected boolean toWrite(String key) {
    return writableDocFields == null || ignoreWritable || writableDocFields.isWritable(key);
  }

  /** Reads key/value pairs until the END sentinel (MAP_ENTRY_ITER encoding). */
  public Map<Object, Object> readMapIter(DataInputStream dis) throws IOException {
    Map<Object, Object> m = newMap(-1);
    for (; ; ) {
      Object key = readVal(dis);
      if (key == END_OBJ) break;
      Object val = readVal(dis);
      m.put(key, val);
    }
    return m;
  }

  /**
   * create a new Map object
   * @param size expected size, -1 means unknown size
   */
  protected Map<Object, Object> newMap(int size) {
    return size < 0 ? new LinkedHashMap<>() : new LinkedHashMap<>(size);
  }

  /** Reads a size-prefixed map (MAP encoding). */
  public Map<Object, Object> readMap(DataInputStream dis) throws IOException {
    int sz = readVInt(dis);
    Map<Object, Object> m = newMap(sz);
    for (int i = 0; i < sz; i++) {
      Object key = readVal(dis);
      Object val = readVal(dis);
      m.put(key, val);
    }
    return m;
  }

  /** Writes every element of the iterator, terminated by the END tag. */
  public void writeIterator(Iterator iter) throws IOException {
    writeTag(ITERATOR);
    while (iter.hasNext()) {
      writeVal(iter.next());
    }
    writeTag(END);
  }

  public List<Object> readIterator(DataInputStream fis) throws IOException {
    ArrayList<Object> l = new ArrayList<>();
    while (true) {
      Object o = readVal(fis);
      if (o == END_OBJ) break;
      l.add(o);
    }
    return l;
  }

  public void writeArray(List l) throws IOException {
    writeTag(ARR, l.size());
    for (int i = 0; i < l.size(); i++) {
      writeVal(l.get(i));
    }
  }

  public void writeArray(Collection coll) throws IOException {
    writeTag(ARR, coll.size());
    for (Object o : coll) {
      writeVal(o);
    }
  }

  public void writeArray(Object[] arr) throws IOException {
    writeTag(ARR, arr.length);
    for (int i = 0; i < arr.length; i++) {
      Object o = arr[i];
      writeVal(o);
    }
  }

  public List<Object> readArray(DataInputStream dis) throws IOException {
    int sz = readSize(dis);
    ArrayList<Object> l = new ArrayList<>(sz);
    for (int i = 0; i < sz; i++) {
      l.add(readVal(dis));
    }
    return l;
  }

  public void writeMapEntry(Entry<Object, Object> val) throws IOException {
    writeTag(MAP_ENTRY);
    writeVal(val.getKey());
    writeVal(val.getValue());
  }

  /** Reads a MAP_ENTRY tag body into an immutable Map.Entry (setValue throws). */
  public Map.Entry<Object, Object> readMapEntry(DataInputStream dis) throws IOException {
    final Object key = readVal(dis);
    final Object value = readVal(dis);
    return new Map.Entry<Object, Object>() {

      @Override
      public Object getKey() {
        return key;
      }

      @Override
      public Object getValue() {
        return value;
      }

      @Override
      public String toString() {
        return "MapEntry[" + key + ":" + value + "]";
      }

      @Override
      public Object setValue(Object value) {
        throw new UnsupportedOperationException();
      }

      @Override
      public int hashCode() {
        // NOTE(review): non-standard combination (result *= 31 + h rather than
        // result = 31 * result + h); kept as-is to preserve existing hash values.
        int result = 31;
        result *= 31 + getKey().hashCode();
        result *= 31 + getValue().hashCode();
        return result;
      }

      @Override
      public boolean equals(Object obj) {
        if (this == obj) {
          return true;
        }
        if (!(obj instanceof Entry)) {
          return false;
        }
        Map.Entry<Object, Object> entry = (Entry<Object, Object>) obj;
        return (this.getKey().equals(entry.getKey()) && this.getValue().equals(entry.getValue()));
      }
    };
  }

  /**
   * write the string as tag+length, with length being the number of UTF-8 bytes
   */
  public void writeStr(CharSequence s) throws IOException {
    if (s == null) {
      writeTag(NULL);
      return;
    }
    int end = s.length();
    int maxSize = end * ByteUtils.MAX_UTF8_BYTES_PER_CHAR;

    if (maxSize <= MAX_UTF8_SIZE_FOR_ARRAY_GROW_STRATEGY) {
      // Small string: encode into the reusable buffer in one pass.
      if (bytes == null || bytes.length < maxSize) bytes = new byte[maxSize];
      int sz = ByteUtils.UTF16toUTF8(s, 0, end, bytes, 0);
      writeTag(STR, sz);
      daos.write(bytes, 0, sz);
    } else {
      // double pass logic for large strings, see SOLR-7971
      int sz = ByteUtils.calcUTF16toUTF8Length(s, 0, end);
      writeTag(STR, sz);
      if (bytes == null || bytes.length < 8192) bytes = new byte[8192];
      ByteUtils.writeUTF16toUTF8(s, 0, end, daos, bytes);
    }
  }

  // Reusable scratch buffers for string encode/decode (one reason instances
  // are not thread-safe / reusable).
  byte[] bytes;
  CharArr arr = new CharArr();
  private StringBytes bytesRef = new StringBytes(bytes, 0, 0);

  public String readStr(DataInputStream dis) throws IOException {
    return readStr(dis, null);
  }

  public String readStr(DataInputStream dis, StringCache stringCache) throws IOException {
    int sz = readSize(dis);
    if (bytes == null || bytes.length < sz) bytes = new byte[sz];
    dis.readFully(bytes, 0, sz);
    if (stringCache != null) {
      return stringCache.get(bytesRef.reset(bytes, 0, sz));
    } else {
      arr.reset();
      ByteUtils.UTF8toUTF16(bytes, 0, sz, arr);
      return arr.toString();
    }
  }

  /**
   * Positive ints are written as SINT with the low nibble inline and the rest
   * as a vint; zero and negatives fall back to a full 4-byte INT.
   */
  public void writeInt(int val) throws IOException {
    if (val > 0) {
      int b = SINT | (val & 0x0f);

      if (val >= 0x0f) {
        b |= 0x10;
        daos.writeByte(b);
        writeVInt(val >>> 4, daos);
      } else {
        daos.writeByte(b);
      }

    } else {
      daos.writeByte(INT);
      daos.writeInt(val);
    }
  }

  public int readSmallInt(DataInputStream dis) throws IOException {
    int v = tagByte & 0x0F;
    // Bit 0x10 in the tag signals that more bits follow as a vint.
    if ((tagByte & 0x10) != 0) v = (readVInt(dis) << 4) | v;
    return v;
  }

  /**
   * Longs whose top byte is zero are written as SLONG (nibble + vlong);
   * others fall back to a full 8-byte LONG.
   */
  public void writeLong(long val) throws IOException {
    if ((val & 0xff00000000000000L) == 0) {
      int b = SLONG | ((int) val & 0x0f);
      if (val >= 0x0f) {
        b |= 0x10;
        daos.writeByte(b);
        writeVLong(val >>> 4, daos);
      } else {
        daos.writeByte(b);
      }
    } else {
      daos.writeByte(LONG);
      daos.writeLong(val);
    }
  }

  public long readSmallLong(DataInputStream dis) throws IOException {
    long v = tagByte & 0x0F;
    if ((tagByte & 0x10) != 0) v = (readVLong(dis) << 4) | v;
    return v;
  }

  public void writeFloat(float val) throws IOException {
    daos.writeByte(FLOAT);
    daos.writeFloat(val);
  }

  /**
   * Writes null, CharSequence, boxed numerics, Date, Boolean, byte[],
   * ByteBuffer and the END sentinel.
   *
   * @return true if the value was written, false otherwise
   */
  public boolean writePrimitive(Object val) throws IOException {
    if (val == null) {
      daos.writeByte(NULL);
      return true;
    } else if (val instanceof CharSequence) {
      writeStr((CharSequence) val);
      return true;
    } else if (val instanceof Number) {

      if (val instanceof Integer) {
        writeInt(((Integer) val).intValue());
        return true;
      } else if (val instanceof Long) {
        writeLong(((Long) val).longValue());
        return true;
      } else if (val instanceof Float) {
        writeFloat(((Float) val).floatValue());
        return true;
      } else if (val instanceof Double) {
        writeDouble(((Double) val).doubleValue());
        return true;
      } else if (val instanceof Byte) {
        daos.writeByte(BYTE);
        daos.writeByte(((Byte) val).intValue());
        return true;
      } else if (val instanceof Short) {
        daos.writeByte(SHORT);
        daos.writeShort(((Short) val).intValue());
        return true;
      }
      return false;

    } else if (val instanceof Date) {
      daos.writeByte(DATE);
      daos.writeLong(((Date) val).getTime());
      return true;
    } else if (val instanceof Boolean) {
      writeBoolean((Boolean) val);
      return true;
    } else if (val instanceof byte[]) {
      writeByteArray((byte[]) val, 0, ((byte[]) val).length);
      return true;
    } else if (val instanceof ByteBuffer) {
      ByteBuffer buf = (ByteBuffer) val;
      writeByteArray(buf.array(), buf.position(), buf.limit() - buf.position());
      return true;
    } else if (val == END_OBJ) {
      writeTag(END);
      return true;
    }
    return false;
  }

  protected void writeBoolean(boolean val) throws IOException {
    if (val) daos.writeByte(BOOL_TRUE);
    else daos.writeByte(BOOL_FALSE);
  }

  protected void writeDouble(double val) throws IOException {
    daos.writeByte(DOUBLE);
    daos.writeDouble(val);
  }

  /** Writes a size-prefixed map; String keys go through the extern-string table. */
  public void writeMap(Map<?, ?> val) throws IOException {
    writeTag(MAP, val.size());
    for (Map.Entry<?, ?> entry : val.entrySet()) {
      Object key = entry.getKey();
      if (key instanceof String) {
        writeExternString((String) key);
      } else {
        writeVal(key);
      }
      writeVal(entry.getValue());
    }
  }

  /**
   * Decodes the size packed into the current tag byte: the low 5 bits, plus a
   * trailing vint when those bits are saturated at 0x1f.
   */
  public int readSize(DataInputStream in) throws IOException {
    int sz = tagByte & 0x1f;
    if (sz == 0x1f) sz += readVInt(in);
    return sz;
  }

  /**
   * Special method for variable length int (copied from lucene). Usually used for writing the length of a
   * collection/array/map In most of the cases the length can be represented in one byte (length &lt; 127) so it saves 3
   * bytes/object
   *
   * @throws IOException If there is a low-level I/O error.
   */
  public static void writeVInt(int i, DataOutputStream out) throws IOException {
    while ((i & ~0x7F) != 0) {
      out.writeByte((byte) ((i & 0x7f) | 0x80));
      i >>>= 7;
    }
    out.writeByte((byte) i);
  }

  /**
   * The counterpart for {@link #writeVInt(int, DataOutputStream)}
   *
   * @throws IOException If there is a low-level I/O error.
   */
  public static int readVInt(DataInputStream in) throws IOException {
    byte b = in.readByte();
    int i = b & 0x7F;
    for (int shift = 7; (b & 0x80) != 0; shift += 7) {
      b = in.readByte();
      i |= (b & 0x7F) << shift;
    }
    return i;
  }

  public static void writeVLong(long i, DataOutputStream out) throws IOException {
    while ((i & ~0x7F) != 0) {
      out.writeByte((byte) ((i & 0x7f) | 0x80));
      i >>>= 7;
    }
    out.writeByte((byte) i);
  }

  public static long readVLong(DataInputStream in) throws IOException {
    byte b = in.readByte();
    long i = b & 0x7F;
    for (int shift = 7; (b & 0x80) != 0; shift += 7) {
      b = in.readByte();
      i |= (long) (b & 0x7F) << shift;
    }
    return i;
  }

  // Extern-string table: repeated strings are written once and then referenced
  // by 1-based index (0 on the wire means "inline string follows").
  private int stringsCount = 0;
  private Map<String, Integer> stringsMap;
  private List<String> stringsList;

  public void writeExternString(String s) throws IOException {
    if (s == null) {
      writeTag(NULL);
      return;
    }
    Integer idx = stringsMap == null ? null : stringsMap.get(s);
    if (idx == null) idx = 0;
    writeTag(EXTERN_STRING, idx);
    if (idx == 0) {
      // First occurrence: write inline and remember its index for later refs.
      writeStr(s);
      if (stringsMap == null) stringsMap = new HashMap<>();
      stringsMap.put(s, ++stringsCount);
    }
  }

  public String readExternString(DataInputStream fis) throws IOException {
    int idx = readSize(fis);
    if (idx != 0) { // idx != 0 is the index of the extern string
      return stringsList.get(idx - 1);
    } else { // idx == 0 means it has a string value
      tagByte = fis.readByte();
      String s = readStr(fis, stringCache);
      if (stringsList == null) stringsList = new ArrayList<>();
      stringsList.add(s);
      return s;
    }
  }

  /**
   * Allows extension of {@link JavaBinCodec} to support serialization of arbitrary data types.
   * <p>
   * Implementors of this interface write a method to serialize a given object using an existing {@link JavaBinCodec}
   */
  public interface ObjectResolver {
    /**
     * Examine and attempt to serialize the given object, using a {@link JavaBinCodec} to write it to a stream.
     *
     * @param o     the object that the caller wants serialized.
     * @param codec used to actually serialize {@code o}.
     * @return the object {@code o} itself if it could not be serialized, or {@code null} if the whole object was successfully serialized.
     * @see JavaBinCodec
     */
    Object resolve(Object o, JavaBinCodec codec) throws IOException;
  }

  public interface WritableDocFields {
    boolean isWritable(String name);

    boolean wantsAllFields();
  }

  /**
   * Caches decoded Strings keyed by their raw UTF-8 bytes so identical byte
   * sequences are not decoded repeatedly.
   */
  public static class StringCache {
    private final Cache<StringBytes, String> cache;

    public StringCache(Cache<StringBytes, String> cache) {
      this.cache = cache;
    }

    public String get(StringBytes b) {
      String result = cache.getIfPresent(b);
      if (result == null) {
        // make a copy because the buffer received may be changed later by the caller
        StringBytes copy = new StringBytes(Arrays.copyOfRange(b.bytes, b.offset, b.offset + b.length), 0, b.length);
        CharArr arr = new CharArr();
        ByteUtils.UTF8toUTF16(b.bytes, b.offset, b.length, arr);
        result = arr.toString();
        cache.put(copy, result);
      }
      return result;
    }
  }

  /**
   * A byte-slice key (bytes + offset + length) with a precomputed murmur3 hash,
   * used as the key type for {@link StringCache}.
   */
  public static class StringBytes {
    byte[] bytes;

    /**
     * Offset of first valid byte.
     */
    int offset;

    /**
     * Length of used bytes.
     */
    private int length;
    private int hash;

    public StringBytes(byte[] bytes, int offset, int length) {
      reset(bytes, offset, length);
    }

    /** Re-points this key at a new slice and recomputes the hash; returns this. */
    StringBytes reset(byte[] bytes, int offset, int length) {
      this.bytes = bytes;
      this.offset = offset;
      this.length = length;
      hash = bytes == null ? 0 : Hashing.murmur3_32().hashBytes(bytes, offset, length).asInt();
      return this;
    }

    @Override
    public boolean equals(Object other) {
      if (other == null) {
        return false;
      }
      if (other instanceof StringBytes) {
        return this.bytesEquals((StringBytes) other);
      }
      return false;
    }

    boolean bytesEquals(StringBytes other) {
      assert other != null;
      if (length == other.length) {
        int otherUpto = other.offset;
        final byte[] otherBytes = other.bytes;
        final int end = offset + length;
        for (int upto = offset; upto < end; upto++, otherUpto++) {
          if (bytes[upto] != otherBytes[otherUpto]) {
            return false;
          }
        }
        return true;
      } else {
        return false;
      }
    }

    @Override
    public int hashCode() {
      return hash;
    }
  }

  /** Flushes the underlying output stream, if any; does not close it. */
  public void close() throws IOException {
    if (daos != null) {
      daos.flush();
    }
  }
}
//======================================================================== //$Id: HttpGenerator.java,v 1.7 2005/11/25 21:17:12 gregwilkins Exp $ //Copyright 2004-2005 Mort Bay Consulting Pty. Ltd. //------------------------------------------------------------------------ //Licensed under the Apache License, Version 2.0 (the "License"); //you may not use this file except in compliance with the License. //You may obtain a copy of the License at //http://www.apache.org/licenses/LICENSE-2.0 //Unless required by applicable law or agreed to in writing, software //distributed under the License is distributed on an "AS IS" BASIS, //WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. //See the License for the specific language governing permissions and //limitations under the License. //======================================================================== package org.mortbay.jetty; import java.io.IOException; import java.io.OutputStreamWriter; import java.io.Writer; import java.lang.reflect.Field; import java.lang.reflect.Modifier; import javax.servlet.ServletOutputStream; import javax.servlet.http.HttpServletResponse; import org.mortbay.io.Buffer; import org.mortbay.io.Buffers; import org.mortbay.io.ByteArrayBuffer; import org.mortbay.io.EndPoint; import org.mortbay.io.View; import org.mortbay.log.Log; import org.mortbay.util.ByteArrayOutputStream2; import org.mortbay.util.StringUtil; import org.mortbay.util.TypeUtil; /* ------------------------------------------------------------ */ /** * Abstract Generator. Builds HTTP Messages. * * Currently this class uses a system parameter "jetty.direct.writers" to control * two optional writer to byte conversions. buffer.writers=true will probably be * faster, but will consume more memory. This option is just for testing and tuning. 
* * @author gregw * */ public abstract class AbstractGenerator implements Generator { // states public final static int STATE_HEADER = 0; public final static int STATE_CONTENT = 2; public final static int STATE_FLUSHING = 3; public final static int STATE_END = 4; private static byte[] NO_BYTES = {}; private static int MAX_OUTPUT_CHARS = 512; private static Buffer[] __reasons = new Buffer[505]; static { Field[] fields = HttpServletResponse.class.getDeclaredFields(); for (int i=0;i<fields.length;i++) { if ((fields[i].getModifiers()&Modifier.STATIC)!=0 && fields[i].getName().startsWith("SC_")) { try { int code = fields[i].getInt(null); if (code<__reasons.length) __reasons[code]=new ByteArrayBuffer(fields[i].getName().substring(3)); } catch(IllegalAccessException e) {} } } } protected static Buffer getReasonBuffer(int code) { Buffer reason=(code<__reasons.length)?__reasons[code]:null; return reason==null?null:reason; } public static String getReason(int code) { Buffer reason=(code<__reasons.length)?__reasons[code]:null; return reason==null?TypeUtil.toString(code):reason.toString(); } // data protected int _state = STATE_HEADER; protected int _status = 0; protected int _version = HttpVersions.HTTP_1_1_ORDINAL; protected Buffer _reason; protected Buffer _method; protected String _uri; protected long _contentWritten = 0; protected long _contentLength = HttpTokens.UNKNOWN_CONTENT; protected boolean _last = false; protected boolean _head = false; protected boolean _noContent = false; protected boolean _close = false; protected Buffers _buffers; // source of buffers protected EndPoint _endp; protected int _headerBufferSize; protected int _contentBufferSize; protected Buffer _header; // Buffer for HTTP header (and maybe small _content) protected Buffer _buffer; // Buffer for copy of passed _content protected Buffer _content; // Buffer passed to addContent private boolean _sendServerVersion; /* ------------------------------------------------------------------------------- */ 
    /**
     * Constructor.
     *
     * @param buffers buffer pool
     * @param headerBufferSize Size of the buffer to allocate for HTTP header
     * @param contentBufferSize Size of the buffer to allocate for HTTP content
     */
    public AbstractGenerator(Buffers buffers, EndPoint io, int headerBufferSize, int contentBufferSize)
    {
        this._buffers = buffers;
        this._endp = io;
        _headerBufferSize=headerBufferSize;
        _contentBufferSize=contentBufferSize;
    }

    /* ------------------------------------------------------------------------------- */
    /**
     * Resets the generator so a fresh message can be built.
     *
     * @param returnBuffers if true both the header and content buffers are
     *        handed back to the pool; if false the header buffer is kept
     *        (cleared) for reuse and only the content buffer is returned.
     */
    public void reset(boolean returnBuffers)
    {
        _state = STATE_HEADER;
        _status = 0;
        _version = HttpVersions.HTTP_1_1_ORDINAL;
        _reason = null;
        _last = false;
        _head = false;
        _noContent=false;
        _close = false;
        _contentWritten = 0;
        _contentLength = HttpTokens.UNKNOWN_CONTENT;

        // Buffers come from a shared pool, so return/keep decisions are made
        // under the generator's own lock.
        synchronized(this)
        {
            if (returnBuffers)
            {
                if (_header != null)
                    _buffers.returnBuffer(_header);
                _header = null;
                if (_buffer != null)
                    _buffers.returnBuffer(_buffer);
                _buffer = null;
            }
            else
            {
                if (_header != null)
                    _header.clear();

                if (_buffer != null)
                {
                    _buffers.returnBuffer(_buffer);
                    _buffer = null;
                }
            }
        }
        _content = null;
        _method=null;
    }

    /* ------------------------------------------------------------------------------- */
    /**
     * Resets buffered content without touching the header state; only legal
     * before flushing has started.
     *
     * @throws IllegalStateException if the generator is already flushing.
     */
    public void resetBuffer()
    {
        if(_state>=STATE_FLUSHING)
            throw new IllegalStateException("Flushed");

        _last = false;
        _close = false;
        _contentWritten = 0;
        _contentLength = HttpTokens.UNKNOWN_CONTENT;
        _content=null;
        if (_buffer!=null)
            _buffer.clear();
    }

    /* ------------------------------------------------------------ */
    /**
     * @return Returns the contentBufferSize.
     */
    public int getContentBufferSize()
    {
        return _contentBufferSize;
    }

    /* ------------------------------------------------------------ */
    /**
     * Grows the content buffer; a smaller requested size is silently ignored.
     *
     * @param contentBufferSize The contentBufferSize to set.
     */
    public void increaseContentBufferSize(int contentBufferSize)
    {
        if (contentBufferSize > _contentBufferSize)
        {
            _contentBufferSize = contentBufferSize;
            if (_buffer != null)
            {
                // Copy any already-buffered content into the larger buffer.
                Buffer nb = _buffers.getBuffer(_contentBufferSize);
                nb.put(_buffer);
                _buffers.returnBuffer(_buffer);
                _buffer = nb;
            }
        }
    }

    /* ------------------------------------------------------------ */
    /** @return the raw content buffer (may be null). */
    public Buffer getUncheckedBuffer()
    {
        return _buffer;
    }

    /* ------------------------------------------------------------ */
    public boolean getSendServerVersion ()
    {
        return _sendServerVersion;
    }

    /* ------------------------------------------------------------ */
    public void setSendServerVersion (boolean sendServerVersion)
    {
        _sendServerVersion = sendServerVersion;
    }

    /* ------------------------------------------------------------ */
    public int getState()
    {
        return _state;
    }

    /* ------------------------------------------------------------ */
    public boolean isState(int state)
    {
        return _state == state;
    }

    /* ------------------------------------------------------------ */
    public boolean isComplete()
    {
        return _state == STATE_END;
    }

    /* ------------------------------------------------------------ */
    /** @return true if nothing has been set or generated yet. */
    public boolean isIdle()
    {
        return _state == STATE_HEADER && _method==null && _status==0;
    }

    /* ------------------------------------------------------------ */
    /** @return true once header generation has begun. */
    public boolean isCommitted()
    {
        return _state != STATE_HEADER;
    }

    /* ------------------------------------------------------------ */
    /**
     * @return Returns the head.
     */
    public boolean isHead()
    {
        return _head;
    }

    /* ------------------------------------------------------------ */
    /** Sets the content length; any negative value means "unknown". */
    public void setContentLength(long value)
    {
        if (value<0)
            _contentLength=HttpTokens.UNKNOWN_CONTENT;
        else
            _contentLength=value;
    }

    /* ------------------------------------------------------------ */
    /**
     * @param head The head to set.
     */
    public void setHead(boolean head)
    {
        _head = head;
    }

    /* ------------------------------------------------------------ */
    /**
     * @return <code>false</code> if the connection should be closed after a request has been read,
     * <code>true</code> if it should be used for additional requests.
     */
    public boolean isPersistent()
    {
        return !_close;
    }

    /* ------------------------------------------------------------ */
    public void setPersistent(boolean persistent)
    {
        _close=!persistent;
    }

    /* ------------------------------------------------------------ */
    /**
     * @param version The version of the client the response is being sent to (NB. Not the version
     *            in the response, which is the version of the server).
     * @throws IllegalStateException if the header is already committed.
     */
    public void setVersion(int version)
    {
        if (_state != STATE_HEADER)
            throw new IllegalStateException("STATE!=START");
        _version = version;
        // HTTP/0.9 responses carry no headers and no content framing.
        if (_version==HttpVersions.HTTP_0_9_ORDINAL && _method!=null)
            _noContent=true;
    }

    /* ------------------------------------------------------------ */
    public int getVersion()
    {
        return _version;
    }

    /* ------------------------------------------------------------ */
    /** Sets the request line; GET reuses a cached method buffer. */
    public void setRequest(String method, String uri)
    {
        if (method==null || HttpMethods.GET.equals(method) )
            _method=HttpMethods.GET_BUFFER;
        else
            _method=HttpMethods.CACHE.lookup(method);
        _uri=uri;
        if (_version==HttpVersions.HTTP_0_9_ORDINAL)
            _noContent=true;
    }

    /* ------------------------------------------------------------ */
    /**
     * Sets the status line. CR/LF characters in the reason are replaced with
     * spaces and the reason is truncated to half the header buffer size so it
     * can never overflow the header.
     *
     * @param status The status code to send.
     * @param reason the status message to send.
     * @throws IllegalStateException if the header is already committed.
     */
    public void setResponse(int status, String reason)
    {
        if (_state != STATE_HEADER)
            throw new IllegalStateException("STATE!=START");
        _status = status;
        if (reason!=null)
        {
            int len=reason.length();
            if (len>_headerBufferSize/2)
                len=_headerBufferSize/2;
            _reason=new ByteArrayBuffer(len);
            for (int i=0;i<len;i++)
            {
                char ch = reason.charAt(i);
                if (ch!='\r'&&ch!='\n')
                    _reason.put((byte)ch);
                else
                    _reason.put((byte)' ');
            }
        }
    }

    /* ------------------------------------------------------------ */
    /** Prepare buffer for unchecked writes.
     * Prepare the generator buffer to receive unchecked writes
     * @return the available space in the buffer.
     * @throws IOException
     */
    protected abstract int prepareUncheckedAddContent() throws IOException;

    /* ------------------------------------------------------------ */
    // Raw single-byte append; caller must have called prepareUncheckedAddContent().
    void uncheckedAddContent(int b)
    {
        _buffer.put((byte)b);
    }

    /* ------------------------------------------------------------ */
    // Accounts for the bytes appended since prepareUncheckedAddContent().
    void completeUncheckedAddContent()
    {
        if (_noContent)
        {
            // No-body message: discard anything that was buffered.
            if(_buffer!=null)
                _buffer.clear();
            return;
        }
        else
        {
            _contentWritten+=_buffer.length();
            // HEAD responses count the content length but never send the body.
            if (_head)
                _buffer.clear();
        }
    }

    /* ------------------------------------------------------------ */
    public boolean isBufferFull()
    {
        if (_buffer != null && _buffer.space()==0)
        {
            // Try to reclaim space by compacting before declaring full.
            if (_buffer.length()==0 && !_buffer.isImmutable())
                _buffer.compact();
            return _buffer.space()==0;
        }

        return _content!=null && _content.length()>0;
    }

    /* ------------------------------------------------------------ */
    /** @return true once a known content length has been fully written. */
    public boolean isContentWritten()
    {
        return _contentLength>=0 && _contentWritten>=_contentLength;
    }

    /* ------------------------------------------------------------ */
    public abstract void completeHeader(HttpFields fields, boolean allContentAdded) throws IOException;

    /* ------------------------------------------------------------ */
    /**
     * Complete the message.
     *
     * @throws IOException
     */
    public void complete() throws IOException
    {
        if (_state == STATE_HEADER)
        {
            throw new IllegalStateException("State==HEADER");
        }

        // A short write would leave the peer waiting for bytes that never
        // come, so force the connection closed instead of keeping it alive.
        if (_contentLength >= 0 && _contentLength != _contentWritten && !_head)
        {
            if (Log.isDebugEnabled())
                Log.debug("ContentLength written=="+_contentWritten+" != contentLength=="+_contentLength);
            _close = true;
        }
    }

    /* ------------------------------------------------------------ */
    public abstract long flush() throws IOException;

    /* ------------------------------------------------------------ */
    /**
     * Utility method to send an error response. If the builder is not committed, this call is
     * equivalent to a setResponse, addcontent and complete call.
     * Silently does nothing if the response is already committed.
     *
     * @param code status code to send
     * @param reason status message to send
     * @param content optional body text (may be null)
     * @param close whether to close the connection afterwards
     * @throws IOException
     */
    public void sendError(int code, String reason, String content, boolean close) throws IOException
    {
        if (!isCommitted())
        {
            setResponse(code, reason);
            _close = close;
            completeHeader(null, false);
            if (content != null)
                addContent(new View(new ByteArrayBuffer(content)), Generator.LAST);
            complete();
        }
    }

    /* ------------------------------------------------------------ */
    /**
     * @return Returns the contentWritten.
     */
    public long getContentWritten()
    {
        return _contentWritten;
    }

    /* ------------------------------------------------------------ */
    /* ------------------------------------------------------------ */
    /* ------------------------------------------------------------ */
    /* ------------------------------------------------------------ */
    /** Output.
     *
     * <p>
     * Implements {@link javax.servlet.ServletOutputStream} from the {@link javax.servlet} package.
     * </p>
     * A {@link ServletOutputStream} implementation that writes content
     * to a {@link AbstractGenerator}. The class is designed to be reused
     * and can be reopened after a close.
     */
    public static class Output extends ServletOutputStream
    {
        protected AbstractGenerator _generator;
        protected long _maxIdleTime;
        // Reusable wrapper so byte[] writes don't allocate per call.
        protected ByteArrayBuffer _buf = new ByteArrayBuffer(NO_BYTES);
        protected boolean _closed;

        // These are held here for reuse by Writer
        String _characterEncoding;
        Writer _converter;
        char[] _chars;
        ByteArrayOutputStream2 _bytes;

        /* ------------------------------------------------------------ */
        public Output(AbstractGenerator generator, long maxIdleTime)
        {
            _generator=generator;
            _maxIdleTime=maxIdleTime;
        }

        /* ------------------------------------------------------------ */
        /*
         * @see java.io.OutputStream#close()
         */
        // Marks the stream closed; reopen() re-arms it for the next request.
        public void close() throws IOException
        {
            _closed=true;
        }

        /* ------------------------------------------------------------ */
        // Pushes generator output to the endpoint: a blocking endpoint simply
        // flushes; a non-blocking one waits for writability (up to the idle
        // timeout) before flushing.
        void blockForOutput() throws IOException
        {
            if (_generator._endp.isBlocking())
            {
                try
                {
                    flush();
                }
                catch(IOException e)
                {
                    // The endpoint is broken; close it before propagating.
                    _generator._endp.close();
                    throw e;
                }
            }
            else
            {
                if (!_generator._endp.blockWritable(_maxIdleTime))
                {
                    _generator._endp.close();
                    throw new EofException("timeout");
                }

                _generator.flush();
            }
        }

        /* ------------------------------------------------------------ */
        void reopen()
        {
            _closed=false;
        }

        /* ------------------------------------------------------------ */
        public void flush() throws IOException
        {
            // block until everything is flushed
            Buffer content = _generator._content;
            Buffer buffer = _generator._buffer;
            if (content!=null && content.length()>0 || buffer!=null && buffer.length()>0 || _generator.isBufferFull())
            {
                _generator.flush();

                while ((content!=null && content.length()>0 ||buffer!=null && buffer.length()>0) && _generator._endp.isOpen())
                    blockForOutput();
            }
        }

        /* ------------------------------------------------------------ */
        public void write(byte[] b, int off, int len) throws IOException
        {
            _buf.wrap(b, off, len);
            write(_buf);
        }

        /* ------------------------------------------------------------ */
        /*
         * @see java.io.OutputStream#write(byte[])
         */
        public void write(byte[] b) throws IOException
        {
            _buf.wrap(b);
            write(_buf);
        }

        /* ------------------------------------------------------------ */
        /*
         * @see java.io.OutputStream#write(int)
         */
        public void write(int b) throws IOException
        {
            if (_closed)
                throw new IOException("Closed");
            if (!_generator._endp.isOpen())
                throw new EofException();

            // Block until we can add _content.
            while (_generator.isBufferFull())
            {
                blockForOutput();
                if (_closed)
                    throw new IOException("Closed");
                if (!_generator._endp.isOpen())
                    throw new EofException();
            }

            // Add the _content
            if (_generator.addContent((byte)b))
                // Buffers are full so flush.
                flush();

            if (_generator.isContentWritten())
            {
                flush();
                close();
            }
        }

        /* ------------------------------------------------------------ */
        private void write(Buffer buffer) throws IOException
        {
            if (_closed)
                throw new IOException("Closed");
            if (!_generator._endp.isOpen())
                throw new EofException();

            // Block until we can add _content.
            while (_generator.isBufferFull())
            {
                blockForOutput();
                if (_closed)
                    throw new IOException("Closed");
                if (!_generator._endp.isOpen())
                    throw new EofException();
            }

            // Add the _content
            _generator.addContent(buffer, Generator.MORE);

            // Have to flush and complete headers?
            if (_generator.isBufferFull())
                flush();

            if (_generator.isContentWritten())
            {
                flush();
                close();
            }

            // Block until our buffer is free
            while (buffer.length() > 0 && _generator._endp.isOpen())
                blockForOutput();
        }

        /* ------------------------------------------------------------ */
        /*
         * @see javax.servlet.ServletOutputStream#print(java.lang.String)
         */
        public void print(String s) throws IOException
        {
            // NOTE(review): String.getBytes() uses the platform default
            // charset, not the response encoding -- confirm callers route
            // non-ASCII text through OutputWriter instead.
            write(s.getBytes());
        }
    }

    /* ------------------------------------------------------------ */
    /* ------------------------------------------------------------ */
    /* ------------------------------------------------------------ */
    /** OutputWriter.
     * A writer that can wrap a {@link Output} stream and provide
     * character encodings.
     *
     * The UTF-8 encoding is done by this class and no additional
     * buffers or Writers are used.
     * The UTF-8 code was inspired by http://javolution.org
     */
    public static class OutputWriter extends Writer
    {
        // Conversion strategies selected by setCharacterEncoding().
        private static final int WRITE_CONV = 0;  // arbitrary charset via OutputStreamWriter
        private static final int WRITE_ISO1 = 1;  // ISO-8859-1: one byte per char
        private static final int WRITE_UTF8 = 2;  // UTF-8 encoded inline below

        Output _out;
        AbstractGenerator _generator;
        int _writeMode;
        // NOTE(review): never read or written in this class -- surrogate
        // pairs are not combined by the inline UTF-8 encoder below.
        int _surrogate;

        /* ------------------------------------------------------------ */
        public OutputWriter(Output out)
        {
            _out=out;
            _generator=_out._generator;
        }

        /* ------------------------------------------------------------ */
        // Selects the conversion mode; ISO-8859-1 and UTF-8 are handled
        // inline, anything else goes through a lazily-created converter.
        public void setCharacterEncoding(String encoding)
        {
            if (encoding == null || StringUtil.__ISO_8859_1.equalsIgnoreCase(encoding))
            {
                _writeMode = WRITE_ISO1;
            }
            else if (StringUtil.__UTF8.equalsIgnoreCase(encoding))
            {
                _writeMode = WRITE_UTF8;
            }
            else
            {
                _writeMode = WRITE_CONV;
                if (_out._characterEncoding == null || !_out._characterEncoding.equalsIgnoreCase(encoding))
                    _out._converter = null; // Set lazily in getConverter()
            }

            _out._characterEncoding = encoding;
            if (_out._bytes==null)
                _out._bytes = new ByteArrayOutputStream2(MAX_OUTPUT_CHARS);
        }

        /* ------------------------------------------------------------ */
        public void close() throws IOException
        {
            _out.close();
        }

        /* ------------------------------------------------------------ */
        public void flush() throws IOException
        {
            _out.flush();
        }

        /* ------------------------------------------------------------ */
        // Copies the string into the shared char[] in MAX_OUTPUT_CHARS
        // slices and delegates to the char[] overload.
        public void write (String s,int offset, int length) throws IOException
        {
            while (length > MAX_OUTPUT_CHARS)
            {
                write(s, offset, MAX_OUTPUT_CHARS);
                offset += MAX_OUTPUT_CHARS;
                length -= MAX_OUTPUT_CHARS;
            }

            if (_out._chars==null)
            {
                _out._chars = new char[MAX_OUTPUT_CHARS];
            }
            char[] chars = _out._chars;
            s.getChars(offset, offset + length, chars, 0);
            write(chars, 0, length);
        }

        /* ------------------------------------------------------------ */
        // Converts chars to bytes per the selected mode and writes them to
        // the Output stream in MAX_OUTPUT_CHARS-bounded chunks.
        public void write (char[] s,int offset, int length) throws IOException
        {
            Output out = _out;

            while (length > 0)
            {
                out._bytes.reset();
                int chars = length>MAX_OUTPUT_CHARS?MAX_OUTPUT_CHARS:length;

                switch (_writeMode)
                {
                    case WRITE_CONV:
                    {
                        Writer converter=getConverter();
                        converter.write(s, offset, chars);
                        converter.flush();
                    }
                    break;

                    case WRITE_ISO1:
                    {
                        byte[] buffer=out._bytes.getBuf();
                        int bytes=out._bytes.getCount();

                        if (chars>buffer.length-bytes)
                            chars=buffer.length-bytes;

                        for (int i = 0; i < chars; i++)
                        {
                            int c = s[offset+i];
                            // Chars above 255 cannot be represented and become '?'.
                            buffer[bytes++]=(byte)(c<256?c:'?'); // ISO-1 and UTF-8 match for 0 - 255
                        }
                        // NOTE(review): "bytes>=0" is always true here; kept as-is.
                        if (bytes>=0)
                            out._bytes.setCount(bytes);

                        break;
                    }

                    case WRITE_UTF8:
                    {
                        // Inline UTF-8 encoder. If the byte buffer fills
                        // mid-character, "chars" is cut back so the partial
                        // character is re-encoded on the next loop pass.
                        // NOTE(review): the 4b/5b/6b masks (0xff200000,
                        // 0xf4000000) and the 5/6-byte forms predate RFC 3629
                        // (which caps UTF-8 at 4 bytes) -- confirm before
                        // relying on them for supplementary characters.
                        byte[] buffer=out._bytes.getBuf();
                        int bytes=out._bytes.getCount();

                        if (bytes+chars>buffer.length)
                            chars=buffer.length-bytes;

                        for (int i = 0; i < chars; i++)
                        {
                            int code = s[offset+i];

                            if ((code & 0xffffff80) == 0)
                            {
                                // 1b
                                if (bytes+1>buffer.length)
                                {
                                    chars=i;
                                    break;
                                }
                                buffer[bytes++]=(byte)(code);
                            }
                            else
                            {
                                if((code&0xfffff800)==0)
                                {
                                    // 2b
                                    if (bytes+2>buffer.length)
                                    {
                                        chars=i;
                                        break;
                                    }
                                    buffer[bytes++]=(byte)(0xc0|(code>>6));
                                    buffer[bytes++]=(byte)(0x80|(code&0x3f));
                                }
                                else if((code&0xffff0000)==0)
                                {
                                    // 3b
                                    if (bytes+3>buffer.length)
                                    {
                                        chars=i;
                                        break;
                                    }
                                    buffer[bytes++]=(byte)(0xe0|(code>>12));
                                    buffer[bytes++]=(byte)(0x80|((code>>6)&0x3f));
                                    buffer[bytes++]=(byte)(0x80|(code&0x3f));
                                }
                                else if((code&0xff200000)==0)
                                {
                                    // 4b
                                    if (bytes+4>buffer.length)
                                    {
                                        chars=i;
                                        break;
                                    }
                                    buffer[bytes++]=(byte)(0xf0|(code>>18));
                                    buffer[bytes++]=(byte)(0x80|((code>>12)&0x3f));
                                    buffer[bytes++]=(byte)(0x80|((code>>6)&0x3f));
                                    buffer[bytes++]=(byte)(0x80|(code&0x3f));
                                }
                                else if((code&0xf4000000)==0)
                                {
                                    // 5b
                                    if (bytes+5>buffer.length)
                                    {
                                        chars=i;
                                        break;
                                    }
                                    buffer[bytes++]=(byte)(0xf8|(code>>24));
                                    buffer[bytes++]=(byte)(0x80|((code>>18)&0x3f));
                                    buffer[bytes++]=(byte)(0x80|((code>>12)&0x3f));
                                    buffer[bytes++]=(byte)(0x80|((code>>6)&0x3f));
                                    buffer[bytes++]=(byte)(0x80|(code&0x3f));
                                }
                                else if((code&0x80000000)==0)
                                {
                                    // 6b
                                    if (bytes+6>buffer.length)
                                    {
                                        chars=i;
                                        break;
                                    }
                                    buffer[bytes++]=(byte)(0xfc|(code>>30));
                                    buffer[bytes++]=(byte)(0x80|((code>>24)&0x3f));
                                    buffer[bytes++]=(byte)(0x80|((code>>18)&0x3f));
                                    buffer[bytes++]=(byte)(0x80|((code>>12)&0x3f));
                                    buffer[bytes++]=(byte)(0x80|((code>>6)&0x3f));
                                    buffer[bytes++]=(byte)(0x80|(code&0x3f));
                                }
                                else
                                {
                                    // Unencodable code point becomes '?'.
                                    buffer[bytes++]=(byte)('?');
                                }

                                if (bytes==buffer.length)
                                {
                                    chars=i+1;
                                    break;
                                }
                            }
                        }
                        out._bytes.setCount(bytes);

                        break;
                    }
                    default:
                        throw new IllegalStateException();
                }

                out._bytes.writeTo(out);
                length-=chars;
                offset+=chars;
            }
        }

        /* ------------------------------------------------------------ */
        // Lazily creates the charset converter for WRITE_CONV mode.
        private Writer getConverter() throws IOException
        {
            if (_out._converter == null)
                _out._converter = new OutputStreamWriter(_out._bytes, _out._characterEncoding);
            return _out._converter;
        }
    }
}
package net.mybluemix.parteg.fornecedor; public class Estoquista extends java.lang.Object implements java.lang.Cloneable { public State state; public Boolean status; public Boolean ativo; public Integer cnpj; public Integer nome; public Integer email; public Integer telefone; public Integer prazoEntregaDias; public Integer endereco_Pais; public Integer endereco_Estado; public Integer endereco_Cidade; public Integer endereco_Logradouro; public Integer endereco_CEP; public AdapterInterface adapter; public AdapterInterfaceDados dados; public String cnpjOk; public String nomeOk; public String emailOk; public String telefoneOk; public Integer prazoEntregaDiasOk; public String endereco_PaisOk; public Integer endereco_EstadoOk; public String endereco_CidadeOk; public String endereco_LogradouroOk; public String endereco_CEPOk; public Estoquista() { state = State.mostrandoAtivos; adapter = new FornecedorPageAdapter(); dados = new Dados(); } public Estoquista clone() { try { return (Estoquista)super.clone(); } catch(Exception e) { e.printStackTrace(java.lang.System.err); } return null; } public void handleEvent(Object... 
in_colObject) { if(in_colObject.length > 0) { String sEventName = (String)in_colObject[0]; if((state == State.mostrandoAtivos) && (sEventName.compareTo("cadastrarEvent") == 0)) { ativo = ((Boolean)in_colObject[1]).booleanValue(); cnpj = ((Integer)in_colObject[2]).intValue(); nome = ((Integer)in_colObject[3]).intValue(); email = ((Integer)in_colObject[4]).intValue(); telefone = ((Integer)in_colObject[5]).intValue(); prazoEntregaDias = ((Integer)in_colObject[6]).intValue(); endereco_Pais = ((Integer)in_colObject[7]).intValue(); endereco_Estado = (Integer) in_colObject[8]; endereco_Cidade = ((Integer)in_colObject[9]).intValue(); endereco_Logradouro = ((Integer)in_colObject[10]).intValue(); endereco_CEP = ((Integer)in_colObject[11]).intValue(); cnpjOk = dados.dado_cnpj(cnpj); nomeOk = dados.dado_nome(nome); emailOk = dados.dado_email(email); telefoneOk = dados.dado_telefone(telefone); prazoEntregaDiasOk = dados.dado_prazoEntregaDias(prazoEntregaDias); endereco_PaisOk = dados.dado_pais(endereco_Pais); endereco_EstadoOk = dados.dado_estado(endereco_Estado); endereco_CidadeOk = dados.dado_cidade(endereco_Cidade); endereco_LogradouroOk = dados.dado_logradouro(endereco_Logradouro); endereco_CEPOk = dados.dado_CEP(endereco_CEP); //System.out.println(cnpj + " " + cnpj_send); //System.out.println(nome + " " + nome_send); //System.out.println(email + " " + email_send); //System.out.println(telefone + " " + telefone_send); //System.out.println(prazoEntregaDias + " " + prazoEntregaDias_send); //System.out.println(endereco_Pais + " " + pais_send); //System.out.println(endereco_Estado + " " + estado_send); //System.out.println(endereco_Cidade + " " + cidade_send); //System.out.println(logradouro_send + " " + logradouro_send); //System.out.println(endereco_CEP + " " + CEP_send); if (cadastrar(ativo, cnpjOk, nomeOk, emailOk, telefoneOk, prazoEntregaDiasOk, endereco_PaisOk, endereco_EstadoOk, endereco_CidadeOk, endereco_LogradouroOk, endereco_CEPOk)) state = State.cadastrando; else 
state = State.mostrandoAtivos; } else if((state == State.mostrandoInativos) && (sEventName.compareTo("cadastrarEvent") == 0)) { ativo = ((Boolean)in_colObject[1]).booleanValue(); cnpj = ((Integer)in_colObject[2]).intValue(); nome = ((Integer)in_colObject[3]).intValue(); email = ((Integer)in_colObject[4]).intValue(); telefone = ((Integer)in_colObject[5]).intValue(); prazoEntregaDias = ((Integer)in_colObject[6]).intValue(); endereco_Pais = ((Integer)in_colObject[7]).intValue(); endereco_Estado = ((Integer)in_colObject[8]).intValue(); endereco_Cidade = ((Integer)in_colObject[9]).intValue(); endereco_Logradouro = ((Integer)in_colObject[10]).intValue(); endereco_CEP = ((Integer)in_colObject[11]).intValue(); cnpjOk = dados.dado_cnpj(cnpj); nomeOk = dados.dado_nome(nome); emailOk = dados.dado_email(email); telefoneOk = dados.dado_telefone(telefone); prazoEntregaDiasOk = dados.dado_prazoEntregaDias(prazoEntregaDias); endereco_PaisOk = dados.dado_pais(endereco_Pais); endereco_EstadoOk = dados.dado_estado(endereco_Estado); endereco_CidadeOk = dados.dado_cidade(endereco_Cidade); endereco_LogradouroOk = dados.dado_logradouro(endereco_Logradouro); endereco_CEPOk = dados.dado_CEP(endereco_CEP); if (cadastrar(ativo, cnpjOk, nomeOk, emailOk, telefoneOk, prazoEntregaDiasOk, endereco_PaisOk, endereco_EstadoOk, endereco_CidadeOk, endereco_LogradouroOk, endereco_CEPOk)) state = State.cadastrando; else state = State.mostrandoInativos; } else if((state == State.cadastrando) && (sEventName.compareTo("fazerCadastroAtivosEvent") == 0)) { if (fazerCadastroAtivos() && nome>0) state = State.mostrandoAtivos; else state = State.cadastrando; } else if((state == State.cadastrando) && (sEventName.compareTo("fazerCadastroInativosEvent") == 0)) { if (fazerCadastroInativos() && nome>0) state = State.mostrandoInativos; else state = State.cadastrando; } else if((state == State.mostrandoAtivos) && (sEventName.compareTo("alterarEvent") == 0)) { ativo = ((Boolean)in_colObject[1]).booleanValue(); cnpj = 
((Integer)in_colObject[2]).intValue(); nome = ((Integer)in_colObject[3]).intValue(); email = ((Integer)in_colObject[4]).intValue(); telefone = ((Integer)in_colObject[5]).intValue(); prazoEntregaDias = ((Integer)in_colObject[6]).intValue(); endereco_Pais = ((Integer)in_colObject[7]).intValue(); endereco_Estado = ((Integer)in_colObject[8]).intValue(); endereco_Cidade = ((Integer)in_colObject[9]).intValue(); endereco_Logradouro = ((Integer)in_colObject[10]).intValue(); endereco_CEP = ((Integer)in_colObject[11]).intValue(); cnpjOk = dados.dado_cnpj(cnpj); nomeOk = dados.dado_nome(nome); emailOk = dados.dado_email(email); telefoneOk = dados.dado_telefone(telefone); prazoEntregaDiasOk = dados.dado_prazoEntregaDias(prazoEntregaDias); endereco_PaisOk = dados.dado_pais(endereco_Pais); endereco_EstadoOk = dados.dado_estado(endereco_Estado); endereco_CidadeOk = dados.dado_cidade(endereco_Cidade); endereco_LogradouroOk = dados.dado_logradouro(endereco_Logradouro); endereco_CEPOk = dados.dado_CEP(endereco_CEP); if (alterar(ativo, cnpjOk, nomeOk, emailOk, telefoneOk, prazoEntregaDiasOk, endereco_PaisOk, endereco_EstadoOk, endereco_CidadeOk, endereco_LogradouroOk, endereco_CEPOk)) state = State.alterando; else state = State.mostrandoAtivos; } else if((state == State.mostrandoInativos) && (sEventName.compareTo("alterarEvent") == 0)) { ativo = ((Boolean)in_colObject[1]).booleanValue(); cnpj = ((Integer)in_colObject[2]).intValue(); nome = ((Integer)in_colObject[3]).intValue(); email = ((Integer)in_colObject[4]).intValue(); telefone = ((Integer)in_colObject[5]).intValue(); prazoEntregaDias = ((Integer)in_colObject[6]).intValue(); endereco_Pais = ((Integer)in_colObject[7]).intValue(); endereco_Estado = ((Integer)in_colObject[8]).intValue(); endereco_Cidade = ((Integer)in_colObject[9]).intValue(); endereco_Logradouro = ((Integer)in_colObject[10]).intValue(); endereco_CEP = ((Integer)in_colObject[11]).intValue(); cnpjOk = dados.dado_cnpj(cnpj); nomeOk = dados.dado_nome(nome); emailOk = 
dados.dado_email(email); telefoneOk = dados.dado_telefone(telefone); prazoEntregaDiasOk = dados.dado_prazoEntregaDias(prazoEntregaDias); endereco_PaisOk = dados.dado_pais(endereco_Pais); endereco_EstadoOk = dados.dado_estado(endereco_Estado); endereco_CidadeOk = dados.dado_cidade(endereco_Cidade); endereco_LogradouroOk = dados.dado_logradouro(endereco_Logradouro); endereco_CEPOk = dados.dado_CEP(endereco_CEP); if (alterar(ativo, cnpjOk, nomeOk, emailOk, telefoneOk, prazoEntregaDiasOk, endereco_PaisOk, endereco_EstadoOk, endereco_CidadeOk, endereco_LogradouroOk, endereco_CEPOk)) state = State.alterando; else state = State.mostrandoInativos; } else if((state == State.alterando) && (sEventName.compareTo("fazerAlteracoesAtivosEvent") == 0)) { if (fazerAlteracoesAtivos() && nome>0) state = State.mostrandoAtivos; else state = State.alterando; } else if((state == State.alterando) && (sEventName.compareTo("fazerAlteracoesInativosEvent") == 0)) { if (fazerAlteracoesInativos() && nome>0) state = State.mostrandoInativos; else state = State.alterando; } else if((state == State.mostrandoAtivos) && (sEventName.compareTo("mostrarInativosEvent") == 0)) { if (mostrarInativos()) { state = State.mostrandoInativos; status=false; } else { state = State.mostrandoAtivos; } } else if((state == State.mostrandoInativos) && (sEventName.compareTo("mostrarAtivosEvent") == 0)) { if (mostrarAtivos()) { state = State.mostrandoAtivos; status=true; } else { state = State.mostrandoInativos; } } } } public Boolean cadastrar(Boolean ativo, String cnpj, String nome, String email, String telefone, Integer prazoEntregaDias, String endereco_Pais, Integer endereco_Estado, String endereco_Cidade, String endereco_Logradouro, String endereco_CEP) { //return true; return adapter.cadastrarEvent(ativo, cnpj, nome, email, telefone, prazoEntregaDias, endereco_Pais, endereco_Estado, endereco_Cidade, endereco_Logradouro, endereco_CEP); } public Boolean fazerCadastroAtivos() { // if(status == true) if(status == true 
&& adapter.fazerCadastroAtivosEvent() == true) return true; else return false; } public Boolean fazerCadastroInativos() { //if(status == false) if(status == false && adapter.fazerCadastroInativosEvent() == true) return true; else return false; } public Boolean alterar(Boolean ativo, String cnpj, String nome, String email, String telefone, Integer prazoEntregaDias, String endereco_Pais, Integer endereco_Estado, String endereco_Cidade, String endereco_Logradouro, String endereco_CEP) { //return true; return adapter.alterarEvent(ativo, cnpj, nome, email, telefone, prazoEntregaDias, endereco_Pais, endereco_Estado, endereco_Cidade, endereco_Logradouro, endereco_CEP); } public Boolean fazerAlteracoesAtivos() { // if(status == true) if(status == true && adapter.fazerAlteracoesAtivosEvent() == true) return true; else return false; } public Boolean fazerAlteracoesInativos() { // if(status == false) if(status == false && adapter.fazerCadastroInativosEvent() == true) return true; else return false; } public Boolean mostrarAtivos() { //return true; return adapter.mostrarAtivosEvent(); } public Boolean mostrarInativos() { //return true; return adapter.mostrarInativosEvent(); } }
/* * Druid - a distributed column store. * Copyright 2012 - 2015 Metamarkets Group Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.druid.indexing.overlord.http; import com.fasterxml.jackson.annotation.JsonValue; import com.google.common.base.Function; import com.google.common.base.Optional; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Sets; import com.google.common.io.ByteSource; import com.google.common.util.concurrent.SettableFuture; import com.google.inject.Inject; import com.metamx.common.logger.Logger; import io.druid.audit.AuditInfo; import io.druid.audit.AuditManager; import io.druid.common.config.JacksonConfigManager; import io.druid.indexing.common.TaskStatus; import io.druid.indexing.common.actions.TaskActionClient; import io.druid.indexing.common.actions.TaskActionHolder; import io.druid.indexing.common.task.Task; import io.druid.indexing.overlord.TaskMaster; import io.druid.indexing.overlord.TaskQueue; import io.druid.indexing.overlord.TaskRunner; import io.druid.indexing.overlord.TaskRunnerWorkItem; import io.druid.indexing.overlord.TaskStorageQueryAdapter; import io.druid.indexing.overlord.autoscaling.ResourceManagementScheduler; import io.druid.indexing.overlord.setup.WorkerBehaviorConfig; import io.druid.metadata.EntryExistsException; import io.druid.tasklogs.TaskLogStreamer; import 
io.druid.timeline.DataSegment; import org.joda.time.DateTime; import org.joda.time.Interval; import javax.ws.rs.Consumes; import javax.ws.rs.DefaultValue; import javax.ws.rs.GET; import javax.ws.rs.HeaderParam; import javax.ws.rs.POST; import javax.ws.rs.Path; import javax.ws.rs.PathParam; import javax.ws.rs.Produces; import javax.ws.rs.QueryParam; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import java.io.IOException; import java.util.Collection; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.atomic.AtomicReference; /** */ @Path("/druid/indexer/v1") public class OverlordResource { private static final Logger log = new Logger(OverlordResource.class); private final TaskMaster taskMaster; private final TaskStorageQueryAdapter taskStorageQueryAdapter; private final TaskLogStreamer taskLogStreamer; private final JacksonConfigManager configManager; private final AuditManager auditManager; private AtomicReference<WorkerBehaviorConfig> workerConfigRef = null; @Inject public OverlordResource( TaskMaster taskMaster, TaskStorageQueryAdapter taskStorageQueryAdapter, TaskLogStreamer taskLogStreamer, JacksonConfigManager configManager, AuditManager auditManager ) throws Exception { this.taskMaster = taskMaster; this.taskStorageQueryAdapter = taskStorageQueryAdapter; this.taskLogStreamer = taskLogStreamer; this.configManager = configManager; this.auditManager = auditManager; } @POST @Path("/task") @Consumes(MediaType.APPLICATION_JSON) @Produces(MediaType.APPLICATION_JSON) public Response taskPost(final Task task) { return asLeaderWith( taskMaster.getTaskQueue(), new Function<TaskQueue, Response>() { @Override public Response apply(TaskQueue taskQueue) { try { taskQueue.add(task); return Response.ok(ImmutableMap.of("task", task.getId())).build(); } catch (EntryExistsException e) { return Response.status(Response.Status.BAD_REQUEST) .entity(ImmutableMap.of("error", String.format("Task[%s] already exists!", 
task.getId()))) .build(); } } } ); } @GET @Path("/leader") @Produces(MediaType.APPLICATION_JSON) public Response getLeader() { return Response.ok(taskMaster.getLeader()).build(); } @GET @Path("/task/{taskid}") @Produces(MediaType.APPLICATION_JSON) public Response getTaskPayload(@PathParam("taskid") String taskid) { return optionalTaskResponse(taskid, "payload", taskStorageQueryAdapter.getTask(taskid)); } @GET @Path("/task/{taskid}/status") @Produces(MediaType.APPLICATION_JSON) public Response getTaskStatus(@PathParam("taskid") String taskid) { return optionalTaskResponse(taskid, "status", taskStorageQueryAdapter.getStatus(taskid)); } @GET @Path("/task/{taskid}/segments") @Produces(MediaType.APPLICATION_JSON) public Response getTaskSegments(@PathParam("taskid") String taskid) { final Set<DataSegment> segments = taskStorageQueryAdapter.getInsertedSegments(taskid); return Response.ok().entity(segments).build(); } @POST @Path("/task/{taskid}/shutdown") @Produces(MediaType.APPLICATION_JSON) public Response doShutdown(@PathParam("taskid") final String taskid) { return asLeaderWith( taskMaster.getTaskQueue(), new Function<TaskQueue, Response>() { @Override public Response apply(TaskQueue taskQueue) { taskQueue.shutdown(taskid); return Response.ok(ImmutableMap.of("task", taskid)).build(); } } ); } @GET @Path("/worker") @Produces(MediaType.APPLICATION_JSON) public Response getWorkerConfig() { if (workerConfigRef == null) { workerConfigRef = configManager.watch(WorkerBehaviorConfig.CONFIG_KEY, WorkerBehaviorConfig.class); } return Response.ok(workerConfigRef.get()).build(); } // default value is used for backwards compatibility @POST @Path("/worker") @Consumes(MediaType.APPLICATION_JSON) public Response setWorkerConfig( final WorkerBehaviorConfig workerBehaviorConfig, @HeaderParam(AuditManager.X_DRUID_AUTHOR) @DefaultValue("") final String author, @HeaderParam(AuditManager.X_DRUID_COMMENT) @DefaultValue("") final String comment ) { if 
(!configManager.set(WorkerBehaviorConfig.CONFIG_KEY, workerBehaviorConfig, new AuditInfo(author, comment))) { return Response.status(Response.Status.BAD_REQUEST).build(); } log.info("Updating Worker configs: %s", workerBehaviorConfig); return Response.ok().build(); } @GET @Path("/worker/history") @Produces(MediaType.APPLICATION_JSON) public Response getWorkerConfigHistory( @QueryParam("interval") final String interval ) { Interval theInterval = interval == null ? null : new Interval(interval); return Response.ok( auditManager.fetchAuditHistory( WorkerBehaviorConfig.CONFIG_KEY, WorkerBehaviorConfig.CONFIG_KEY, theInterval ) ) .build(); } @POST @Path("/action") @Produces(MediaType.APPLICATION_JSON) public <T> Response doAction(final TaskActionHolder<T> holder) { return asLeaderWith( taskMaster.getTaskActionClient(holder.getTask()), new Function<TaskActionClient, Response>() { @Override public Response apply(TaskActionClient taskActionClient) { final Map<String, Object> retMap; // It would be great to verify that this worker is actually supposed to be running the task before // actually doing the action. Some ideas for how that could be done would be using some sort of attempt_id // or token that gets passed around. try { final T ret = taskActionClient.submit(holder.getAction()); retMap = Maps.newHashMap(); retMap.put("result", ret); } catch (IOException e) { log.warn(e, "Failed to perform task action"); return Response.serverError().build(); } return Response.ok().entity(retMap).build(); } } ); } @GET @Path("/waitingTasks") @Produces(MediaType.APPLICATION_JSON) public Response getWaitingTasks() { return workItemsResponse( new Function<TaskRunner, Collection<? extends TaskRunnerWorkItem>>() { @Override public Collection<? 
extends TaskRunnerWorkItem> apply(TaskRunner taskRunner) { // A bit roundabout, but works as a way of figuring out what tasks haven't been handed // off to the runner yet: final List<Task> activeTasks = taskStorageQueryAdapter.getActiveTasks(); final Set<String> runnersKnownTasks = Sets.newHashSet( Iterables.transform( taskRunner.getKnownTasks(), new Function<TaskRunnerWorkItem, String>() { @Override public String apply(final TaskRunnerWorkItem workItem) { return workItem.getTaskId(); } } ) ); final List<TaskRunnerWorkItem> waitingTasks = Lists.newArrayList(); for (final Task task : activeTasks) { if (!runnersKnownTasks.contains(task.getId())) { waitingTasks.add( // Would be nice to include the real created date, but the TaskStorage API doesn't yet allow it. new TaskRunnerWorkItem( task.getId(), SettableFuture.<TaskStatus>create(), new DateTime(0), new DateTime(0) ) ); } } return waitingTasks; } } ); } @GET @Path("/pendingTasks") @Produces(MediaType.APPLICATION_JSON) public Response getPendingTasks() { return workItemsResponse( new Function<TaskRunner, Collection<? extends TaskRunnerWorkItem>>() { @Override public Collection<? extends TaskRunnerWorkItem> apply(TaskRunner taskRunner) { return taskRunner.getPendingTasks(); } } ); } @GET @Path("/runningTasks") @Produces(MediaType.APPLICATION_JSON) public Response getRunningTasks() { return workItemsResponse( new Function<TaskRunner, Collection<? extends TaskRunnerWorkItem>>() { @Override public Collection<? 
extends TaskRunnerWorkItem> apply(TaskRunner taskRunner) { return taskRunner.getRunningTasks(); } } ); } @GET @Path("/completeTasks") @Produces(MediaType.APPLICATION_JSON) public Response getCompleteTasks() { final List<TaskResponseObject> completeTasks = Lists.transform( taskStorageQueryAdapter.getRecentlyFinishedTaskStatuses(), new Function<TaskStatus, TaskResponseObject>() { @Override public TaskResponseObject apply(TaskStatus taskStatus) { // Would be nice to include the real created date, but the TaskStorage API doesn't yet allow it. return new TaskResponseObject( taskStatus.getId(), new DateTime(0), new DateTime(0), Optional.of(taskStatus) ); } } ); return Response.ok(completeTasks).build(); } @GET @Path("/workers") @Produces(MediaType.APPLICATION_JSON) public Response getWorkers() { return asLeaderWith( taskMaster.getTaskRunner(), new Function<TaskRunner, Response>() { @Override public Response apply(TaskRunner taskRunner) { return Response.ok(taskRunner.getWorkers()).build(); } } ); } @GET @Path("/scaling") @Produces(MediaType.APPLICATION_JSON) public Response getScalingState() { // Don't use asLeaderWith, since we want to return 200 instead of 503 when missing an autoscaler. final Optional<ResourceManagementScheduler> rms = taskMaster.getResourceManagementScheduler(); if (rms.isPresent()) { return Response.ok(rms.get().getStats()).build(); } else { return Response.ok().build(); } } @GET @Path("/task/{taskid}/log") @Produces("text/plain") public Response doGetLog( @PathParam("taskid") final String taskid, @QueryParam("offset") @DefaultValue("0") final long offset ) { try { final Optional<ByteSource> stream = taskLogStreamer.streamTaskLog(taskid, offset); if (stream.isPresent()) { return Response.ok(stream.get().openStream()).build(); } else { return Response.status(Response.Status.NOT_FOUND) .entity( "No log was found for this task. " + "The task may not exist, or it may not have begun running yet." 
) .build(); } } catch (Exception e) { log.warn(e, "Failed to stream log for task %s", taskid); return Response.status(Response.Status.INTERNAL_SERVER_ERROR).build(); } } private Response workItemsResponse(final Function<TaskRunner, Collection<? extends TaskRunnerWorkItem>> fn) { return asLeaderWith( taskMaster.getTaskRunner(), new Function<TaskRunner, Response>() { @Override public Response apply(TaskRunner taskRunner) { return Response.ok( Lists.transform( Lists.newArrayList(fn.apply(taskRunner)), new Function<TaskRunnerWorkItem, TaskResponseObject>() { @Override public TaskResponseObject apply(TaskRunnerWorkItem workItem) { return new TaskResponseObject( workItem.getTaskId(), workItem.getCreatedTime(), workItem.getQueueInsertionTime(), Optional.<TaskStatus>absent() ); } } ) ).build(); } } ); } private <T> Response optionalTaskResponse(String taskid, String objectType, Optional<T> x) { final Map<String, Object> results = Maps.newHashMap(); results.put("task", taskid); if (x.isPresent()) { results.put(objectType, x.get()); return Response.status(Response.Status.OK).entity(results).build(); } else { return Response.status(Response.Status.NOT_FOUND).entity(results).build(); } } private <T> Response asLeaderWith(Optional<T> x, Function<T, Response> f) { if (x.isPresent()) { return f.apply(x.get()); } else { // Encourage client to try again soon, when we'll likely have a redirect set up return Response.status(Response.Status.SERVICE_UNAVAILABLE).build(); } } private static class TaskResponseObject { private final String id; private final DateTime createdTime; private final DateTime queueInsertionTime; private final Optional<TaskStatus> status; private TaskResponseObject( String id, DateTime createdTime, DateTime queueInsertionTime, Optional<TaskStatus> status ) { this.id = id; this.createdTime = createdTime; this.queueInsertionTime = queueInsertionTime; this.status = status; } public String getId() { return id; } public DateTime getCreatedTime() { return createdTime; } 
public DateTime getQueueInsertionTime() { return queueInsertionTime; } public Optional<TaskStatus> getStatus() { return status; } @JsonValue public Map<String, Object> toJson() { final Map<String, Object> data = Maps.newLinkedHashMap(); data.put("id", id); if (createdTime.getMillis() > 0) { data.put("createdTime", createdTime); } if (queueInsertionTime.getMillis() > 0) { data.put("queueInsertionTime", queueInsertionTime); } if (status.isPresent()) { data.put("statusCode", status.get().getStatusCode().toString()); } return data; } } }
/** * Copyright 2015 StreamSets Inc. * * Licensed under the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.streamsets.pipeline.stage.origin.spooldir; import com.google.common.io.Resources; import com.streamsets.pipeline.api.BatchMaker; import com.streamsets.pipeline.api.Record; import com.streamsets.pipeline.config.Compression; import com.streamsets.pipeline.config.DataFormat; import com.streamsets.pipeline.config.OnParseError; import com.streamsets.pipeline.config.PostProcessingOptions; import com.streamsets.pipeline.sdk.SourceRunner; import com.streamsets.pipeline.sdk.StageRunner; import org.apache.commons.compress.compressors.CompressorException; import org.apache.commons.compress.compressors.CompressorOutputStream; import org.apache.commons.compress.compressors.CompressorStreamFactory; import org.apache.commons.io.IOUtils; import org.junit.Assert; import org.junit.BeforeClass; import org.junit.Test; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.net.URISyntaxException; import java.nio.file.Files; import java.nio.file.Paths; import java.util.ArrayList; import java.util.List; import java.util.UUID; import static com.streamsets.pipeline.config.OriginAvroSchemaSource.SOURCE; public class 
TestSpoolDirWithCompression { private static File testDir; @BeforeClass public static void setUpClass() throws IOException, InterruptedException, URISyntaxException, CompressorException { testDir = new File("target", UUID.randomUUID().toString()).getAbsoluteFile(); Assert.assertTrue(testDir.mkdirs()); Files.copy(Paths.get(Resources.getResource("logArchive.zip").toURI()), Paths.get(testDir.getAbsolutePath(), "logArchive1.zip")); Files.copy(Paths.get(Resources.getResource("logArchive.zip").toURI()), Paths.get(testDir.getAbsolutePath(), "logArchive2.zip")); Files.copy(Paths.get(Resources.getResource("logArchive.tar.gz").toURI()), Paths.get(testDir.getAbsolutePath(), "logArchive1.tar.gz")); Files.copy(Paths.get(Resources.getResource("logArchive.tar.gz").toURI()), Paths.get(testDir.getAbsolutePath(), "logArchive2.tar.gz")); Files.copy(Paths.get(Resources.getResource("testAvro.tar.gz").toURI()), Paths.get(testDir.getAbsolutePath(), "testAvro1.tar.gz")); Files.copy(Paths.get(Resources.getResource("testAvro.tar.gz").toURI()), Paths.get(testDir.getAbsolutePath(), "testAvro2.tar.gz")); File bz2File = new File(testDir, "testFile1.bz2"); CompressorOutputStream bzip2 = new CompressorStreamFactory() .createCompressorOutputStream("bzip2", new FileOutputStream(bz2File)); bzip2.write(IOUtils.toByteArray(Resources.getResource("testLogFile.txt").openStream())); bzip2.close(); bz2File = new File(testDir, "testFile2.bz2"); bzip2 = new CompressorStreamFactory() .createCompressorOutputStream("bzip2", new FileOutputStream(bz2File)); bzip2.write(IOUtils.toByteArray(Resources.getResource("testLogFile.txt").openStream())); bzip2.close(); } @Test public void testProduceZipFile() throws Exception { SpoolDirSource source = createZipSource(); SourceRunner runner = new SourceRunner.Builder(SpoolDirDSource.class, source).addOutputLane("lane").build(); runner.runInit(); try { List<Record> allRecords = new ArrayList<>(); String offset = null; for(int i = 0; i < 50; i++) { BatchMaker batchMaker = 
SourceRunner.createTestBatchMaker("lane"); offset = source.produce(offset, 1000, batchMaker); Assert.assertNotNull(offset); StageRunner.Output output = SourceRunner.getOutput(batchMaker); List<Record> records = output.getRecords().get("lane"); allRecords.addAll(records); } Assert.assertEquals(37044, allRecords.size()); Assert.assertTrue(offset.equals("logArchive2.zip::-1")); } finally { runner.runDestroy(); } } @Test public void testProduceTarGzipTextFile() throws Exception { SpoolDirSource source = createTarGzipSource(); SourceRunner runner = new SourceRunner.Builder(SpoolDirDSource.class, source).addOutputLane("lane").build(); runner.runInit(); try { List<Record> allRecords = new ArrayList<>(); String offset = null; for(int i = 0; i < 50; i++) { BatchMaker batchMaker = SourceRunner.createTestBatchMaker("lane"); offset = source.produce(offset, 1000, batchMaker); Assert.assertNotNull(offset); StageRunner.Output output = SourceRunner.getOutput(batchMaker); List<Record> records = output.getRecords().get("lane"); allRecords.addAll(records); } Assert.assertEquals(37044, allRecords.size()); Assert.assertTrue(offset.equals("logArchive2.tar.gz::-1")); } finally { runner.runDestroy(); } } @Test public void testProduceTarGzipAvroFile() throws Exception { SpoolDirSource source = createTarGzipAvroSource(); SourceRunner runner = new SourceRunner.Builder(SpoolDirDSource.class, source).addOutputLane("lane").build(); runner.runInit(); try { List<Record> allRecords = new ArrayList<>(); String offset = null; for(int i = 0; i < 50; i++) { BatchMaker batchMaker = SourceRunner.createTestBatchMaker("lane"); offset = source.produce(offset, 1000, batchMaker); Assert.assertNotNull(offset); StageRunner.Output output = SourceRunner.getOutput(batchMaker); List<Record> records = output.getRecords().get("lane"); allRecords.addAll(records); } Assert.assertEquals(48000, allRecords.size()); Assert.assertTrue(offset.equals("testAvro2.tar.gz::-1")); } finally { runner.runDestroy(); } } @Test public 
void testProduceBz2File() throws Exception { SpoolDirSource source = createBz2Source(); SourceRunner runner = new SourceRunner.Builder(SpoolDirDSource.class, source).addOutputLane("lane").build(); runner.runInit(); try { List<Record> allRecords = new ArrayList<>(); String offset = null; for(int i = 0; i < 10; i++) { BatchMaker batchMaker = SourceRunner.createTestBatchMaker("lane"); offset = source.produce(offset, 1000, batchMaker); Assert.assertNotNull(offset); StageRunner.Output output = SourceRunner.getOutput(batchMaker); List<Record> records = output.getRecords().get("lane"); allRecords.addAll(records); } Assert.assertEquals(4, allRecords.size()); Assert.assertTrue(offset.equals("testFile2.bz2::-1")); } finally { runner.runDestroy(); } } private SpoolDirSource createZipSource() { SpoolDirConfigBean conf = new SpoolDirConfigBean(); conf.dataFormat = DataFormat.TEXT; conf.spoolDir = testDir.getAbsolutePath(); conf.batchSize = 1000; conf.overrunLimit = 65; conf.poolingTimeoutSecs = 1; conf.filePattern = "logArchive*.zip"; conf.maxSpoolFiles = 10; conf.initialFileToProcess = null; conf.dataFormatConfig.compression = Compression.ARCHIVE; conf.dataFormatConfig.filePatternInArchive = "*/*.log"; conf.errorArchiveDir = null; conf.postProcessing = PostProcessingOptions.NONE; conf.archiveDir = testDir.getAbsolutePath(); conf.retentionTimeMins = 10; conf.dataFormatConfig.textMaxLineLen = 10; conf.dataFormatConfig.onParseError = OnParseError.ERROR; conf.dataFormatConfig.maxStackTraceLines = 0; return new SpoolDirSource(conf); } private SpoolDirSource createTarGzipSource() { SpoolDirConfigBean conf = new SpoolDirConfigBean(); conf.dataFormat = DataFormat.TEXT; conf.spoolDir = testDir.getAbsolutePath(); conf.batchSize = 1000; conf.overrunLimit = 65; conf.poolingTimeoutSecs = 1; conf.filePattern = "logArchive*.tar.gz"; conf.maxSpoolFiles = 10; conf.initialFileToProcess = null; conf.dataFormatConfig.compression = Compression.COMPRESSED_ARCHIVE; 
conf.dataFormatConfig.filePatternInArchive = "*/[!.]*.log"; conf.errorArchiveDir = null; conf.postProcessing = PostProcessingOptions.NONE; conf.archiveDir = testDir.getAbsolutePath(); conf.retentionTimeMins = 10; conf.dataFormatConfig.textMaxLineLen = 10; conf.dataFormatConfig.onParseError = OnParseError.ERROR; conf.dataFormatConfig.maxStackTraceLines = 0; return new SpoolDirSource(conf); } private SpoolDirSource createTarGzipAvroSource() { SpoolDirConfigBean conf = new SpoolDirConfigBean(); conf.dataFormat = DataFormat.AVRO; conf.dataFormatConfig.avroSchemaSource = SOURCE; conf.spoolDir = testDir.getAbsolutePath(); conf.batchSize = 1000; conf.overrunLimit = 65; conf.poolingTimeoutSecs = 1; conf.filePattern = "testAvro*.tar.gz"; conf.maxSpoolFiles = 10; conf.initialFileToProcess = null; conf.dataFormatConfig.compression = Compression.COMPRESSED_ARCHIVE; conf.dataFormatConfig.filePatternInArchive = "[!.]*.avro"; conf.errorArchiveDir = null; conf.postProcessing = PostProcessingOptions.NONE; conf.archiveDir = testDir.getAbsolutePath(); conf.retentionTimeMins = 10; conf.dataFormatConfig.onParseError = OnParseError.ERROR; conf.dataFormatConfig.maxStackTraceLines = 0; return new SpoolDirSource(conf); } private SpoolDirSource createBz2Source() { SpoolDirConfigBean conf = new SpoolDirConfigBean(); conf.dataFormat = DataFormat.TEXT; conf.spoolDir = testDir.getAbsolutePath(); conf.batchSize = 1000; conf.overrunLimit = 65; conf.poolingTimeoutSecs = 1; conf.filePattern = "testFile*.bz2"; conf.maxSpoolFiles = 10; conf.initialFileToProcess = null; conf.dataFormatConfig.compression = Compression.COMPRESSED_FILE; conf.dataFormatConfig.filePatternInArchive = "*"; conf.errorArchiveDir = null; conf.postProcessing = PostProcessingOptions.NONE; conf.archiveDir = testDir.getAbsolutePath(); conf.retentionTimeMins = 10; conf.dataFormatConfig.textMaxLineLen = 10; conf.dataFormatConfig.onParseError = OnParseError.ERROR; conf.dataFormatConfig.maxStackTraceLines = 0; return new 
SpoolDirSource(conf); } }
package org.docksidestage.hangar.dbflute.cbean.cq.bs; import java.util.*; import org.dbflute.cbean.*; import org.dbflute.cbean.chelper.*; import org.dbflute.cbean.ckey.*; import org.dbflute.cbean.coption.*; import org.dbflute.cbean.cvalue.ConditionValue; import org.dbflute.cbean.ordering.*; import org.dbflute.cbean.scoping.*; import org.dbflute.cbean.sqlclause.SqlClause; import org.dbflute.dbmeta.DBMetaProvider; import org.docksidestage.hangar.dbflute.allcommon.*; import org.docksidestage.hangar.dbflute.cbean.*; import org.docksidestage.hangar.dbflute.cbean.cq.*; /** * The abstract condition-query of WHITE_SINGLE_PK. * @author DBFlute(AutoGenerator) */ public abstract class AbstractBsWhiteSinglePkCQ extends AbstractConditionQuery { // =================================================================================== // Constructor // =========== public AbstractBsWhiteSinglePkCQ(ConditionQuery referrerQuery, SqlClause sqlClause, String aliasName, int nestLevel) { super(referrerQuery, sqlClause, aliasName, nestLevel); } // =================================================================================== // DB Meta // ======= @Override protected DBMetaProvider xgetDBMetaProvider() { return DBMetaInstanceHandler.getProvider(); } public String asTableDbName() { return "WHITE_SINGLE_PK"; } // =================================================================================== // Query // ===== /** * Equal(=). And NullIgnored, OnlyOnceRegistered. <br> * ONLY_ONE_PK_ID: {PK, NotNull, DECIMAL(16)} * @param onlyOnePkId The value of onlyOnePkId as equal. (basically NotNull: error as default, or no condition as option) */ public void setOnlyOnePkId_Equal(Long onlyOnePkId) { doSetOnlyOnePkId_Equal(onlyOnePkId); } protected void doSetOnlyOnePkId_Equal(Long onlyOnePkId) { regOnlyOnePkId(CK_EQ, onlyOnePkId); } /** * NotEqual(&lt;&gt;). And NullIgnored, OnlyOnceRegistered. <br> * ONLY_ONE_PK_ID: {PK, NotNull, DECIMAL(16)} * @param onlyOnePkId The value of onlyOnePkId as notEqual. 
(basically NotNull: error as default, or no condition as option) */ public void setOnlyOnePkId_NotEqual(Long onlyOnePkId) { doSetOnlyOnePkId_NotEqual(onlyOnePkId); } protected void doSetOnlyOnePkId_NotEqual(Long onlyOnePkId) { regOnlyOnePkId(CK_NES, onlyOnePkId); } /** * GreaterThan(&gt;). And NullIgnored, OnlyOnceRegistered. <br> * ONLY_ONE_PK_ID: {PK, NotNull, DECIMAL(16)} * @param onlyOnePkId The value of onlyOnePkId as greaterThan. (basically NotNull: error as default, or no condition as option) */ public void setOnlyOnePkId_GreaterThan(Long onlyOnePkId) { regOnlyOnePkId(CK_GT, onlyOnePkId); } /** * LessThan(&lt;). And NullIgnored, OnlyOnceRegistered. <br> * ONLY_ONE_PK_ID: {PK, NotNull, DECIMAL(16)} * @param onlyOnePkId The value of onlyOnePkId as lessThan. (basically NotNull: error as default, or no condition as option) */ public void setOnlyOnePkId_LessThan(Long onlyOnePkId) { regOnlyOnePkId(CK_LT, onlyOnePkId); } /** * GreaterEqual(&gt;=). And NullIgnored, OnlyOnceRegistered. <br> * ONLY_ONE_PK_ID: {PK, NotNull, DECIMAL(16)} * @param onlyOnePkId The value of onlyOnePkId as greaterEqual. (basically NotNull: error as default, or no condition as option) */ public void setOnlyOnePkId_GreaterEqual(Long onlyOnePkId) { regOnlyOnePkId(CK_GE, onlyOnePkId); } /** * LessEqual(&lt;=). And NullIgnored, OnlyOnceRegistered. <br> * ONLY_ONE_PK_ID: {PK, NotNull, DECIMAL(16)} * @param onlyOnePkId The value of onlyOnePkId as lessEqual. (basically NotNull: error as default, or no condition as option) */ public void setOnlyOnePkId_LessEqual(Long onlyOnePkId) { regOnlyOnePkId(CK_LE, onlyOnePkId); } /** * RangeOf with various options. (versatile) <br> * {(default) minNumber &lt;= column &lt;= maxNumber} <br> * And NullIgnored, OnlyOnceRegistered. <br> * ONLY_ONE_PK_ID: {PK, NotNull, DECIMAL(16)} * @param minNumber The min number of onlyOnePkId. (basically NotNull: if op.allowOneSide(), null allowed) * @param maxNumber The max number of onlyOnePkId. 
(basically NotNull: if op.allowOneSide(), null allowed) * @param opLambda The callback for option of range-of. (NotNull) */ public void setOnlyOnePkId_RangeOf(Long minNumber, Long maxNumber, ConditionOptionCall<RangeOfOption> opLambda) { setOnlyOnePkId_RangeOf(minNumber, maxNumber, xcROOP(opLambda)); } /** * RangeOf with various options. (versatile) <br> * {(default) minNumber &lt;= column &lt;= maxNumber} <br> * And NullIgnored, OnlyOnceRegistered. <br> * ONLY_ONE_PK_ID: {PK, NotNull, DECIMAL(16)} * @param minNumber The min number of onlyOnePkId. (basically NotNull: if op.allowOneSide(), null allowed) * @param maxNumber The max number of onlyOnePkId. (basically NotNull: if op.allowOneSide(), null allowed) * @param rangeOfOption The option of range-of. (NotNull) */ protected void setOnlyOnePkId_RangeOf(Long minNumber, Long maxNumber, RangeOfOption rangeOfOption) { regROO(minNumber, maxNumber, xgetCValueOnlyOnePkId(), "ONLY_ONE_PK_ID", rangeOfOption); } /** * InScope {in (1, 2)}. And NullIgnored, NullElementIgnored, SeveralRegistered. <br> * ONLY_ONE_PK_ID: {PK, NotNull, DECIMAL(16)} * @param onlyOnePkIdList The collection of onlyOnePkId as inScope. (basically NotNull, NotEmpty: error as default, or no condition as option) */ public void setOnlyOnePkId_InScope(Collection<Long> onlyOnePkIdList) { doSetOnlyOnePkId_InScope(onlyOnePkIdList); } protected void doSetOnlyOnePkId_InScope(Collection<Long> onlyOnePkIdList) { regINS(CK_INS, cTL(onlyOnePkIdList), xgetCValueOnlyOnePkId(), "ONLY_ONE_PK_ID"); } /** * NotInScope {not in (1, 2)}. And NullIgnored, NullElementIgnored, SeveralRegistered. <br> * ONLY_ONE_PK_ID: {PK, NotNull, DECIMAL(16)} * @param onlyOnePkIdList The collection of onlyOnePkId as notInScope. 
(basically NotNull, NotEmpty: error as default, or no condition as option) */ public void setOnlyOnePkId_NotInScope(Collection<Long> onlyOnePkIdList) { doSetOnlyOnePkId_NotInScope(onlyOnePkIdList); } protected void doSetOnlyOnePkId_NotInScope(Collection<Long> onlyOnePkIdList) { regINS(CK_NINS, cTL(onlyOnePkIdList), xgetCValueOnlyOnePkId(), "ONLY_ONE_PK_ID"); } /** * IsNull {is null}. And OnlyOnceRegistered. <br> * ONLY_ONE_PK_ID: {PK, NotNull, DECIMAL(16)} */ public void setOnlyOnePkId_IsNull() { regOnlyOnePkId(CK_ISN, DOBJ); } /** * IsNotNull {is not null}. And OnlyOnceRegistered. <br> * ONLY_ONE_PK_ID: {PK, NotNull, DECIMAL(16)} */ public void setOnlyOnePkId_IsNotNull() { regOnlyOnePkId(CK_ISNN, DOBJ); } protected void regOnlyOnePkId(ConditionKey ky, Object vl) { regQ(ky, vl, xgetCValueOnlyOnePkId(), "ONLY_ONE_PK_ID"); } protected abstract ConditionValue xgetCValueOnlyOnePkId(); /** * Equal(=). And NullOrEmptyIgnored, OnlyOnceRegistered. <br> * SINGLE_PK_NAME: {NotNull, VARCHAR(200)} * @param singlePkName The value of singlePkName as equal. (basically NotNull, NotEmpty: error as default, or no condition as option) */ public void setSinglePkName_Equal(String singlePkName) { doSetSinglePkName_Equal(fRES(singlePkName)); } protected void doSetSinglePkName_Equal(String singlePkName) { regSinglePkName(CK_EQ, singlePkName); } /** * LikeSearch with various options. (versatile) {like '%xxx%' escape ...}. And NullOrEmptyIgnored, SeveralRegistered. <br> * SINGLE_PK_NAME: {NotNull, VARCHAR(200)} <br> * <pre>e.g. setSinglePkName_LikeSearch("xxx", op <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>&gt;</span> op.<span style="color: #CC4747">likeContain()</span>);</pre> * @param singlePkName The value of singlePkName as likeSearch. (basically NotNull, NotEmpty: error as default, or no condition as option) * @param opLambda The callback for option of like-search. 
(NotNull) */ public void setSinglePkName_LikeSearch(String singlePkName, ConditionOptionCall<LikeSearchOption> opLambda) { setSinglePkName_LikeSearch(singlePkName, xcLSOP(opLambda)); } /** * LikeSearch with various options. (versatile) {like '%xxx%' escape ...}. And NullOrEmptyIgnored, SeveralRegistered. <br> * SINGLE_PK_NAME: {NotNull, VARCHAR(200)} <br> * <pre>e.g. setSinglePkName_LikeSearch("xxx", new <span style="color: #CC4747">LikeSearchOption</span>().likeContain());</pre> * @param singlePkName The value of singlePkName as likeSearch. (basically NotNull, NotEmpty: error as default, or no condition as option) * @param likeSearchOption The option of like-search. (NotNull) */ protected void setSinglePkName_LikeSearch(String singlePkName, LikeSearchOption likeSearchOption) { regLSQ(CK_LS, fRES(singlePkName), xgetCValueSinglePkName(), "SINGLE_PK_NAME", likeSearchOption); } /** * NotLikeSearch with various options. (versatile) {not like 'xxx%' escape ...} <br> * And NullOrEmptyIgnored, SeveralRegistered. <br> * SINGLE_PK_NAME: {NotNull, VARCHAR(200)} * @param singlePkName The value of singlePkName as notLikeSearch. (basically NotNull, NotEmpty: error as default, or no condition as option) * @param opLambda The callback for option of like-search. (NotNull) */ public void setSinglePkName_NotLikeSearch(String singlePkName, ConditionOptionCall<LikeSearchOption> opLambda) { setSinglePkName_NotLikeSearch(singlePkName, xcLSOP(opLambda)); } /** * NotLikeSearch with various options. (versatile) {not like 'xxx%' escape ...} <br> * And NullOrEmptyIgnored, SeveralRegistered. <br> * SINGLE_PK_NAME: {NotNull, VARCHAR(200)} * @param singlePkName The value of singlePkName as notLikeSearch. (basically NotNull, NotEmpty: error as default, or no condition as option) * @param likeSearchOption The option of not-like-search. 
(NotNull) */ protected void setSinglePkName_NotLikeSearch(String singlePkName, LikeSearchOption likeSearchOption) { regLSQ(CK_NLS, fRES(singlePkName), xgetCValueSinglePkName(), "SINGLE_PK_NAME", likeSearchOption); } protected void regSinglePkName(ConditionKey ky, Object vl) { regQ(ky, vl, xgetCValueSinglePkName(), "SINGLE_PK_NAME"); } protected abstract ConditionValue xgetCValueSinglePkName(); /** * Equal(=). And NullIgnored, OnlyOnceRegistered. <br> * REFERRED_ID: {NotNull, INTEGER(10)} * @param referredId The value of referredId as equal. (basically NotNull: error as default, or no condition as option) */ public void setReferredId_Equal(Integer referredId) { doSetReferredId_Equal(referredId); } protected void doSetReferredId_Equal(Integer referredId) { regReferredId(CK_EQ, referredId); } /** * NotEqual(&lt;&gt;). And NullIgnored, OnlyOnceRegistered. <br> * REFERRED_ID: {NotNull, INTEGER(10)} * @param referredId The value of referredId as notEqual. (basically NotNull: error as default, or no condition as option) */ public void setReferredId_NotEqual(Integer referredId) { doSetReferredId_NotEqual(referredId); } protected void doSetReferredId_NotEqual(Integer referredId) { regReferredId(CK_NES, referredId); } /** * GreaterThan(&gt;). And NullIgnored, OnlyOnceRegistered. <br> * REFERRED_ID: {NotNull, INTEGER(10)} * @param referredId The value of referredId as greaterThan. (basically NotNull: error as default, or no condition as option) */ public void setReferredId_GreaterThan(Integer referredId) { regReferredId(CK_GT, referredId); } /** * LessThan(&lt;). And NullIgnored, OnlyOnceRegistered. <br> * REFERRED_ID: {NotNull, INTEGER(10)} * @param referredId The value of referredId as lessThan. (basically NotNull: error as default, or no condition as option) */ public void setReferredId_LessThan(Integer referredId) { regReferredId(CK_LT, referredId); } /** * GreaterEqual(&gt;=). And NullIgnored, OnlyOnceRegistered. 
<br> * REFERRED_ID: {NotNull, INTEGER(10)} * @param referredId The value of referredId as greaterEqual. (basically NotNull: error as default, or no condition as option) */ public void setReferredId_GreaterEqual(Integer referredId) { regReferredId(CK_GE, referredId); } /** * LessEqual(&lt;=). And NullIgnored, OnlyOnceRegistered. <br> * REFERRED_ID: {NotNull, INTEGER(10)} * @param referredId The value of referredId as lessEqual. (basically NotNull: error as default, or no condition as option) */ public void setReferredId_LessEqual(Integer referredId) { regReferredId(CK_LE, referredId); } /** * RangeOf with various options. (versatile) <br> * {(default) minNumber &lt;= column &lt;= maxNumber} <br> * And NullIgnored, OnlyOnceRegistered. <br> * REFERRED_ID: {NotNull, INTEGER(10)} * @param minNumber The min number of referredId. (basically NotNull: if op.allowOneSide(), null allowed) * @param maxNumber The max number of referredId. (basically NotNull: if op.allowOneSide(), null allowed) * @param opLambda The callback for option of range-of. (NotNull) */ public void setReferredId_RangeOf(Integer minNumber, Integer maxNumber, ConditionOptionCall<RangeOfOption> opLambda) { setReferredId_RangeOf(minNumber, maxNumber, xcROOP(opLambda)); } /** * RangeOf with various options. (versatile) <br> * {(default) minNumber &lt;= column &lt;= maxNumber} <br> * And NullIgnored, OnlyOnceRegistered. <br> * REFERRED_ID: {NotNull, INTEGER(10)} * @param minNumber The min number of referredId. (basically NotNull: if op.allowOneSide(), null allowed) * @param maxNumber The max number of referredId. (basically NotNull: if op.allowOneSide(), null allowed) * @param rangeOfOption The option of range-of. (NotNull) */ protected void setReferredId_RangeOf(Integer minNumber, Integer maxNumber, RangeOfOption rangeOfOption) { regROO(minNumber, maxNumber, xgetCValueReferredId(), "REFERRED_ID", rangeOfOption); } /** * InScope {in (1, 2)}. And NullIgnored, NullElementIgnored, SeveralRegistered. 
<br> * REFERRED_ID: {NotNull, INTEGER(10)} * @param referredIdList The collection of referredId as inScope. (basically NotNull, NotEmpty: error as default, or no condition as option) */ public void setReferredId_InScope(Collection<Integer> referredIdList) { doSetReferredId_InScope(referredIdList); } protected void doSetReferredId_InScope(Collection<Integer> referredIdList) { regINS(CK_INS, cTL(referredIdList), xgetCValueReferredId(), "REFERRED_ID"); } /** * NotInScope {not in (1, 2)}. And NullIgnored, NullElementIgnored, SeveralRegistered. <br> * REFERRED_ID: {NotNull, INTEGER(10)} * @param referredIdList The collection of referredId as notInScope. (basically NotNull, NotEmpty: error as default, or no condition as option) */ public void setReferredId_NotInScope(Collection<Integer> referredIdList) { doSetReferredId_NotInScope(referredIdList); } protected void doSetReferredId_NotInScope(Collection<Integer> referredIdList) { regINS(CK_NINS, cTL(referredIdList), xgetCValueReferredId(), "REFERRED_ID"); } protected void regReferredId(ConditionKey ky, Object vl) { regQ(ky, vl, xgetCValueReferredId(), "REFERRED_ID"); } protected abstract ConditionValue xgetCValueReferredId(); // =================================================================================== // ScalarCondition // =============== /** * Prepare ScalarCondition as equal. <br> * {where FOO = (select max(BAR) from ...)} * <pre> * cb.query().scalar_Equal().<span style="color: #CC4747">avg</span>(<span style="color: #553000">purchaseCB</span> <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>&gt;</span> { * <span style="color: #553000">purchaseCB</span>.specify().<span style="color: #CC4747">columnPurchasePrice</span>(); <span style="color: #3F7E5E">// *Point!</span> * <span style="color: #553000">purchaseCB</span>.query().setPaymentCompleteFlg_Equal_True(); * }); * </pre> * @return The object to set up a function. 
(NotNull) */ public HpSLCFunction<WhiteSinglePkCB> scalar_Equal() { return xcreateSLCFunction(CK_EQ, WhiteSinglePkCB.class); } /** * Prepare ScalarCondition as equal. <br> * {where FOO &lt;&gt; (select max(BAR) from ...)} * <pre> * cb.query().scalar_Equal().<span style="color: #CC4747">avg</span>(<span style="color: #553000">purchaseCB</span> <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>&gt;</span> { * <span style="color: #553000">purchaseCB</span>.specify().<span style="color: #CC4747">columnPurchasePrice</span>(); <span style="color: #3F7E5E">// *Point!</span> * <span style="color: #553000">purchaseCB</span>.query().setPaymentCompleteFlg_Equal_True(); * }); * </pre> * @return The object to set up a function. (NotNull) */ public HpSLCFunction<WhiteSinglePkCB> scalar_NotEqual() { return xcreateSLCFunction(CK_NES, WhiteSinglePkCB.class); } /** * Prepare ScalarCondition as greaterThan. <br> * {where FOO &gt; (select max(BAR) from ...)} * <pre> * cb.query().scalar_Equal().<span style="color: #CC4747">avg</span>(<span style="color: #553000">purchaseCB</span> <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>&gt;</span> { * <span style="color: #553000">purchaseCB</span>.specify().<span style="color: #CC4747">columnPurchasePrice</span>(); <span style="color: #3F7E5E">// *Point!</span> * <span style="color: #553000">purchaseCB</span>.query().setPaymentCompleteFlg_Equal_True(); * }); * </pre> * @return The object to set up a function. (NotNull) */ public HpSLCFunction<WhiteSinglePkCB> scalar_GreaterThan() { return xcreateSLCFunction(CK_GT, WhiteSinglePkCB.class); } /** * Prepare ScalarCondition as lessThan. 
<br> * {where FOO &lt; (select max(BAR) from ...)} * <pre> * cb.query().scalar_Equal().<span style="color: #CC4747">avg</span>(<span style="color: #553000">purchaseCB</span> <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>&gt;</span> { * <span style="color: #553000">purchaseCB</span>.specify().<span style="color: #CC4747">columnPurchasePrice</span>(); <span style="color: #3F7E5E">// *Point!</span> * <span style="color: #553000">purchaseCB</span>.query().setPaymentCompleteFlg_Equal_True(); * }); * </pre> * @return The object to set up a function. (NotNull) */ public HpSLCFunction<WhiteSinglePkCB> scalar_LessThan() { return xcreateSLCFunction(CK_LT, WhiteSinglePkCB.class); } /** * Prepare ScalarCondition as greaterEqual. <br> * {where FOO &gt;= (select max(BAR) from ...)} * <pre> * cb.query().scalar_Equal().<span style="color: #CC4747">avg</span>(<span style="color: #553000">purchaseCB</span> <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>&gt;</span> { * <span style="color: #553000">purchaseCB</span>.specify().<span style="color: #CC4747">columnPurchasePrice</span>(); <span style="color: #3F7E5E">// *Point!</span> * <span style="color: #553000">purchaseCB</span>.query().setPaymentCompleteFlg_Equal_True(); * }); * </pre> * @return The object to set up a function. (NotNull) */ public HpSLCFunction<WhiteSinglePkCB> scalar_GreaterEqual() { return xcreateSLCFunction(CK_GE, WhiteSinglePkCB.class); } /** * Prepare ScalarCondition as lessEqual. <br> * {where FOO &lt;= (select max(BAR) from ...)} * <pre> * cb.query().<span style="color: #CC4747">scalar_LessEqual()</span>.max(new SubQuery&lt;WhiteSinglePkCB&gt;() { * public void query(WhiteSinglePkCB subCB) { * subCB.specify().setFoo... <span style="color: #3F7E5E">// derived column for function</span> * subCB.query().setBar... * } * }); * </pre> * @return The object to set up a function. 
(NotNull) */ public HpSLCFunction<WhiteSinglePkCB> scalar_LessEqual() { return xcreateSLCFunction(CK_LE, WhiteSinglePkCB.class); } @SuppressWarnings("unchecked") protected <CB extends ConditionBean> void xscalarCondition(String fn, SubQuery<CB> sq, String rd, HpSLCCustomized<CB> cs, ScalarConditionOption op) { assertObjectNotNull("subQuery", sq); WhiteSinglePkCB cb = xcreateScalarConditionCB(); sq.query((CB)cb); String pp = keepScalarCondition(cb.query()); // for saving query-value cs.setPartitionByCBean((CB)xcreateScalarConditionPartitionByCB()); // for using partition-by registerScalarCondition(fn, cb.query(), pp, rd, cs, op); } public abstract String keepScalarCondition(WhiteSinglePkCQ sq); protected WhiteSinglePkCB xcreateScalarConditionCB() { WhiteSinglePkCB cb = newMyCB(); cb.xsetupForScalarCondition(this); return cb; } protected WhiteSinglePkCB xcreateScalarConditionPartitionByCB() { WhiteSinglePkCB cb = newMyCB(); cb.xsetupForScalarConditionPartitionBy(this); return cb; } // =================================================================================== // MyselfDerived // ============= public void xsmyselfDerive(String fn, SubQuery<WhiteSinglePkCB> sq, String al, DerivedReferrerOption op) { assertObjectNotNull("subQuery", sq); WhiteSinglePkCB cb = new WhiteSinglePkCB(); cb.xsetupForDerivedReferrer(this); lockCall(() -> sq.query(cb)); String pp = keepSpecifyMyselfDerived(cb.query()); String pk = "ONLY_ONE_PK_ID"; registerSpecifyMyselfDerived(fn, cb.query(), pk, pk, pp, "myselfDerived", al, op); } public abstract String keepSpecifyMyselfDerived(WhiteSinglePkCQ sq); /** * Prepare for (Query)MyselfDerived (correlated sub-query). * @return The object to set up a function for myself table. 
(NotNull) */ public HpQDRFunction<WhiteSinglePkCB> myselfDerived() { return xcreateQDRFunctionMyselfDerived(WhiteSinglePkCB.class); } @SuppressWarnings("unchecked") protected <CB extends ConditionBean> void xqderiveMyselfDerived(String fn, SubQuery<CB> sq, String rd, Object vl, DerivedReferrerOption op) { assertObjectNotNull("subQuery", sq); WhiteSinglePkCB cb = new WhiteSinglePkCB(); cb.xsetupForDerivedReferrer(this); sq.query((CB)cb); String pk = "ONLY_ONE_PK_ID"; String sqpp = keepQueryMyselfDerived(cb.query()); // for saving query-value. String prpp = keepQueryMyselfDerivedParameter(vl); registerQueryMyselfDerived(fn, cb.query(), pk, pk, sqpp, "myselfDerived", rd, vl, prpp, op); } public abstract String keepQueryMyselfDerived(WhiteSinglePkCQ sq); public abstract String keepQueryMyselfDerivedParameter(Object vl); // =================================================================================== // MyselfExists // ============ /** * Prepare for MyselfExists (correlated sub-query). * @param subCBLambda The implementation of sub-query. (NotNull) */ public void myselfExists(SubQuery<WhiteSinglePkCB> subCBLambda) { assertObjectNotNull("subCBLambda", subCBLambda); WhiteSinglePkCB cb = new WhiteSinglePkCB(); cb.xsetupForMyselfExists(this); lockCall(() -> subCBLambda.query(cb)); String pp = keepMyselfExists(cb.query()); registerMyselfExists(cb.query(), pp); } public abstract String keepMyselfExists(WhiteSinglePkCQ sq); // =================================================================================== // Manual Order // ============ /** * Order along manual ordering information. * <pre> * cb.query().addOrderBy_Birthdate_Asc().<span style="color: #CC4747">withManualOrder</span>(<span style="color: #553000">op</span> <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>&gt;</span> { * <span style="color: #553000">op</span>.<span style="color: #CC4747">when_GreaterEqual</span>(priorityDate); <span style="color: #3F7E5E">// e.g. 
2000/01/01</span> * }); * <span style="color: #3F7E5E">// order by </span> * <span style="color: #3F7E5E">// case</span> * <span style="color: #3F7E5E">// when BIRTHDATE &gt;= '2000/01/01' then 0</span> * <span style="color: #3F7E5E">// else 1</span> * <span style="color: #3F7E5E">// end asc, ...</span> * * cb.query().addOrderBy_MemberStatusCode_Asc().<span style="color: #CC4747">withManualOrder</span>(<span style="color: #553000">op</span> <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>&gt;</span> { * <span style="color: #553000">op</span>.<span style="color: #CC4747">when_Equal</span>(CDef.MemberStatus.Withdrawal); * <span style="color: #553000">op</span>.<span style="color: #CC4747">when_Equal</span>(CDef.MemberStatus.Formalized); * <span style="color: #553000">op</span>.<span style="color: #CC4747">when_Equal</span>(CDef.MemberStatus.Provisional); * }); * <span style="color: #3F7E5E">// order by </span> * <span style="color: #3F7E5E">// case</span> * <span style="color: #3F7E5E">// when MEMBER_STATUS_CODE = 'WDL' then 0</span> * <span style="color: #3F7E5E">// when MEMBER_STATUS_CODE = 'FML' then 1</span> * <span style="color: #3F7E5E">// when MEMBER_STATUS_CODE = 'PRV' then 2</span> * <span style="color: #3F7E5E">// else 3</span> * <span style="color: #3F7E5E">// end asc, ...</span> * </pre> * <p>This function with Union is unsupported!</p> * <p>The order values are bound (treated as bind parameter).</p> * @param opLambda The callback for option of manual-order containing order values. (NotNull) */ public void withManualOrder(ManualOrderOptionCall opLambda) { // is user public! 
xdoWithManualOrder(cMOO(opLambda)); } // =================================================================================== // Small Adjustment // ================ // =================================================================================== // Very Internal // ============= protected WhiteSinglePkCB newMyCB() { return new WhiteSinglePkCB(); } // very internal (for suppressing warn about 'Not Use Import') protected String xabUDT() { return Date.class.getName(); } protected String xabCQ() { return WhiteSinglePkCQ.class.getName(); } protected String xabLSO() { return LikeSearchOption.class.getName(); } protected String xabSLCS() { return HpSLCSetupper.class.getName(); } protected String xabSCP() { return SubQuery.class.getName(); } }
/*
 * See LICENSE file in distribution for copyright and licensing
 * information.
 */
package ioke.lang;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import ioke.lang.exceptions.ControlFlow;

/**
 * Base class for the native ("data") part of an IokeObject. Subclasses and the
 * oddball singletons below (None, Nil, False, True) supply kind-specific
 * behavior; every method here is a default that either does nothing or signals
 * a runtime condition.
 *
 * @author <a href="mailto:ola.bini@gmail.com">Ola Bini</a>
 */
public abstract class IokeData {
    /** Discriminator for the concrete kind of data; one of the TYPE_* constants. */
    public final int type;

    public final static int TYPE_NONE = 0;
    public final static int TYPE_DEFAULT_METHOD = 1;
    public final static int TYPE_DEFAULT_MACRO = 2;
    public final static int TYPE_DEFAULT_SYNTAX = 3;
    public final static int TYPE_LEXICAL_MACRO = 4;
    public final static int TYPE_ALIAS_METHOD = 5;
    public final static int TYPE_NATIVE_METHOD = 6;
    public final static int TYPE_JAVA_CONSTRUCTOR = 7;
    public final static int TYPE_JAVA_FIELD_GETTER = 8;
    public final static int TYPE_JAVA_FIELD_SETTER = 9;
    public final static int TYPE_JAVA_METHOD = 10;
    public final static int TYPE_METHOD_PROTOTYPE = 11;
    public final static int TYPE_LEXICAL_BLOCK = 12;

    public IokeData() {
        this(TYPE_NONE);
    }

    public IokeData(final int type) {
        this.type = type;
    }

    /** Placeholder data for objects that carry no native data at all. */
    public final static IokeData None = new IokeData() {
    };

    /** Data for the nil oddball: falsy, cannot be mimicked. */
    public final static IokeData Nil = new IokeData() {
        @Override
        public void init(IokeObject obj) {
            obj.setKind("nil");
            obj.body.flags |= IokeObject.NIL_F | IokeObject.FALSY_F;
        }

        @Override
        public void checkMimic(IokeObject obj, IokeObject m, IokeObject context) throws ControlFlow {
            signalCantMimicOddball(obj, m, context);
        }

        @Override
        public String toString(IokeObject self) {
            return "nil";
        }
    };

    /** Data for the false oddball: falsy, cannot be mimicked. */
    public final static IokeData False = new IokeData() {
        @Override
        public void init(IokeObject obj) {
            obj.setKind("false");
            obj.body.flags |= IokeObject.FALSY_F;
        }

        @Override
        public void checkMimic(IokeObject obj, IokeObject m, IokeObject context) throws ControlFlow {
            signalCantMimicOddball(obj, m, context);
        }

        @Override
        public String toString(IokeObject self) {
            return "true".equals("false") ? null : "false";
        }
    };

    /** Data for the true oddball: cannot be mimicked. */
    public final static IokeData True = new IokeData() {
        @Override
        public void init(IokeObject obj) {
            obj.setKind("true");
        }

        @Override
        public void checkMimic(IokeObject obj, IokeObject m, IokeObject context) throws ControlFlow {
            signalCantMimicOddball(obj, m, context);
        }

        @Override
        public String toString(IokeObject self) {
            return "true";
        }
    };

    /**
     * Signals an "Error CantMimicOddball" condition for the oddball singletons.
     * Extracted from the three identical copies previously inlined in
     * Nil/False/True checkMimic.
     */
    private static void signalCantMimicOddball(IokeObject obj, IokeObject m, IokeObject context) throws ControlFlow {
        final IokeObject condition = IokeObject
                .as(IokeObject.getCellChain(context.runtime.condition, m, context, "Error", "CantMimicOddball"),
                        context)
                .mimic(m, context);
        condition.setCell("message", m);
        condition.setCell("context", context);
        condition.setCell("receiver", obj);
        context.runtime.errorCondition(condition);
    }

    /** Hook called when an object with this data is initialized. Default: no-op. */
    public void init(IokeObject obj) throws ControlFlow {
    }

    /** Hook called before a mimic is installed. Default: allow (no-op). */
    public void checkMimic(IokeObject obj, IokeObject m, IokeObject context) throws ControlFlow {
    }

    public boolean isMessage() {
        return false;
    }

    public boolean isSymbol() {
        return false;
    }

    public IokeObject negate(IokeObject obj) {
        return obj;
    }

    /**
     * Equality: uses the object's user-defined "==" cell when present,
     * otherwise falls back to identity of the shared body.
     */
    public final boolean isEqualTo(IokeObject self, Object other) throws ControlFlow {
        Object cell = IokeObject.findCell(self, "==");
        if (cell == self.runtime.nul) {
            return (other instanceof IokeObject) && (self.body == IokeObject.as(other, self).body);
        }
        return IokeObject.isTrue(Interpreter.send(self.runtime.eqMessage, self.runtime.ground, self,
                self.runtime.createMessage(Message.wrap(IokeObject.as(other, self)))));
    }

    /**
     * Hash: uses the object's user-defined "hash" cell when present,
     * otherwise identity hash of the shared body.
     */
    public final int hashCode(IokeObject self) throws ControlFlow {
        Object cell = IokeObject.findCell(self, "hash");
        if (cell == self.runtime.nul) {
            return System.identityHashCode(self.body);
        }
        return Number.extractInt(
                Interpreter.send(self.runtime.hashMessage, self.runtime.ground, self),
                self.runtime.hashMessage, self.runtime.ground);
    }

    public IokeData cloneData(IokeObject obj, IokeObject m, IokeObject context) {
        return this;
    }

    /**
     * Signals an "Error Type IncorrectType" condition with a "useValue" restart
     * and returns the replacement value supplied via that restart (or the
     * original receiver if no replacement was given). Extracted from the seven
     * identical blocks previously inlined in the convertTo* methods.
     *
     * @param receiver the value that failed conversion
     * @param message the message being evaluated
     * @param context the evaluation context
     * @param expectedType value stored in the condition's expectedType cell
     * @return the value provided by the useValue restart, else the receiver
     */
    private static Object signalIncorrectType(final Object receiver, final IokeObject message,
            final IokeObject context, final Object expectedType) throws ControlFlow {
        final IokeObject condition = IokeObject
                .as(IokeObject.getCellChain(context.runtime.condition, message, context, "Error", "Type",
                        "IncorrectType"), context)
                .mimic(message, context);
        condition.setCell("message", message);
        condition.setCell("context", context);
        condition.setCell("receiver", receiver);
        condition.setCell("expectedType", expectedType);
        final Object[] newCell = new Object[] { receiver };
        context.runtime.withRestartReturningArguments(new RunnableWithControlFlow() {
            @Override
            public void run() throws ControlFlow {
                context.runtime.errorCondition(condition);
            }
        }, context, new Restart.ArgumentGivingRestart("useValue") {
            @Override
            public List<String> getArgumentNames() {
                return new ArrayList<>(Arrays.asList("newValue"));
            }

            @Override
            public IokeObject invoke(IokeObject ctx, List<Object> arguments) throws ControlFlow {
                newCell[0] = arguments.get(0);
                return ctx.runtime.nil;
            }
        });
        return newCell[0];
    }

    /**
     * Converts self to the given kind. Returns self when it already is that
     * kind; otherwise signals IncorrectType (when asked to) and retries with
     * the restart-provided replacement, or returns null.
     */
    public Object convertTo(IokeObject self, String kind, boolean signalCondition, String conversionMethod,
            IokeObject message, final IokeObject context) throws ControlFlow {
        if (IokeObject.isKind(self, kind, context)) {
            return self;
        }
        if (signalCondition) {
            Object replacement = signalIncorrectType(self, message, context, context.runtime.getSymbol(kind));
            return IokeObject.convertTo(replacement, kind, signalCondition, conversionMethod, message, context);
        }
        return null;
    }

    /**
     * Converts self to the given mimic. Returns self when it already mimics it;
     * otherwise signals IncorrectType (when asked to) and retries, or returns null.
     */
    public Object convertTo(IokeObject self, Object mimic, boolean signalCondition, String conversionMethod,
            IokeObject message, final IokeObject context) throws ControlFlow {
        if (IokeObject.isMimic(self, IokeObject.as(mimic, context), context)) {
            return self;
        }
        if (signalCondition) {
            Object replacement = signalIncorrectType(self, message, context, mimic);
            return IokeObject.convertTo(mimic, replacement, signalCondition, conversionMethod, message, context);
        }
        return null;
    }

    /** Default Rational conversion: signal IncorrectType and retry, or return null. */
    public IokeObject convertToRational(IokeObject self, IokeObject m, final IokeObject context,
            boolean signalCondition) throws ControlFlow {
        if (signalCondition) {
            Object replacement = signalIncorrectType(self, m, context, context.runtime.getSymbol("Rational"));
            return IokeObject.convertToRational(replacement, m, context, signalCondition);
        }
        return null;
    }

    /** Default Decimal conversion: signal IncorrectType and retry, or return null. */
    public IokeObject convertToDecimal(IokeObject self, IokeObject m, final IokeObject context,
            boolean signalCondition) throws ControlFlow {
        if (signalCondition) {
            Object replacement = signalIncorrectType(self, m, context, context.runtime.getSymbol("Decimal"));
            return IokeObject.convertToDecimal(replacement, m, context, signalCondition);
        }
        return null;
    }

    /** Default Number conversion: always signals IncorrectType, then retries. */
    public IokeObject convertToNumber(IokeObject self, IokeObject m, final IokeObject context) throws ControlFlow {
        Object replacement = signalIncorrectType(self, m, context, context.runtime.getSymbol("Number"));
        return IokeObject.convertToNumber(replacement, m, context);
    }

    /** Optional Text conversion: null means "not convertible", no condition raised. */
    public IokeObject tryConvertToText(IokeObject self, IokeObject m, final IokeObject context) throws ControlFlow {
        return null;
    }

    /** Default Text conversion: signal IncorrectType and retry, or return null. */
    public IokeObject convertToText(IokeObject self, IokeObject m, final IokeObject context,
            boolean signalCondition) throws ControlFlow {
        if (signalCondition) {
            Object replacement = signalIncorrectType(self, m, context, context.runtime.getSymbol("Text"));
            return IokeObject.convertToText(replacement, m, context, signalCondition);
        }
        return null;
    }

    /** Default Symbol conversion: signal IncorrectType and retry, or return null. */
    public IokeObject convertToSymbol(IokeObject self, IokeObject m, final IokeObject context,
            final boolean signalCondition) throws ControlFlow {
        if (signalCondition) {
            Object replacement = signalIncorrectType(self, m, context, context.runtime.getSymbol("Symbol"));
            return IokeObject.convertToSymbol(replacement, m, context, signalCondition);
        }
        return null;
    }

    /** Default Regexp conversion: always signals IncorrectType, then retries. */
    public IokeObject convertToRegexp(IokeObject self, IokeObject m, final IokeObject context) throws ControlFlow {
        Object replacement = signalIncorrectType(self, m, context, context.runtime.getSymbol("Regexp"));
        return IokeObject.convertToRegexp(replacement, m, context);
    }

    /** Signals an "Error Invocation NotActivatable" condition for a missing capability. */
    private static void report(Object self, IokeObject context, IokeObject message, String name) throws ControlFlow {
        IokeObject condition = IokeObject
                .as(IokeObject.getCellChain(context.runtime.condition, message, context, "Error", "Invocation",
                        "NotActivatable"), context)
                .mimic(message, context);
        condition.setCell("message", message);
        condition.setCell("context", context);
        condition.setCell("receiver", self);
        condition.setCell("methodName", context.runtime.getSymbol(name));
        context.runtime.errorCondition(condition);
    }

    /**
     * Activates self via its user-defined "activate" cell, passing context,
     * message and receiver as wrapped arguments; signals NotActivatable when
     * no such cell exists.
     */
    public static Object activateFixed(IokeObject self, IokeObject context, IokeObject message, Object on)
            throws ControlFlow {
        Object cell = IokeObject.findCell(self, "activate");
        if (cell == context.runtime.nul) {
            report(self, context, message, "activate");
            return context.runtime.nil;
        }
        IokeObject newMessage = Message.deepCopy(message);
        newMessage.getArguments().clear();
        newMessage.getArguments().add(context.runtime.createMessage(Message.wrap(context)));
        newMessage.getArguments().add(context.runtime.createMessage(Message.wrap(message)));
        newMessage.getArguments().add(context.runtime.createMessage(Message.wrap(IokeObject.as(on, context))));
        return Interpreter.getOrActivate(cell, context, newMessage, self);
    }

    public List<Object> getArguments(IokeObject self) throws ControlFlow {
        report(self, self, self, "getArguments");
        return null;
    }

    public int getArgumentCount(IokeObject self) throws ControlFlow {
        report(self, self, self, "getArgumentCount");
        return -1;
    }

    public String getName(IokeObject self) throws ControlFlow {
        report(self, self, self, "getName");
        return null;
    }

    public String getFile(IokeObject self) throws ControlFlow {
        report(self, self, self, "getFile");
        return null;
    }

    public int getLine(IokeObject self) throws ControlFlow {
        report(self, self, self, "getLine");
        return -1;
    }

    public int getPosition(IokeObject self) throws ControlFlow {
        report(self, self, self, "getPosition");
        return -1;
    }

    /** Debug representation "#&lt;kind:HASH&gt;"; best-effort if evaluation fails. */
    public String toString(IokeObject self) {
        try {
            int h = hashCode(self);
            String hash = Integer.toHexString(h).toUpperCase();
            if (self == self.runtime.nul) {
                return "#<nul:" + hash + ">";
            }
            Object obj = Interpreter.send(self.runtime.kindMessage, self.runtime.ground, self);
            String kind = ((Text) IokeObject.data(obj)).getText();
            return "#<" + kind + ":" + hash + ">";
        } catch (ControlFlow ignored) {
            // toString must not propagate control flow; fall through to the fallback.
        }
        return "an error happened somewhere";
    }

    public String getConvertMethod() {
        return null;
    }
}// IokeData
/* * Copyright 2005 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.drools.reteoo.common; import org.drools.core.RuleBaseConfiguration; import org.drools.core.WorkingMemory; import org.drools.core.base.SalienceInteger; import org.drools.core.common.AgendaGroupQueueImpl; import org.drools.core.common.AgendaItem; import org.drools.core.common.DefaultFactHandle; import org.drools.core.common.InternalAgenda; import org.drools.core.common.InternalAgendaGroup; import org.drools.core.common.InternalRuleFlowGroup; import org.drools.core.common.InternalWorkingMemory; import org.drools.core.common.PropagationContextFactory; import org.drools.core.definitions.rule.impl.RuleImpl; import org.drools.core.event.ActivationCancelledEvent; import org.drools.core.event.DefaultAgendaEventListener; import org.drools.core.impl.InternalKnowledgeBase; import org.drools.core.impl.StatefulKnowledgeSessionImpl; import org.drools.core.reteoo.MockTupleSource; import org.drools.core.reteoo.ReteooBuilder.IdGenerator; import org.drools.core.reteoo.RuleTerminalNode; import org.drools.core.reteoo.RuleTerminalNodeLeftTuple; import org.drools.core.reteoo.builder.BuildContext; import org.drools.core.spi.InternalActivationGroup; import org.drools.core.spi.AgendaGroup; import org.drools.core.spi.Consequence; import org.drools.core.spi.ConsequenceException; import org.drools.core.spi.KnowledgeHelper; import org.drools.core.spi.PropagationContext; import 
org.drools.core.spi.RuleFlowGroup;
import org.drools.core.test.model.Cheese;
import org.drools.core.test.model.DroolsTestCase;
import org.drools.core.time.impl.DurationTimer;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import org.kie.api.event.rule.MatchCancelledCause;
import org.kie.api.runtime.rule.AgendaFilter;
import org.kie.api.runtime.rule.Match;
import org.kie.internal.KnowledgeBaseFactory;
import org.kie.internal.event.rule.ActivationUnMatchListener;

import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import static org.junit.Assert.*;

// Tests for the (non-phreak) ReteOO agenda: activation queuing, clearing,
// un-match listeners and filters. NOTE(review): the whole class is @Ignore'd —
// presumably because it targets the legacy ReteOO path; confirm before re-enabling.
@Ignore
public class AgendaTest extends DroolsTestCase {
    // Knowledge base and builder context shared by all tests; rebuilt in setUp().
    private InternalKnowledgeBase kBase;
    private BuildContext buildContext;
    private PropagationContextFactory pctxFactory;

    @Before
    public void setUp() throws Exception {
        // Force the legacy ReteOO network (phreak disabled) for these tests.
        RuleBaseConfiguration config = new RuleBaseConfiguration();
        config.setPhreakEnabled(false);
        kBase = (InternalKnowledgeBase) KnowledgeBaseFactory.newKnowledgeBase();
        buildContext = new BuildContext(kBase, kBase.getReteooBuilder().getIdGenerator());
        pctxFactory = kBase.getConfiguration().getComponentFactory().getPropagationContextFactory();
    }

    // Verifies that clearAndCancel() removes both queued activations (focus)
    // and timer-scheduled activations.
    @Test
    public void testClearAgenda() {
        StatefulKnowledgeSessionImpl ksession = (StatefulKnowledgeSessionImpl) kBase.newStatefulKnowledgeSession();

        final InternalAgenda agenda = (InternalAgenda) ksession.getAgenda();

        final RuleImpl rule1 = new RuleImpl("test-rule1");
        final RuleImpl rule2 = new RuleImpl("test-rule2");

        final RuleTerminalNode node1 = new RuleTerminalNode(3,
                                                            new MockTupleSource(2),
                                                            rule1,
                                                            rule1.getLhs(),
                                                            0,
                                                            buildContext);

        final RuleTerminalNode node2 = new RuleTerminalNode(5,
                                                            new MockTupleSource(4),
                                                            rule2,
                                                            rule2.getLhs(),
                                                            0,
                                                            buildContext);

        final RuleTerminalNodeLeftTuple tuple1 = new RuleTerminalNodeLeftTuple(new DefaultFactHandle(1, "cheese"), node1, true);
        final RuleTerminalNodeLeftTuple tuple2 = new RuleTerminalNodeLeftTuple(new DefaultFactHandle(2, "cheese"), node2, true);

        final PropagationContext context1 = pctxFactory.createPropagationContext(0, PropagationContext.Type.INSERTION, rule1, null, new DefaultFactHandle());

        // Add consequence. Notice here the context here for the add to agenda
        // is itself
        rule1.setConsequence(new org.drools.core.spi.Consequence() {
            private static final long serialVersionUID = 510l;

            public void evaluate(final KnowledgeHelper knowledgeHelper,
                                 final WorkingMemory workingMemory) {
                // do nothing
            }

            public void readExternal(ObjectInput in) throws IOException,
                    ClassNotFoundException {
            }

            public void writeExternal(ObjectOutput out) throws IOException {
            }

            public String getName() {
                return "default";
            }
        });

        // Add consequence. Notice here the context here for the add to agenda
        // is itself
        rule2.setConsequence(new org.drools.core.spi.Consequence() {
            private static final long serialVersionUID = 510l;

            public void evaluate(final KnowledgeHelper knowledgeHelper,
                                 final WorkingMemory workingMemory) {
                // do nothing
            }

            public void readExternal(ObjectInput in) throws IOException,
                    ClassNotFoundException {
            }

            public void writeExternal(ObjectOutput out) throws IOException {
            }

            public String getName() {
                return "default";
            }
        });

        assertEquals(0, agenda.getFocus().size());

        rule1.setNoLoop(false);
        // rule2 fires on a timer, so its activation lands on the scheduled queue.
        rule2.setTimer(new DurationTimer(5000));
        node1.assertLeftTuple(tuple1, context1, ksession);
        node2.assertLeftTuple(tuple2, context1, ksession);

        agenda.unstageActivations();

        // make sure we have an activation in the current focus
        assertEquals(1, agenda.getFocus().size());

        assertEquals(1, agenda.getScheduledActivations().length);

        agenda.clearAndCancel();

        // both the queued and the scheduled activation must be gone
        assertEquals(0, agenda.getFocus().size());
        assertEquals(0, agenda.getScheduledActivations().length);
    }

    // Verifies that an ActivationUnMatchListener registered from a consequence
    // runs when the activation's tuple is retracted (price restored to 50).
    @Test
    public void testActivationUnMatchListener() {
        StatefulKnowledgeSessionImpl ksession = (StatefulKnowledgeSessionImpl) kBase.newStatefulKnowledgeSession();

        final InternalAgenda agenda = (InternalAgenda) ksession.getAgenda();

        final RuleImpl rule1 = new RuleImpl("test-rule1");

        final RuleTerminalNode node1 = new RuleTerminalNode(3,
                                                            new MockTupleSource(2),
                                                            rule1,
                                                            rule1.getLhs(),
                                                            0,
                                                            buildContext);

        Cheese cheese = new Cheese();
        cheese.setPrice(50);
        final RuleTerminalNodeLeftTuple tuple = new RuleTerminalNodeLeftTuple(new DefaultFactHandle(1, cheese), node1, true);

        final PropagationContext context1 = pctxFactory.createPropagationContext(0, PropagationContext.Type.INSERTION, rule1, null, new DefaultFactHandle());

        // Add consequence. Notice here the context here for the add to agenda
        // is itself
        rule1.setConsequence(new org.drools.core.spi.Consequence() {
            private static final long serialVersionUID = 510l;

            public void evaluate(final KnowledgeHelper knowledgeHelper,
                                 final WorkingMemory workingMemory) {
                // Bump the price and arrange for it to be restored on un-match.
                AgendaItem item = (AgendaItem) knowledgeHelper.getMatch();
                final Cheese cheese = (Cheese) item.getTuple().getFactHandle().getObject();
                final int oldPrice = cheese.getPrice();
                cheese.setPrice(100);

                item.setActivationUnMatchListener(new ActivationUnMatchListener() {
                    public void unMatch(org.kie.api.runtime.rule.RuleRuntime wm,
                                        Match activation) {
                        cheese.setPrice(oldPrice);
                    }
                });
            }

            public void readExternal(ObjectInput in) throws IOException,
                    ClassNotFoundException {
            }

            public void writeExternal(ObjectOutput out) throws IOException {
            }

            public String getName() {
                return "default";
            }
        });

        assertEquals(50, cheese.getPrice());

        node1.assertLeftTuple(tuple, context1, ksession);

        agenda.unstageActivations();
        agenda.fireNextItem(null, 0, -1);

        // consequence ran: price bumped
        assertEquals(100, cheese.getPrice());

        final PropagationContext context0 = pctxFactory.createPropagationContext(0, PropagationContext.Type.DELETION, rule1, null, new DefaultFactHandle());
        node1.retractLeftTuple(tuple, context0, ksession);

        // un-match listener ran: price restored
        assertEquals(50, cheese.getPrice());
    }

    @Test
    public void testFilters() throws Exception {
        StatefulKnowledgeSessionImpl ksession = (StatefulKnowledgeSessionImpl) kBase.newStatefulKnowledgeSession();

        final InternalAgenda 
agenda = (InternalAgenda) ksession.getAgenda(); final Boolean[] filtered = new Boolean[]{false}; ksession.addEventListener(new DefaultAgendaEventListener() { public void activationCancelled(ActivationCancelledEvent event, WorkingMemory workingMemory) { if (event.getCause() == MatchCancelledCause.FILTER) { filtered[0] = true; } } }); final RuleImpl rule = new RuleImpl("test-rule"); final RuleTerminalNode node = new RuleTerminalNode(3, new MockTupleSource(2), rule, rule.getLhs(), 0, buildContext); final Map results = new HashMap(); // add consequence rule.setConsequence(new org.drools.core.spi.Consequence() { private static final long serialVersionUID = 510l; public void evaluate(final KnowledgeHelper knowledgeHelper, final WorkingMemory workingMemory) { results.put("fired", new Boolean(true)); } public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException { } public void writeExternal(ObjectOutput out) throws IOException { } public String getName() { return "default"; } }); final RuleTerminalNodeLeftTuple tuple = new RuleTerminalNodeLeftTuple(new DefaultFactHandle(1, "cheese"), node, true); final PropagationContext context = pctxFactory.createPropagationContext(0, PropagationContext.Type.INSERTION, rule, null, new DefaultFactHandle()); // test agenda is empty assertEquals(0, agenda.getFocus().size()); // True filter, activations should always add final AgendaFilter filterTrue = new AgendaFilter() { public boolean accept(Match item) { return true; } }; rule.setNoLoop(false); node.assertLeftTuple(tuple, context, ksession); agenda.unstageActivations(); // check there is an item to fire assertEquals(1, agenda.getFocus().size()); agenda.fireNextItem(filterTrue, 0, -1); // check focus is empty assertEquals(0, agenda.getFocus().size()); // make sure it also fired assertEquals(new Boolean(true), results.get("fired")); assertEquals(false, filtered[0].booleanValue()); // clear the agenda and the result map agenda.clearAndCancel(); results.clear(); // 
False filter, activations should always be denied final AgendaFilter filterFalse = new AgendaFilter() { public boolean accept(Match item) { return false; } }; rule.setNoLoop(false); node.assertLeftTuple(tuple, context, ksession); agenda.unstageActivations(); // check we have an item to fire assertEquals(1, agenda.getFocus().size()); agenda.fireNextItem(filterFalse, 0, -1); // make sure the focus is empty assertEquals(0, agenda.getFocus().size()); // check the consequence never fired assertNull(results.get("fired")); assertEquals(true, filtered[0].booleanValue()); } @Test public void testFocusStack() throws ConsequenceException { StatefulKnowledgeSessionImpl ksession = (StatefulKnowledgeSessionImpl)kBase.newStatefulKnowledgeSession(); // create the consequence final Consequence consequence = new Consequence() { private static final long serialVersionUID = 510l; public void evaluate(KnowledgeHelper knowledgeHelper, WorkingMemory workingMemory) { // do nothing } public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException { } public void writeExternal(ObjectOutput out) throws IOException { } public String getName() { return "default"; } }; // create a rule for each agendaGroup final RuleImpl rule0 = new RuleImpl("test-rule0"); final RuleTerminalNode node0 = new RuleTerminalNode(3, new MockTupleSource(2), rule0, rule0.getLhs(), 0, buildContext); rule0.setConsequence(consequence); final PropagationContext context0 = pctxFactory.createPropagationContext(0, PropagationContext.Type.INSERTION, rule0, null, new DefaultFactHandle()); final RuleImpl rule1 = new RuleImpl("test-rule1", "agendaGroup1"); final RuleTerminalNode node1 = new RuleTerminalNode(5, new MockTupleSource(4), rule1, rule1.getLhs(), 0, buildContext); rule1.setConsequence(consequence); final PropagationContext context1 = pctxFactory.createPropagationContext(0, PropagationContext.Type.INSERTION, rule1, null, new DefaultFactHandle()); final RuleImpl rule2 = new RuleImpl("test-rule2", 
"agendaGroup2"); final RuleTerminalNode node2 = new RuleTerminalNode(7, new MockTupleSource(6), rule2, rule2.getLhs(), 0, buildContext); rule2.setConsequence(consequence); final PropagationContext context2 = pctxFactory.createPropagationContext(0, PropagationContext.Type.INSERTION, rule2, null, new DefaultFactHandle()); final RuleImpl rule3 = new RuleImpl("test-rule3", "agendaGroup3"); final RuleTerminalNode node3 = new RuleTerminalNode(9, new MockTupleSource(8), rule3, rule3.getLhs(), 0, buildContext); rule3.setConsequence(consequence); final PropagationContext context3 = pctxFactory.createPropagationContext(0, PropagationContext.Type.INSERTION, rule3, null, new DefaultFactHandle()); final RuleTerminalNodeLeftTuple tuple1 = new RuleTerminalNodeLeftTuple(new DefaultFactHandle(1, "cheese"), node0, true); final RuleTerminalNodeLeftTuple tuple2 = new RuleTerminalNodeLeftTuple(new DefaultFactHandle(2, "cheese"), node2, true); final RuleTerminalNodeLeftTuple tuple3 = new RuleTerminalNodeLeftTuple(new DefaultFactHandle(3, "cheese"), node2, true); final RuleTerminalNodeLeftTuple tuple4 = new RuleTerminalNodeLeftTuple(new DefaultFactHandle(4, "cheese"), node3, true); final RuleTerminalNodeLeftTuple tuple5 = new RuleTerminalNodeLeftTuple(new DefaultFactHandle(5, "cheese"), node3, true); final InternalAgenda agenda = (InternalAgenda) ksession.getAgenda(); // create the AgendaGroups final AgendaGroup agendaGroup1 = new AgendaGroupQueueImpl("agendaGroup1", kBase); agenda.addAgendaGroup(agendaGroup1); final AgendaGroup agendaGroup2 = new AgendaGroupQueueImpl("agendaGroup2", kBase); agenda.addAgendaGroup(agendaGroup2); final AgendaGroup agendaGroup3 = new AgendaGroupQueueImpl("agendaGroup3", kBase); agenda.addAgendaGroup(agendaGroup3); // focus at this point is MAIN assertEquals(0, agenda.focusStackSize()); node0.assertLeftTuple(tuple1, context0, ksession); agenda.unstageActivations(); // check focus is main final AgendaGroup main = agenda.getAgendaGroup(AgendaGroup.MAIN); 
assertEquals(agenda.getFocus(), main); // check main got the tuple assertEquals(1, agenda.getFocus().size()); node2.assertLeftTuple(tuple2, context2, ksession); agenda.unstageActivations(); // main is still focus and this tuple went to agendaGroup 2 assertEquals(1, agenda.getFocus().size()); // check agendaGroup2 still got the tuple assertEquals(1, agendaGroup2.size()); // make sure total agenda size reflects this assertEquals(2, agenda.agendaSize()); // put another one on agendaGroup 2 node2.assertLeftTuple(tuple3, context2, ksession); agenda.unstageActivations(); // main is still focus so shouldn't have increased assertEquals(1, agenda.getFocus().size()); // check agendaGroup2 still got the tuple assertEquals(2, agendaGroup2.size()); // make sure total agenda size reflects this assertEquals(3, agenda.agendaSize()); // set the focus to agendaGroup1, note agendaGroup1 has no activations agenda.setFocus("agendaGroup1"); // add agendaGroup2 onto the focus stack agenda.setFocus("agendaGroup2"); // finally add agendaGroup3 to the top of the focus stack agenda.setFocus("agendaGroup3"); // agendaGroup3, the current focus, has no activations assertEquals(0, agenda.getFocus().size()); // add to agendaGroup 3 node3.assertLeftTuple(tuple4, context3, ksession); agenda.unstageActivations(); assertEquals(1, agenda.getFocus().size()); node3.assertLeftTuple(tuple5, context3, ksession); agenda.unstageActivations(); // agendaGroup3 now has 2 activations assertEquals(2, agenda.getFocus().size()); // check totalAgendaSize still works assertEquals(5, agenda.agendaSize()); // ok now lets check that stacks work with fireNextItem agenda.fireNextItem(null, 0, -1); // agendaGroup3 should still be the current agendaGroup assertEquals(agenda.getFocus(), agendaGroup3); // agendaGroup3 has gone from 2 to one activations assertEquals(1, agenda.getFocus().size()); // check totalAgendaSize has reduced too assertEquals(4, agenda.agendaSize()); // now repeat the process agenda.fireNextItem(null, 0, 
-1); // focus is still agendaGroup3, but now its empty assertEquals(agenda.getFocus(), agendaGroup3); assertEquals(0, agenda.getFocus().size()); assertEquals(3, agenda.agendaSize()); // repeat fire again agenda.fireNextItem(null, 0, -1); // agendaGroup3 is empty so it should be popped from the stack making```````````````````` // agendaGroup2 // the current agendaGroup assertEquals(agendaGroup2, agenda.getFocus()); // agendaGroup2 had 2 activations, now it only has 1 assertEquals(1, agenda.getFocus().size()); assertEquals(2, agenda.agendaSize()); // repeat fire again agenda.fireNextItem(null, 0, -1); assertEquals(agenda.getFocus(), agendaGroup2); assertEquals(0, agenda.getFocus().size()); assertEquals(1, agenda.agendaSize()); // this last fire is more interesting as it demonstrates that // agendaGroup1 on // the stack before agendaGroup2 gets skipped as it has no activations agenda.fireNextItem(null, 0, -1); assertEquals(agenda.getFocus(), main); assertEquals(0, agenda.getFocus().size()); assertEquals(0, agenda.agendaSize()); } // @Test public void testAutoFocus() throws ConsequenceException { StatefulKnowledgeSessionImpl ksession = (StatefulKnowledgeSessionImpl)kBase.newStatefulKnowledgeSession(); final InternalAgenda agenda = (InternalAgenda) ksession.getAgenda(); // create the agendaGroup final AgendaGroup agendaGroup = new AgendaGroupQueueImpl("agendaGroup", kBase); agenda.addAgendaGroup(agendaGroup); // create the consequence final Consequence consequence = new Consequence() { private static final long serialVersionUID = 510l; public void evaluate(KnowledgeHelper knowledgeHelper, WorkingMemory workingMemory) { // do nothing } public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException { } public void writeExternal(ObjectOutput out) throws IOException { } public String getName() { return "default"; } }; // create a rule for the agendaGroup final RuleImpl rule = new RuleImpl("test-rule", "agendaGroup"); final RuleTerminalNode node = new 
RuleTerminalNode(2, new MockTupleSource(2), rule, rule.getLhs(), 0, buildContext); final RuleTerminalNodeLeftTuple tuple = new RuleTerminalNodeLeftTuple(new DefaultFactHandle(1, "cheese"), node, true); rule.setConsequence(consequence); final PropagationContext context = pctxFactory.createPropagationContext(0, PropagationContext.Type.INSERTION, rule, null, new DefaultFactHandle()); // first test that autoFocus=false works. Here the rule should not fire // as its agendaGroup does not have focus. rule.setAutoFocus(false); node.assertLeftTuple(tuple, context, ksession); agenda.unstageActivations(); // check activation as added to the agendaGroup assertEquals(1, agendaGroup.size()); // fire next item, agendaGroup should not fire as its not on the focus stack // and thus should retain its sinle activation agenda.fireNextItem(null, 0, -1); assertEquals(1, agendaGroup.size()); // Clear the agenda we we can test again agenda.clearAndCancel(); assertEquals(0, agendaGroup.size()); // Now test that autoFocus=true works. Here the rule should fire as its // agendaGroup gets the focus when the activation is created. 
rule.setAutoFocus(true); node.assertLeftTuple(tuple, context, ksession); agenda.unstageActivations(); assertEquals(1, agendaGroup.size()); agenda.fireNextItem(null, 0, -1); assertEquals(0, agendaGroup.size()); } @Test public void testAgendaGroupLockOnActive() { StatefulKnowledgeSessionImpl ksession = (StatefulKnowledgeSessionImpl)kBase.newStatefulKnowledgeSession(); final InternalAgenda agenda = (InternalAgenda) ksession.getAgenda(); // create the agendaGroup final InternalAgendaGroup agendaGroup = new AgendaGroupQueueImpl("agendaGroup", kBase); agenda.addAgendaGroup(agendaGroup); // create a rule for the agendaGroup final RuleImpl rule = new RuleImpl("test-rule", "agendaGroup"); final RuleTerminalNode node = new RuleTerminalNode(2, new MockTupleSource(2), rule, rule.getLhs(), 0, buildContext); final RuleTerminalNodeLeftTuple tuple = new RuleTerminalNodeLeftTuple(new DefaultFactHandle(1, "cheese"), node, true); final PropagationContext context = pctxFactory.createPropagationContext(0, PropagationContext.Type.INSERTION, rule, null, new DefaultFactHandle()); // When both the rule is lock-on-active and the agenda group is active, activations should be ignored rule.setLockOnActive(true); ((InternalRuleFlowGroup) agendaGroup).setAutoDeactivate(false); agendaGroup.setActive(true); node.assertLeftTuple(tuple, context, ksession); // activation should be ignored assertEquals(0, agendaGroup.size()); // lock-on-active is now false so activation should propagate rule.setLockOnActive(false); node.assertLeftTuple(tuple, context, ksession); agenda.unstageActivations(); assertEquals(1, agendaGroup.size()); // even if lock-on-active is true, unless the agenda group is active the activation will still propagate rule.setLockOnActive(true); agendaGroup.setActive(false); node.assertLeftTuple(tuple, context, ksession); agenda.unstageActivations(); assertEquals(2, agendaGroup.size()); } @Test public void testActivationGroup() { StatefulKnowledgeSessionImpl ksession = 
(StatefulKnowledgeSessionImpl)kBase.newStatefulKnowledgeSession(); final InternalAgenda agenda = (InternalAgenda) ksession.getAgenda(); final List list = new ArrayList(); // create the consequence final Consequence consequence = new Consequence() { private static final long serialVersionUID = 510l; public void evaluate(KnowledgeHelper knowledgeHelper, WorkingMemory workingMemory) { list.add(knowledgeHelper.getRule()); } public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException { } public void writeExternal(ObjectOutput out) throws IOException { } public String getName() { return "default"; } }; // create a rule for each agendaGroup final RuleImpl rule0 = new RuleImpl("test-rule0"); rule0.setActivationGroup("activation-group-0"); final RuleTerminalNode node0 = new RuleTerminalNode(3, new MockTupleSource(2), rule0, rule0.getLhs(), 0, buildContext); rule0.setConsequence(consequence); final PropagationContext context0 = pctxFactory.createPropagationContext(0, PropagationContext.Type.INSERTION, rule0, null, new DefaultFactHandle()); final RuleImpl rule1 = new RuleImpl("test-rule1"); rule1.setActivationGroup("activation-group-0"); rule1.setSalience(new SalienceInteger(10)); final RuleTerminalNode node1 = new RuleTerminalNode(5, new MockTupleSource(4), rule1, rule1.getLhs(), 0, buildContext); rule1.setConsequence(consequence); final PropagationContext context1 = pctxFactory.createPropagationContext(0, PropagationContext.Type.INSERTION, rule1, null, new DefaultFactHandle()); final RuleImpl rule2 = new RuleImpl("test-rule2"); rule2.setSalience(new SalienceInteger(-5)); final RuleTerminalNode node2 = new RuleTerminalNode(7, new MockTupleSource(6), rule2, rule2.getLhs(), 0, buildContext); rule2.setConsequence(consequence); final PropagationContext context2 = pctxFactory.createPropagationContext(0, PropagationContext.Type.INSERTION, rule2, null, new DefaultFactHandle()); final RuleImpl rule3 = new RuleImpl("test-rule3", "agendaGroup3"); 
rule3.setSalience(new SalienceInteger(-10)); rule3.setActivationGroup("activation-group-3"); final RuleTerminalNode node3 = new RuleTerminalNode(9, new MockTupleSource(8), rule3, rule3.getLhs(), 0, buildContext); rule3.setConsequence(consequence); final PropagationContext context3 = pctxFactory.createPropagationContext(0, PropagationContext.Type.INSERTION, rule3, null, new DefaultFactHandle()); final RuleTerminalNodeLeftTuple tuple1 = new RuleTerminalNodeLeftTuple(new DefaultFactHandle(1, "cheese"), node1, true); final RuleTerminalNodeLeftTuple tuple3 = new RuleTerminalNodeLeftTuple(new DefaultFactHandle(1, "cheese"), node1, true); final RuleTerminalNodeLeftTuple tuple4 = new RuleTerminalNodeLeftTuple(new DefaultFactHandle(1, "cheese"), node1, true); final RuleTerminalNodeLeftTuple tuple5 = new RuleTerminalNodeLeftTuple(new DefaultFactHandle(1, "cheese"), node1, true); final RuleTerminalNodeLeftTuple tuple6 = new RuleTerminalNodeLeftTuple(new DefaultFactHandle(1, "cheese"), node1, true); final RuleTerminalNodeLeftTuple tuple7 = new RuleTerminalNodeLeftTuple(new DefaultFactHandle(1, "cheese"), node2, true); final RuleTerminalNodeLeftTuple tuple8 = new RuleTerminalNodeLeftTuple(new DefaultFactHandle(1, "cheese"), node3, true); // Assert the tuple and check it was added to activation-group-0 node0.assertLeftTuple(tuple1, context0, ksession); agenda.unstageActivations(); final InternalActivationGroup activationGroup0 = agenda.getActivationGroup("activation-group-0"); assertEquals(1, activationGroup0.size()); // Removing a tuple should remove the activation from the activation-group-0 again node0.retractLeftTuple(tuple1, context0, ksession); assertEquals(0, activationGroup0.size()); // Assert the tuple again and check it was added to activation-group-0 node0.assertLeftTuple(tuple3, context0, ksession); agenda.unstageActivations(); assertEquals(1, activationGroup0.size()); // Assert another tuple and check it was added to activation-group-0 node1.assertLeftTuple(tuple4, 
context1, ksession); agenda.unstageActivations(); assertEquals(2, activationGroup0.size()); // There should now be two potential activations to fire assertEquals(2, agenda.focusStackSize()); // The first tuple should fire, adding itself to the List and clearing and cancelling the other Activations in the activation-group-0 agenda.fireNextItem(null, 0, -1); // Make sure the activation-group-0 is clear assertEquals(0, activationGroup0.size()); // Make sure the Agenda is empty assertEquals(0, agenda.focusStackSize()); // List should only have a single item, "rule0" assertEquals(1, list.size()); assertSame(rule1, list.get(0)); list.clear(); //------------------- // Now try a more complex scenario involving two Xor Groups and one rule not in a Group node0.assertLeftTuple(tuple5, context0, ksession); node1.assertLeftTuple(tuple6, context1, ksession); node2.assertLeftTuple(tuple7, context2, ksession); node3.assertLeftTuple(tuple8, context3, ksession); agenda.unstageActivations(); // activation-group-0 should be populated again assertEquals(2, activationGroup0.size()); // make sure the activation-group-3 is cleared when we can clear the Agenda Group for the activation that is in both final InternalActivationGroup activationGroup3 = agenda.getActivationGroup("activation-group-3"); assertEquals(4, agenda.agendaSize()); assertEquals(1, activationGroup3.size()); agenda.clearAndCancelAgendaGroup("agendaGroup3"); assertEquals(3, agenda.agendaSize()); assertEquals(0, activationGroup3.size()); // Activation for activation-group-0 should be next - the activation in no activation/agenda group should remain on the agenda agenda.fireNextItem(null, 0, -1); assertEquals(1, agenda.agendaSize()); assertEquals(0, activationGroup0.size()); // Fire the last activation and make sure the Agenda Empties agenda.fireNextItem(null, 0, -1); assertEquals(0, agenda.agendaSize()); assertEquals(2, list.size()); assertEquals(rule1, list.get(0)); assertEquals(rule2, list.get(1)); } /** * Basic 
RuleFlowGroup test where there are three rules, each in their own * RuleFlowGroup. First only rule-flow-group-0 is activated and rule0 is * executed. When the two remaining groups are activated, the rule with the * highest priority is executed first. */ @Test public void testRuleFlowGroup() { StatefulKnowledgeSessionImpl ksession = (StatefulKnowledgeSessionImpl)kBase.newStatefulKnowledgeSession(); final InternalAgenda agenda = (InternalAgenda) ksession.getAgenda(); final List list = new ArrayList(); // create the consequence final Consequence consequence = new Consequence() { private static final long serialVersionUID = 510l; public void evaluate(KnowledgeHelper knowledgeHelper, WorkingMemory workingMemory) { list.add(knowledgeHelper.getRule()); } public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException { } public void writeExternal(ObjectOutput out) throws IOException { } public String getName() { return "default"; } }; // create a rule for each rule flow groups final RuleImpl rule0 = new RuleImpl("test-rule0"); rule0.setAgendaGroup("rule-flow-group-0"); rule0.setConsequence(consequence); final RuleTerminalNode node0 = new RuleTerminalNode(3, new MockTupleSource(2), rule0, rule0.getLhs(), 0, buildContext); final RuleImpl rule1 = new RuleImpl("test-rule1"); rule1.setAgendaGroup("rule-flow-group-1"); rule1.setConsequence(consequence); final RuleTerminalNode node1 = new RuleTerminalNode(4, new MockTupleSource(2), rule1, rule1.getLhs(), 0, buildContext); final RuleImpl rule2 = new RuleImpl("test-rule2"); rule2.setAgendaGroup("rule-flow-group-2"); rule2.setConsequence(consequence); rule2.setSalience(new SalienceInteger(10)); final RuleTerminalNode node2 = new RuleTerminalNode(5, new MockTupleSource(2), rule2, rule2.getLhs(), 0, buildContext); final PropagationContext context0 = pctxFactory.createPropagationContext(0, PropagationContext.Type.INSERTION, rule0, null, new DefaultFactHandle()); final RuleFlowGroup ruleFlowGroup0 = 
agenda.getRuleFlowGroup("rule-flow-group-0"); final RuleFlowGroup ruleFlowGroup1 = agenda.getRuleFlowGroup("rule-flow-group-1"); final RuleFlowGroup ruleFlowGroup2 = agenda.getRuleFlowGroup("rule-flow-group-2"); final RuleTerminalNodeLeftTuple tuple0 = new RuleTerminalNodeLeftTuple(new DefaultFactHandle(1, "cheese"), node0, true); node0.assertLeftTuple(tuple0, context0, ksession); final RuleTerminalNodeLeftTuple tuple1 = new RuleTerminalNodeLeftTuple(new DefaultFactHandle(1, "cheese"), node1, true); node0.assertLeftTuple(tuple1, context0, ksession); final RuleTerminalNodeLeftTuple tuple2 = new RuleTerminalNodeLeftTuple(new DefaultFactHandle(1, "cheese"), node2, true); node1.assertLeftTuple(tuple2, context0, ksession); final RuleTerminalNodeLeftTuple tuple3 = new RuleTerminalNodeLeftTuple(new DefaultFactHandle(1, "cheese"), node0, true); node2.assertLeftTuple(tuple3, context0, ksession); agenda.unstageActivations(); assertEquals(2, ruleFlowGroup0.size()); assertEquals(1, ruleFlowGroup1.size()); assertEquals(1, ruleFlowGroup2.size()); assertEquals(4, agenda.agendaSize()); // Activate the RuleFlowGroup, the nodes stay in the group, but should now also be in the Agenda agenda.activateRuleFlowGroup("rule-flow-group-0"); assertEquals(2, ruleFlowGroup0.size()); assertEquals(4, agenda.agendaSize()); // As we fire each rule they are removed from both the Agenda and the RuleFlowGroup agenda.fireNextItem(null, 0, -1); assertEquals(1, ruleFlowGroup0.size()); assertEquals(3, agenda.agendaSize()); // After firing all activations of RuleFlowGroup 0, the agenda is empty agenda.fireNextItem(null, 0, -1); assertEquals(0, ruleFlowGroup0.size()); assertEquals(2, agenda.agendaSize()); // Now we activate two RuleFlowGroups together // All their activations should be added to the agenda. 
agenda.activateRuleFlowGroup("rule-flow-group-1"); agenda.activateRuleFlowGroup("rule-flow-group-2"); assertEquals(1, ruleFlowGroup1.size()); assertEquals(1, ruleFlowGroup2.size()); assertEquals(2, agenda.agendaSize()); // we set the salience higher on rule2, so it sould fire first and empty ruleFlowGroup2 agenda.fireNextItem(null, 0, -1); assertEquals(1, ruleFlowGroup1.size()); assertEquals(0, ruleFlowGroup2.size()); assertEquals(1, agenda.agendaSize()); // this is the last activation, so everything should be empty after this agenda.fireNextItem(null, 0, -1); assertEquals(0, ruleFlowGroup0.size()); assertEquals(0, ruleFlowGroup1.size()); assertEquals(0, ruleFlowGroup2.size()); assertEquals(0, agenda.agendaSize()); } /** * RuleFlowGroup test that makes sure that, if new activations are created * for an active RuleFlowGroup, those activations get added to the agenda * directly as well. */ @Test public void testRuleFlowGroup1() { final StatefulKnowledgeSessionImpl ksession = (StatefulKnowledgeSessionImpl)kBase.newStatefulKnowledgeSession(); final InternalAgenda agenda = (InternalAgenda) ksession.getAgenda(); // create rule1 final Consequence consequence1 = new Consequence() { private static final long serialVersionUID = 510l; public void evaluate(KnowledgeHelper knowledgeHelper, WorkingMemory workingMemory) { // do nothing } public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException { } public void writeExternal(ObjectOutput out) throws IOException { } public String getName() { return "default"; } }; final RuleImpl rule1 = new RuleImpl("test-rule1"); rule1.setAgendaGroup("rule-flow-group-0"); rule1.setConsequence(consequence1); final RuleTerminalNode node1 = new RuleTerminalNode(4, new MockTupleSource(2), rule1, rule1.getLhs(), 0, buildContext); // create context final PropagationContext context0 = pctxFactory.createPropagationContext(0, PropagationContext.Type.INSERTION, rule1, null, new DefaultFactHandle()); // create rule0 final 
Consequence consequence0 = new Consequence() { private static final long serialVersionUID = 510l; public void evaluate(KnowledgeHelper knowledgeHelper, WorkingMemory w) { // activate rule1 final RuleTerminalNodeLeftTuple tuple1 = new RuleTerminalNodeLeftTuple(new DefaultFactHandle(1, "cheese"), node1, true); node1.assertLeftTuple(tuple1, context0, ksession); } public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException { } public void writeExternal(ObjectOutput out) throws IOException { } public String getName() { return "default"; } }; final RuleImpl rule0 = new RuleImpl("test-rule0"); rule0.setAgendaGroup("rule-flow-group-0"); rule0.setConsequence(consequence0); final RuleTerminalNode node0 = new RuleTerminalNode(3, new MockTupleSource(2), rule0, rule0.getLhs(), 0, buildContext); final RuleFlowGroup ruleFlowGroup0 = agenda.getRuleFlowGroup("rule-flow-group-0"); // Create one activation for rule0 only final RuleTerminalNodeLeftTuple tuple0 = new RuleTerminalNodeLeftTuple(new DefaultFactHandle(1, "cheese"), node0, true); node0.assertLeftTuple(tuple0, context0, ksession); agenda.unstageActivations(); // RuleFlowGroup should be populated, but the agenda shouldn't be assertEquals(1, ruleFlowGroup0.size()); // Activate the RuleFlowGroup, the activation stays in the group, but should now also be in the Agenda agenda.activateRuleFlowGroup("rule-flow-group-0"); assertEquals(1, ruleFlowGroup0.size()); // As we fire the rule, an new activation is created for rule1, and it should be added to group AND the agenda. agenda.fireNextItem(null, 0, -1); assertEquals(1, ruleFlowGroup0.size()); // After firing all activations of RuleFlowGroup 0, the agenda is empty agenda.fireNextItem(null, 0, -1); assertEquals(0, ruleFlowGroup0.size()); assertEquals(0, agenda.agendaSize()); } /** * RuleFlowGroup test that makes sure that, if an activation in an active * RuleFlowGroup gets deactivated, the activation is no longer executed. 
*/
    @Test
    public void testRuleFlowGroup2() {
        final StatefulKnowledgeSessionImpl ksession = (StatefulKnowledgeSessionImpl)kBase.newStatefulKnowledgeSession();
        final InternalAgenda agenda = (InternalAgenda) ksession.getAgenda();

        // create rule1 (no-op consequence; rule0's consequence will retract its activation first)
        final Consequence consequence1 = new Consequence() {
            private static final long serialVersionUID = 510l;

            public void evaluate(KnowledgeHelper knowledgeHelper,
                                 WorkingMemory workingMemory) {
                // do nothing
            }

            public void readExternal(ObjectInput in) throws IOException,
                                                    ClassNotFoundException {
            }

            public void writeExternal(ObjectOutput out) throws IOException {
            }

            public String getName() {
                return "default";
            }
        };

        final RuleImpl rule1 = new RuleImpl("test-rule1");
        rule1.setAgendaGroup("rule-flow-group-0");
        rule1.setConsequence(consequence1);

        final RuleTerminalNode node1 = new RuleTerminalNode(4,
                                                            new MockTupleSource(2),
                                                            rule1,
                                                            rule1.getLhs(),
                                                            0,
                                                            buildContext);

        // create context
        final PropagationContext context0 = pctxFactory.createPropagationContext(0,
                                                                                 PropagationContext.Type.INSERTION,
                                                                                 rule1,
                                                                                 null,
                                                                                 new DefaultFactHandle());

        // tuple1 is created up-front so rule0's consequence below can capture and retract it
        final RuleTerminalNodeLeftTuple tuple1 = new RuleTerminalNodeLeftTuple(new DefaultFactHandle(1,
                                                                                                     "cheese"),
                                                                               node1,
                                                                               true);

        // create rule0; its consequence cancels rule1's pending activation
        final Consequence consequence0 = new Consequence() {
            private static final long serialVersionUID = 510l;

            public void evaluate(KnowledgeHelper knowledgeHelper,
                                 WorkingMemory w) {
                // deactivate rule1
                node1.retractLeftTuple(tuple1,
                                       context0,
                                       ksession);
            }

            public void readExternal(ObjectInput in) throws IOException,
                                                    ClassNotFoundException {
            }

            public void writeExternal(ObjectOutput out) throws IOException {
            }

            public String getName() {
                return "default";
            }
        };

        final RuleImpl rule0 = new RuleImpl("test-rule0");
        rule0.setAgendaGroup("rule-flow-group-0");
        rule0.setConsequence(consequence0);
        // higher salience so rule0 is guaranteed to fire before rule1
        rule0.setSalience(new SalienceInteger(10));

        final RuleTerminalNode node0 = new RuleTerminalNode(3,
                                                            new MockTupleSource(2),
                                                            rule0,
                                                            rule0.getLhs(),
                                                            0,
                                                            buildContext);

        final RuleFlowGroup ruleFlowGroup0 = agenda.getRuleFlowGroup("rule-flow-group-0");

        // Create an activation for both rules
        final RuleTerminalNodeLeftTuple tuple0 = new RuleTerminalNodeLeftTuple(new DefaultFactHandle(1,
                                                                                                     "cheese"),
                                                                               node0,
                                                                               true);
        node0.assertLeftTuple(tuple0,
                              context0,
                              ksession);
        node1.assertLeftTuple(tuple1,
                              context0,
                              ksession);
        agenda.unstageActivations();

        // RuleFlowGroup should be populated
        assertEquals(2,
                     ruleFlowGroup0.size());

        // Activate the RuleFlowGroup, the activations stay in the group, but should now also be in the Agenda
        agenda.activateRuleFlowGroup("rule-flow-group-0");
        assertEquals(2,
                     ruleFlowGroup0.size());

        // As we fire the rule, rule0 should execute first, as it has higher salience.
        // Rule0 should deactivate rule1 as well, so everything should be empty
        agenda.fireNextItem(null, 0, -1);
        assertEquals(0,
                     ruleFlowGroup0.size());
        agenda.fireNextItem(null, 0, -1);
        assertEquals(0,
                     ruleFlowGroup0.size());
    }

    /**
     * RuleFlowGroup test that makes sure that, when deactivating a RuleFlowGroup,
     * all activations for that group are no longer on the agenda. When
     * reactivating the RuleFlowGroup however, they get added to the agenda again.
*/
    @Test
    public void testRuleFlowGroup3() {
        StatefulKnowledgeSessionImpl ksession = (StatefulKnowledgeSessionImpl)kBase.newStatefulKnowledgeSession();
        final InternalAgenda agenda = (InternalAgenda) ksession.getAgenda();

        // create rule0 (no-op consequence; this test only inspects group membership)
        final Consequence consequence0 = new Consequence() {
            private static final long serialVersionUID = 510l;

            public void evaluate(KnowledgeHelper knowledgeHelper,
                                 WorkingMemory w) {
                // do nothing
            }

            public void readExternal(ObjectInput in) throws IOException,
                                                    ClassNotFoundException {
            }

            public void writeExternal(ObjectOutput out) throws IOException {
            }

            public String getName() {
                return "default";
            }
        };

        final RuleImpl rule0 = new RuleImpl("test-rule0");
        rule0.setAgendaGroup("rule-flow-group-0");
        rule0.setConsequence(consequence0);

        final RuleTerminalNode node0 = new RuleTerminalNode(1,
                                                            new MockTupleSource(2),
                                                            rule0,
                                                            rule0.getLhs(),
                                                            0,
                                                            buildContext);

        final RuleFlowGroup ruleFlowGroup0 = agenda.getRuleFlowGroup("rule-flow-group-0");

        // create context
        final PropagationContext context0 = pctxFactory.createPropagationContext(0,
                                                                                 PropagationContext.Type.INSERTION,
                                                                                 rule0,
                                                                                 null,
                                                                                 new DefaultFactHandle());

        // Create two activations for this rule
        final RuleTerminalNodeLeftTuple tuple0 = new RuleTerminalNodeLeftTuple(new DefaultFactHandle(1,
                                                                                                     "cheese"),
                                                                               node0,
                                                                               true);
        node0.assertLeftTuple(tuple0,
                              context0,
                              ksession);
        final RuleTerminalNodeLeftTuple tuple1 = new RuleTerminalNodeLeftTuple(new DefaultFactHandle(1,
                                                                                                     "cheese"),
                                                                               node0,
                                                                               true);
        node0.assertLeftTuple(tuple1,
                              context0,
                              ksession);
        agenda.unstageActivations();

        // RuleFlowGroup should be populated, but the agenda shouldn't be
        assertEquals(2,
                     ruleFlowGroup0.size());

        // Activate the RuleFlowGroup
        agenda.activateRuleFlowGroup("rule-flow-group-0");
        assertEquals(2,
                     ruleFlowGroup0.size());

        // Reactivating an already active RuleFlowGroup should not have any effect
        agenda.activateRuleFlowGroup("rule-flow-group-0");
        assertEquals(2,
                     ruleFlowGroup0.size());

        // Deactivate the RuleFlowGroup, the activations should be removed from
        // the agenda but still in the RuleFlowGroup
        agenda.deactivateRuleFlowGroup("rule-flow-group-0");
        assertEquals(2,
                     ruleFlowGroup0.size());

        // Reactivate the RuleFlowGroup, the activations stay in the group, but
        // should now also be in the Agenda again
        agenda.activateRuleFlowGroup("rule-flow-group-0");
        assertEquals(2,
                     ruleFlowGroup0.size());
    }

    /**
     * Test auto-deactivation of RuleFlowGroup.
     */
    @Test
    public void testRuleFlowGroup4() {
        IdGenerator idGenerator = kBase.getReteooBuilder().getIdGenerator();
        StatefulKnowledgeSessionImpl ksession = (StatefulKnowledgeSessionImpl)kBase.newStatefulKnowledgeSession();
        final InternalAgenda agenda = (InternalAgenda) ksession.getAgenda();

        // create rule0 (no-op consequence)
        final Consequence consequence0 = new Consequence() {
            private static final long serialVersionUID = 510l;

            public void evaluate(KnowledgeHelper knowledgeHelper,
                                 WorkingMemory w) {
                // do nothing
            }

            public void readExternal(ObjectInput in) throws IOException,
                                                    ClassNotFoundException {
            }

            public void writeExternal(ObjectOutput out) throws IOException {
            }

            public String getName() {
                return "default";
            }
        };

        final RuleImpl rule0 = new RuleImpl("test-rule0");
        rule0.setAgendaGroup("rule-flow-group-0");
        rule0.setConsequence(consequence0);

        final RuleTerminalNode node0 = new RuleTerminalNode(idGenerator.getNextId(),
                                                            new MockTupleSource(idGenerator.getNextId()),
                                                            rule0,
                                                            rule0.getLhs(),
                                                            0,
                                                            buildContext);

        final RuleFlowGroup ruleFlowGroup0 = agenda.getRuleFlowGroup("rule-flow-group-0");
        // the group auto-deactivates by default; switch it off for the first half of the test
        assertTrue(ruleFlowGroup0.isAutoDeactivate());
        ruleFlowGroup0.setAutoDeactivate(false);
        assertFalse(ruleFlowGroup0.isAutoDeactivate());

        // create context
        final PropagationContext context0 = pctxFactory.createPropagationContext(0,
                                                                                 PropagationContext.Type.INSERTION,
                                                                                 rule0,
                                                                                 null,
                                                                                 new DefaultFactHandle());

        // Create an activation for this rule
        final RuleTerminalNodeLeftTuple tuple0 = new RuleTerminalNodeLeftTuple(new DefaultFactHandle(1,
                                                                                                     "cheese"),
                                                                               node0,
                                                                               true);
        node0.assertLeftTuple(tuple0,
                              context0,
                              ksession);
        ksession.fireAllRules();

        // RuleFlowGroup should be populated, but the agenda shouldn't be
        assertEquals(1,
                     ruleFlowGroup0.size());

        // Activate the RuleFlowGroup
        agenda.activateRuleFlowGroup("rule-flow-group-0");
        assertEquals(1,
                     ruleFlowGroup0.size());

        // Execute activation; with auto-deactivate off the empty group stays active
        agenda.fireNextItem(null, 0, -1);
        assertEquals(0,
                     ruleFlowGroup0.size());
        assertTrue(ruleFlowGroup0.isActive());

        // Set auto-deactivation status to true
        ruleFlowGroup0.setAutoDeactivate(true);
        assertTrue(ruleFlowGroup0.isAutoDeactivate());
        agenda.fireNextItem(null, 0, -1);
        assertFalse(ruleFlowGroup0.isActive());

        // Add another activation and activate RuleFlowGroup again
        final RuleTerminalNodeLeftTuple tuple1 = new RuleTerminalNodeLeftTuple(new DefaultFactHandle(1,
                                                                                                     "cheese"),
                                                                               node0,
                                                                               true);
        node0.assertLeftTuple(tuple1,
                              context0,
                              ksession);
        agenda.unstageActivations();
        agenda.activateRuleFlowGroup("rule-flow-group-0");
        assertEquals(1,
                     ruleFlowGroup0.size());
        assertTrue(ruleFlowGroup0.isActive());

        // Execute the activation, the RuleFlowGroup should automatically deactivate
        agenda.fireNextItem(null, 0, -1);
        assertEquals(0,
                     ruleFlowGroup0.size());
        ksession.executeQueuedActionsForRete();
        assertEquals(0,
                     ruleFlowGroup0.size());
        agenda.fireNextItem(null, 0, -1);
        assertFalse(ruleFlowGroup0.isActive());

        // A new activation should now be added to the RuleFlowGroup but not to the agenda
        final RuleTerminalNodeLeftTuple tuple2 = new RuleTerminalNodeLeftTuple(new DefaultFactHandle(1,
                                                                                                     "cheese"),
                                                                               node0,
                                                                               true);
        node0.assertLeftTuple(tuple2,
                              context0,
                              ksession);
        agenda.unstageActivations();
        assertEquals(1,
                     ruleFlowGroup0.size());
    }

    /**
     * Test auto-deactivation of empty ruleflow group.
*/
    @Test
    public void testRuleFlowGroup5() {
        StatefulKnowledgeSessionImpl ksession = (StatefulKnowledgeSessionImpl)kBase.newStatefulKnowledgeSession();
        final InternalAgenda agenda = (InternalAgenda) ksession.getAgenda();

        // create rule0 (no-op consequence); note this test uses setRuleFlowGroup,
        // unlike the other tests which use setAgendaGroup
        final Consequence consequence0 = new Consequence() {
            private static final long serialVersionUID = 510l;

            public void evaluate(KnowledgeHelper knowledgeHelper,
                                 WorkingMemory w) {
                // do nothing
            }

            public void readExternal(ObjectInput in) throws IOException,
                                                    ClassNotFoundException {
            }

            public void writeExternal(ObjectOutput out) throws IOException {
            }

            public String getName() {
                return "default";
            }
        };

        final RuleImpl rule0 = new RuleImpl("test-rule0");
        rule0.setRuleFlowGroup("rule-flow-group-0");
        rule0.setConsequence(consequence0);

        final RuleFlowGroup ruleFlowGroup0 = agenda.getRuleFlowGroup("rule-flow-group-0");
        assertTrue(ruleFlowGroup0.isAutoDeactivate());

        // RuleFlowGroup should be empty, as well as the agenda
        assertEquals(0,
                     ruleFlowGroup0.size());
        assertEquals(0,
                     agenda.agendaSize());

        // @TODO FIXME (mdp)
        // // Activate the RuleFlowGroup, the activations stay in the group, but
        // // should now also be in the Agenda
        // agenda.activateRuleFlowGroup( "rule-flow-group-0" );
        // assertEquals( 0,
        // ruleFlowGroup0.size() );
        // assertEquals( 0,
        // agenda.agendaSize() );
        // workingMemory.executeQueuedActions();
        //
        // assertFalse( ruleFlowGroup0.isActive() );
    }

    @Test
    public void testRuleFlowGroupLockOnActive() {
        StatefulKnowledgeSessionImpl ksession = (StatefulKnowledgeSessionImpl)kBase.newStatefulKnowledgeSession();
        final InternalAgenda agenda = (InternalAgenda) ksession.getAgenda();

        // create the agendaGroup
        //final AgendaGroupImpl agendaGroup = new AgendaGroupImpl( "agendaGroup" );
        //agenda.addAgendaGroup( agendaGroup );
        final RuleFlowGroup ruleFlowGroup = (RuleFlowGroup) agenda.getRuleFlowGroup("rule-flow-group-0");

        // create a rule for the agendaGroup
        final RuleImpl rule = new RuleImpl("test-rule");
        rule.setAgendaGroup("rule-flow-group-0");
        final RuleTerminalNode node = new RuleTerminalNode(2,
                                                           new MockTupleSource(2),
                                                           rule,
                                                           rule.getLhs(),
                                                           0,
                                                           buildContext);
        final RuleTerminalNodeLeftTuple tuple1 = new RuleTerminalNodeLeftTuple(new DefaultFactHandle(1,
                                                                                                     "cheese"),
                                                                               node,
                                                                               true);
        final RuleTerminalNodeLeftTuple tuple2 = new RuleTerminalNodeLeftTuple(new DefaultFactHandle(1,
                                                                                                     "cheese"),
                                                                               node,
                                                                               true);
        final RuleTerminalNodeLeftTuple tuple3 = new RuleTerminalNodeLeftTuple(new DefaultFactHandle(1,
                                                                                                     "cheese"),
                                                                               node,
                                                                               true);

        final PropagationContext context = pctxFactory.createPropagationContext(0,
                                                                                PropagationContext.Type.INSERTION,
                                                                                rule,
                                                                                null,
                                                                                new DefaultFactHandle());

        // When both the rule is lock-on-active and the agenda group is active, activations should be ignored
        rule.setLockOnActive(true);
        ruleFlowGroup.setAutoDeactivate(false);
        ((InternalRuleFlowGroup) ruleFlowGroup).setActive(true);
        node.assertLeftTuple(tuple1,
                             context,
                             ksession);
        // activation should be ignored
        assertEquals(0,
                     ruleFlowGroup.size());

        // lock-on-active is now false so activation should propagate
        rule.setLockOnActive(false);
        node.assertLeftTuple(tuple2,
                             context,
                             ksession);
        agenda.unstageActivations();
        assertEquals(1,
                     ruleFlowGroup.size());

        // even if lock-on-active is true, unless the agenda group is active the activation will still propagate
        rule.setLockOnActive(true);
        ((InternalAgendaGroup) ruleFlowGroup).setActive(false);
        node.assertLeftTuple(tuple3,
                             context,
                             ksession);
        agenda.unstageActivations();
        assertEquals(2,
                     ruleFlowGroup.size());
    }

    @Test
    public void testSequentialAgenda() {
        // sequential mode with phreak disabled; uses a local kBase, not the shared field
        RuleBaseConfiguration conf = new RuleBaseConfiguration();
        conf.setPhreakEnabled(false);
        conf.setSequential(true);
        InternalKnowledgeBase kBase = (InternalKnowledgeBase) KnowledgeBaseFactory.newKnowledgeBase(conf);

        // create the consequence (shared no-op for all four rules)
        final Consequence consequence = new Consequence() {
            private static final long serialVersionUID = 510l;

            public void evaluate(KnowledgeHelper knowledgeHelper,
                                 WorkingMemory workingMemory) {
                // do nothing
            }

            public void readExternal(ObjectInput in) throws IOException,
                                                    ClassNotFoundException {
            }

            public void writeExternal(ObjectOutput out) throws IOException {
            }

            public String getName() {
                return "default";
            }
        };

        // create a rule for each agendaGroup: rule0 -> MAIN, rule1/rule2 -> agendaGroup1, rule3 -> agendaGroup2
        final RuleImpl rule0 = new RuleImpl("test-rule0");
        final RuleTerminalNode node0 = new RuleTerminalNode(3,
                                                            new MockTupleSource(2),
                                                            rule0,
                                                            rule0.getLhs(),
                                                            0,
                                                            buildContext);
        rule0.setConsequence(consequence);
        final PropagationContext context0 = pctxFactory.createPropagationContext(0,
                                                                                 PropagationContext.Type.INSERTION,
                                                                                 rule0,
                                                                                 null,
                                                                                 new DefaultFactHandle());

        final RuleImpl rule1 = new RuleImpl("test-rule1",
                                            "agendaGroup1");
        final RuleTerminalNode node1 = new RuleTerminalNode(5,
                                                            new MockTupleSource(4),
                                                            rule1,
                                                            rule1.getLhs(),
                                                            0,
                                                            buildContext);
        rule1.setConsequence(consequence);
        final PropagationContext context1 = pctxFactory.createPropagationContext(0,
                                                                                 PropagationContext.Type.INSERTION,
                                                                                 rule1,
                                                                                 null,
                                                                                 new DefaultFactHandle());

        final RuleImpl rule2 = new RuleImpl("test-rule2",
                                            "agendaGroup1");
        final RuleTerminalNode node2 = new RuleTerminalNode(7,
                                                            new MockTupleSource(6),
                                                            rule2,
                                                            rule2.getLhs(),
                                                            0,
                                                            buildContext);
        rule2.setConsequence(consequence);
        final PropagationContext context2 = pctxFactory.createPropagationContext(0,
                                                                                 PropagationContext.Type.INSERTION,
                                                                                 rule2,
                                                                                 null,
                                                                                 new DefaultFactHandle());

        final RuleImpl rule3 = new RuleImpl("test-rule3",
                                            "agendaGroup2");
        final RuleTerminalNode node3 = new RuleTerminalNode(9,
                                                            new MockTupleSource(8),
                                                            rule3,
                                                            rule3.getLhs(),
                                                            0,
                                                            buildContext);
        rule3.setConsequence(consequence);
        final PropagationContext context3 = pctxFactory.createPropagationContext(0,
                                                                                 PropagationContext.Type.INSERTION,
                                                                                 rule3,
                                                                                 null,
                                                                                 new DefaultFactHandle());

        final RuleTerminalNodeLeftTuple tuple0 = new RuleTerminalNodeLeftTuple(new DefaultFactHandle(1,
                                                                                                     "cheese"),
                                                                               node0,
                                                                               true);
        final RuleTerminalNodeLeftTuple tuple2_1 = new RuleTerminalNodeLeftTuple(new DefaultFactHandle(2,
                                                                                                       "cheese"),
                                                                                 node2,
                                                                                 true);
        final RuleTerminalNodeLeftTuple tuple2_2 = new RuleTerminalNodeLeftTuple(new DefaultFactHandle(3,
                                                                                                       "cheese"),
                                                                                 node2,
                                                                                 true);
        final RuleTerminalNodeLeftTuple tuple3_1 = new RuleTerminalNodeLeftTuple(new DefaultFactHandle(4,
                                                                                                       "cheese"),
                                                                                 node3,
                                                                                 true);
        final RuleTerminalNodeLeftTuple tuple3_2 = new RuleTerminalNodeLeftTuple(new DefaultFactHandle(5,
                                                                                                       "cheese"),
                                                                                 node3,
                                                                                 true);

        InternalWorkingMemory workingMemory = new StatefulKnowledgeSessionImpl(0L, kBase);

        final InternalAgenda agenda = (InternalAgenda) workingMemory.getAgenda();

        final AgendaGroup agendaGroup1 = agenda.getAgendaGroup("agendaGroup1");
        final AgendaGroup agendaGroup2 = agenda.getAgendaGroup("agendaGroup2");

        // focus at this point is MAIN
        assertEquals(0,
                     agenda.focusStackSize());

        node0.assertLeftTuple(tuple0,
                              context0,
                              workingMemory);
        agenda.unstageActivations();

        // check focus is main
        final AgendaGroup main = agenda.getAgendaGroup(AgendaGroup.MAIN);
        assertEquals(agenda.getFocus(),
                     main);
        // check main got the tuple
        assertEquals(1,
                     agenda.getFocus().size());

        node2.assertLeftTuple(tuple2_1,
                              context2,
                              workingMemory);
        agenda.unstageActivations();

        // main is still focus and this tuple went to agendaGroup1
        assertEquals(1,
                     agenda.getFocus().size());
        // check agendaGroup1 still got the tuple
        assertEquals(1,
                     agendaGroup1.size());
        // make sure total agenda size reflects this
        assertEquals(2,
                     agenda.agendaSize());

        // put another one on agendaGroup 1
        node2.assertLeftTuple(tuple2_2,
                              context2,
                              workingMemory);
        agenda.unstageActivations();

        // main is still focus so shouldn't have increased
        assertEquals(1,
                     agenda.getFocus().size());
        // check agendaGroup1 got the second tuple
        assertEquals(2,
                     agendaGroup1.size());
        // make sure total agenda size reflects this
        assertEquals(3,
                     agenda.agendaSize());

        // set the focus to agendaGroup1 (it already holds the two activations above)
        agenda.setFocus("agendaGroup1");
        // add agendaGroup2 onto the focus stack
        agenda.setFocus("agendaGroup2");
        // agendaGroup2, the current focus, has no activations
        assertEquals(0,
                     agenda.getFocus().size());

        // add to agendaGroup2
        node3.assertLeftTuple(tuple3_1,
                              context3,
                              workingMemory);
        agenda.unstageActivations();
        assertEquals(1,
                     agenda.getFocus().size());

        node3.assertLeftTuple(tuple3_2,
                              context3,
                              workingMemory);
        agenda.unstageActivations();

        // agendaGroup2 now has 2 activations
        assertEquals(2,
                     agenda.getFocus().size());
        // check totalAgendaSize still works
        assertEquals(5,
                     agenda.agendaSize());

        // ok now lets check that stacks work with fireNextItem
        agenda.fireNextItem(null, 0, -1);

        // agendaGroup2 should still be the current agendaGroup
        assertEquals(agendaGroup2,
                     agenda.getFocus());
        // agendaGroup2 has gone from 2 to one activations
        assertEquals(1,
                     agenda.getFocus().size());
        // check totalAgendaSize has reduced too
        assertEquals(4,
                     agenda.agendaSize());

        // now repeat the process
        agenda.fireNextItem(null, 0, -1);

        // focus is still agendaGroup2, but now its empty
        assertEquals(agendaGroup2,
                     agenda.getFocus());
        assertEquals(0,
                     agenda.getFocus().size());
        assertEquals(3,
                     agenda.agendaSize());

        // repeat fire again
        agenda.fireNextItem(null, 0, -1);

        // agendaGroup2 is empty so it should be popped from the stack making agendaGroup1 the current agendaGroup
        assertEquals(agendaGroup1,
                     agenda.getFocus());
        // agendaGroup1 had 2 activations, now it only has 1
        assertEquals(1,
                     agenda.getFocus().size());
        assertEquals(2,
                     agenda.agendaSize());

        // repeat fire again
        agenda.fireNextItem(null, 0, -1);

        assertEquals(agendaGroup1,
                     agenda.getFocus());
        assertEquals(0,
                     agenda.getFocus().size());
        assertEquals(1,
                     agenda.agendaSize());

        // this last fire is more interesting as it demonstrates that
        // agendaGroup1 on
        // the stack before agendaGroup2 gets skipped as it has no activations
        agenda.fireNextItem(null, 0, -1);

        assertEquals(agenda.getFocus(),
                     main);
        assertEquals(0,
                     agenda.getFocus().size());
        assertEquals(0,
                     agenda.agendaSize());
    }

    @Test
    public void testNullErrorOnGetScheduledActivations() {
        // regression guard: getScheduledActivations must not NPE on a fresh session
        StatefulKnowledgeSessionImpl ksession = (StatefulKnowledgeSessionImpl)kBase.newStatefulKnowledgeSession();
        try {
            ((InternalAgenda) ksession.getAgenda()).getScheduledActivations();
        } catch (NullPointerException e) {
            fail("Exception Should not have been thrown");
        }
    }
}
/* Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.camunda.bpm.engine.rest; import static com.jayway.restassured.RestAssured.given; import static org.camunda.bpm.engine.rest.helper.MockProvider.EXAMPLE_TASK_ID; import static org.camunda.bpm.engine.rest.helper.MockProvider.NON_EXISTING_ID; import static org.hamcrest.CoreMatchers.either; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; import static org.junit.Assert.assertThat; import static org.mockito.Matchers.any; import static org.mockito.Matchers.anyBoolean; import static org.mockito.Matchers.anyString; import static org.mockito.Matchers.argThat; import static org.mockito.Matchers.eq; import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Arrays; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response.Status; import org.camunda.bpm.engine.AuthorizationException; import org.camunda.bpm.engine.ProcessEngineException; import org.camunda.bpm.engine.TaskService; import 
org.camunda.bpm.engine.impl.TaskServiceImpl; import org.camunda.bpm.engine.impl.core.variable.type.ObjectTypeImpl; import org.camunda.bpm.engine.impl.digest._apacheCommonsCodec.Base64; import org.camunda.bpm.engine.impl.util.IoUtil; import org.camunda.bpm.engine.rest.exception.InvalidRequestException; import org.camunda.bpm.engine.rest.exception.RestException; import org.camunda.bpm.engine.rest.helper.EqualsList; import org.camunda.bpm.engine.rest.helper.EqualsMap; import org.camunda.bpm.engine.rest.helper.ErrorMessageHelper; import org.camunda.bpm.engine.rest.helper.MockObjectValue; import org.camunda.bpm.engine.rest.helper.MockProvider; import org.camunda.bpm.engine.rest.helper.VariableTypeHelper; import org.camunda.bpm.engine.rest.helper.variable.EqualsNullValue; import org.camunda.bpm.engine.rest.helper.variable.EqualsObjectValue; import org.camunda.bpm.engine.rest.helper.variable.EqualsPrimitiveValue; import org.camunda.bpm.engine.rest.helper.variable.EqualsUntypedValue; import org.camunda.bpm.engine.rest.util.VariablesBuilder; import org.camunda.bpm.engine.variable.Variables; import org.camunda.bpm.engine.variable.type.ValueType; import org.camunda.bpm.engine.variable.value.BooleanValue; import org.camunda.bpm.engine.variable.value.FileValue; import org.camunda.bpm.engine.variable.value.ObjectValue; import org.hamcrest.CoreMatchers; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.mockito.ArgumentCaptor; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.type.TypeFactory; import com.jayway.restassured.http.ContentType; import com.jayway.restassured.response.Response; /** * @author Daniel Meyer * */ public class AbstractTaskVariableRestResourceInteractionTest extends AbstractRestServiceTest { protected static final String TASK_SERVICE_URL = TEST_RESOURCE_ROOT_PATH + "/task"; protected static final String SINGLE_TASK_URL = TASK_SERVICE_URL + "/{id}"; protected static final String 
SINGLE_TASK_VARIABLES_URL = SINGLE_TASK_URL + "/variables";
  // URL templates for single-variable access; PUT/DELETE share the GET path
  protected static final String SINGLE_TASK_SINGLE_VARIABLE_URL = SINGLE_TASK_VARIABLES_URL + "/{varId}";
  protected static final String SINGLE_TASK_PUT_SINGLE_VARIABLE_URL = SINGLE_TASK_SINGLE_VARIABLE_URL;
  protected static final String SINGLE_TASK_SINGLE_BINARY_VARIABLE_URL = SINGLE_TASK_PUT_SINGLE_VARIABLE_URL + "/data";
  protected static final String SINGLE_TASK_DELETE_SINGLE_VARIABLE_URL = SINGLE_TASK_SINGLE_VARIABLE_URL;
  protected static final String SINGLE_TASK_MODIFY_VARIABLES_URL = SINGLE_TASK_VARIABLES_URL;

  protected TaskService taskServiceMock;

  /** Installs a mocked TaskService on the shared process engine before each test. */
  @Before
  public void setUpRuntimeData() {
    taskServiceMock = mock(TaskService.class);
    when(processEngine.getTaskService()).thenReturn(taskServiceMock);
  }

  /**
   * Replaces the engine's task service with a TaskServiceImpl mock (needed for
   * updateVariables, which is not on the TaskService interface). Note this
   * shadows the field of the same name.
   */
  private TaskServiceImpl mockTaskServiceImpl() {
    TaskServiceImpl taskServiceMock = mock(TaskServiceImpl.class);
    when(processEngine.getTaskService()).thenReturn(taskServiceMock);
    return taskServiceMock;
  }

  @Test
  public void testGetVariables() {
    when(taskServiceMock.getVariablesTyped(EXAMPLE_TASK_ID, true)).thenReturn(EXAMPLE_VARIABLES);

    Response response = given().pathParam("id", EXAMPLE_TASK_ID)
      .header("accept", MediaType.APPLICATION_JSON)
      .then().expect().statusCode(Status.OK.getStatusCode())
      .body(EXAMPLE_VARIABLE_KEY, notNullValue())
      .body(EXAMPLE_VARIABLE_KEY + ".value", equalTo(EXAMPLE_VARIABLE_VALUE.getValue()))
      .body(EXAMPLE_VARIABLE_KEY + ".type",
          equalTo(VariableTypeHelper.toExpectedValueTypeName(EXAMPLE_VARIABLE_VALUE.getType())))
      .when().get(SINGLE_TASK_VARIABLES_URL);

    Assert.assertEquals("Should return exactly one variable", 1, response.jsonPath().getMap("").size());
  }

  @Test
  public void testGetObjectVariables() {
    // given
    String variableKey = "aVariableId";

    List<String> payload = Arrays.asList("a", "b");
    ObjectValue variableValue =
        MockObjectValue
            .fromObjectValue(Variables
                .objectValue(payload)
                .serializationDataFormat("application/json")
                .create())
            .objectTypeName(ArrayList.class.getName())
            .serializedValue("a serialized value"); // this should differ from the serialized json

    when(taskServiceMock.getVariablesTyped(eq(EXAMPLE_TASK_ID), anyBoolean()))
      .thenReturn(Variables.createVariables().putValueTyped(variableKey, variableValue));

    // when
    given().pathParam("id", EXAMPLE_TASK_ID)
      .then().expect().statusCode(Status.OK.getStatusCode())
      .body(variableKey + ".value", equalTo(payload))
      .body(variableKey + ".type", equalTo("Object"))
      .body(variableKey + ".valueInfo." + ObjectTypeImpl.VALUE_INFO_SERIALIZATION_DATA_FORMAT, equalTo("application/json"))
      .body(variableKey + ".valueInfo." + ObjectTypeImpl.VALUE_INFO_OBJECT_TYPE_NAME, equalTo(ArrayList.class.getName()))
      .when().get(SINGLE_TASK_VARIABLES_URL);

    // then
    verify(taskServiceMock).getVariablesTyped(EXAMPLE_TASK_ID, true);
  }

  @Test
  public void testGetObjectVariablesSerialized() {
    // given
    String variableKey = "aVariableId";

    ObjectValue variableValue =
        Variables
          .serializedObjectValue("a serialized value")
          .serializationDataFormat("application/json")
          .objectTypeName(ArrayList.class.getName())
          .create();

    when(taskServiceMock.getVariablesTyped(eq(EXAMPLE_TASK_ID), anyBoolean()))
      .thenReturn(Variables.createVariables().putValueTyped(variableKey, variableValue));

    // when: deserializeValues=false must return the raw serialized form
    given()
      .pathParam("id", EXAMPLE_TASK_ID)
      .queryParam("deserializeValues", false)
      .then().expect().statusCode(Status.OK.getStatusCode())
      .body(variableKey + ".value", equalTo("a serialized value"))
      .body(variableKey + ".type", equalTo("Object"))
      .body(variableKey + ".valueInfo." + ObjectTypeImpl.VALUE_INFO_SERIALIZATION_DATA_FORMAT, equalTo("application/json"))
      .body(variableKey + ".valueInfo." + ObjectTypeImpl.VALUE_INFO_OBJECT_TYPE_NAME, equalTo(ArrayList.class.getName()))
      .when().get(SINGLE_TASK_VARIABLES_URL);

    // then
    verify(taskServiceMock).getVariablesTyped(EXAMPLE_TASK_ID, false);
  }

  @Test
  public void testGetVariablesForNonExistingTaskId() {
    when(taskServiceMock.getVariablesTyped(NON_EXISTING_ID, true)).thenThrow(new ProcessEngineException("task " + NON_EXISTING_ID + " doesn't exist"));

    given().pathParam("id", NON_EXISTING_ID)
      .header("accept", MediaType.APPLICATION_JSON)
      .then().expect().statusCode(Status.INTERNAL_SERVER_ERROR.getStatusCode()).contentType(ContentType.JSON)
      .body("type", equalTo(ProcessEngineException.class.getSimpleName()))
      .body("message", equalTo("task " + NON_EXISTING_ID + " doesn't exist"))
      .when().get(SINGLE_TASK_VARIABLES_URL);
  }

  @Test
  public void testGetVariablesThrowsAuthorizationException() {
    String message = "expected exception";
    when(taskServiceMock.getVariablesTyped(anyString(), anyBoolean())).thenThrow(new AuthorizationException(message));

    given()
      .pathParam("id", EXAMPLE_TASK_ID)
    .then().expect()
      .statusCode(Status.FORBIDDEN.getStatusCode())
      .contentType(ContentType.JSON)
      .body("type", equalTo(AuthorizationException.class.getSimpleName()))
      .body("message", equalTo(message))
    .when()
      .get(SINGLE_TASK_VARIABLES_URL);
  }

  @Test
  public void testVariableModification() {
    TaskServiceImpl taskServiceMock = mockTaskServiceImpl();

    Map<String, Object> messageBodyJson = new HashMap<String, Object>();

    String variableKey = "aKey";
    int variableValue = 123;

    Map<String, Object> modifications = VariablesBuilder.create().variable(variableKey, variableValue).getVariables();
    messageBodyJson.put("modifications", modifications);

    List<String> deletions = new ArrayList<String>();
    deletions.add("deleteKey");
    messageBodyJson.put("deletions", deletions);

    given().pathParam("id", EXAMPLE_TASK_ID).contentType(ContentType.JSON).body(messageBodyJson)
      .header("accept", MediaType.APPLICATION_JSON)
      .then().expect().statusCode(Status.NO_CONTENT.getStatusCode())
      .when().post(SINGLE_TASK_MODIFY_VARIABLES_URL);

    Map<String, Object> expectedModifications = new HashMap<String, Object>();
    expectedModifications.put(variableKey, variableValue);

    verify(taskServiceMock).updateVariables(eq(EXAMPLE_TASK_ID), argThat(new EqualsMap(expectedModifications)),
        argThat(new EqualsList(deletions)));
  }

  @Test
  public void testVariableModificationForNonExistingTaskId() {
    TaskServiceImpl taskServiceMock = mockTaskServiceImpl();

    doThrow(new ProcessEngineException("Cannot find task with id " + NON_EXISTING_ID)).when(taskServiceMock).updateVariables(anyString(), any(Map.class), any(List.class));

    Map<String, Object> messageBodyJson = new HashMap<String, Object>();

    String variableKey = "aKey";
    int variableValue = 123;
    Map<String, Object> modifications = VariablesBuilder.create().variable(variableKey, variableValue).getVariables();
    messageBodyJson.put("modifications", modifications);

    // engine exception is wrapped into a RestException with a contextual message
    given().pathParam("id", NON_EXISTING_ID).contentType(ContentType.JSON).body(messageBodyJson)
      .header("accept", MediaType.APPLICATION_JSON)
      .then().expect().statusCode(Status.INTERNAL_SERVER_ERROR.getStatusCode()).contentType(ContentType.JSON)
      .body("type", equalTo(RestException.class.getSimpleName()))
      .body("message", equalTo("Cannot modify variables for task " + NON_EXISTING_ID + ": Cannot find task with id " + NON_EXISTING_ID))
      .when().post(SINGLE_TASK_MODIFY_VARIABLES_URL);
  }

  @Test
  public void testEmptyVariableModification() {
    mockTaskServiceImpl();

    given().pathParam("id", EXAMPLE_TASK_ID).contentType(ContentType.JSON).body(EMPTY_JSON_OBJECT)
      .header("accept", MediaType.APPLICATION_JSON)
      .then().expect().statusCode(Status.NO_CONTENT.getStatusCode())
      .when().post(SINGLE_TASK_MODIFY_VARIABLES_URL);
  }

  @Test
  public void testVariableModificationThrowsAuthorizationException() {
    String variableKey = "aKey";
    int variableValue = 123;
    Map<String, Object> messageBodyJson = new HashMap<String, Object>();
    Map<String, Object> modifications = VariablesBuilder.create().variable(variableKey, variableValue).getVariables();
    messageBodyJson.put("modifications", modifications);

    TaskServiceImpl taskServiceMock = mockTaskServiceImpl();
    // NOTE(review): "excpected" is a typo, but it is harmless here — the same
    // variable is used for both the thrown and the asserted message
    String message = "excpected exception";
    doThrow(new AuthorizationException(message)).when(taskServiceMock).updateVariables(anyString(), any(Map.class), any(List.class));

    given()
      .pathParam("id", EXAMPLE_TASK_ID)
      .contentType(ContentType.JSON)
      .body(messageBodyJson)
    .then().expect()
      .statusCode(Status.FORBIDDEN.getStatusCode())
      .body("type", is(AuthorizationException.class.getSimpleName()))
      .body("message", is(message))
    .when()
      .post(SINGLE_TASK_MODIFY_VARIABLES_URL);
  }

  @Test
  public void testGetSingleVariable() {
    String variableKey = "aVariableKey";
    int variableValue = 123;

    when(taskServiceMock.getVariableTyped(eq(EXAMPLE_TASK_ID), eq(variableKey), anyBoolean()))
      .thenReturn(Variables.integerValue(variableValue));

    given().pathParam("id", EXAMPLE_TASK_ID).pathParam("varId", variableKey)
      .header("accept", MediaType.APPLICATION_JSON)
      .then().expect().statusCode(Status.OK.getStatusCode())
      .body("value", is(123))
      .body("type", is("Integer"))
      .when().get(SINGLE_TASK_SINGLE_VARIABLE_URL);
  }

  @Test
  public void testGetSingleVariableData() {
    // binary variables are served from the .../data sub-resource as octet-stream
    when(taskServiceMock.getVariableTyped(anyString(), eq(EXAMPLE_BYTES_VARIABLE_KEY), eq(false))).thenReturn(EXAMPLE_VARIABLE_VALUE_BYTES);

    given()
      .pathParam("id", MockProvider.EXAMPLE_TASK_ID)
      .pathParam("varId", EXAMPLE_BYTES_VARIABLE_KEY)
    .then()
      .expect()
        .statusCode(Status.OK.getStatusCode())
        .contentType(MediaType.APPLICATION_OCTET_STREAM)
    .when()
      .get(SINGLE_TASK_SINGLE_BINARY_VARIABLE_URL);

    verify(taskServiceMock).getVariableTyped(MockProvider.EXAMPLE_TASK_ID, EXAMPLE_BYTES_VARIABLE_KEY, false);
  }

  @Test
  public void testGetSingleVariableDataNonExisting() {
    when(taskServiceMock.getVariableTyped(anyString(), eq("nonExisting"), eq(false))).thenReturn(null);

    given()
      .pathParam("id", MockProvider.EXAMPLE_TASK_ID)
      .pathParam("varId", "nonExisting")
    .then()
      .expect()
        .statusCode(Status.NOT_FOUND.getStatusCode())
        .body("type", is(InvalidRequestException.class.getSimpleName()))
        .body("message", is("task variable with name " + "nonExisting" + " does not exist"))
    .when()
      .get(SINGLE_TASK_SINGLE_BINARY_VARIABLE_URL);

    verify(taskServiceMock).getVariableTyped(MockProvider.EXAMPLE_TASK_ID, "nonExisting", false);
  }

  @Test
  public void testGetSingleVariabledataNotBinary() {
    // requesting the binary sub-resource for a non-binary variable is a 400
    when(taskServiceMock.getVariableTyped(anyString(), eq(EXAMPLE_VARIABLE_KEY), eq(false))).thenReturn(EXAMPLE_VARIABLE_VALUE);

    given()
      .pathParam("id", MockProvider.EXAMPLE_TASK_ID)
      .pathParam("varId", EXAMPLE_VARIABLE_KEY)
    .then()
      .expect()
        .statusCode(Status.BAD_REQUEST.getStatusCode())
    .when()
      .get(SINGLE_TASK_SINGLE_BINARY_VARIABLE_URL);

    verify(taskServiceMock).getVariableTyped(MockProvider.EXAMPLE_TASK_ID, EXAMPLE_VARIABLE_KEY, false);
  }

  @Test
  public void testGetSingleObjectVariable() {
    // given
    String variableKey = "aVariableId";

    List<String> payload = Arrays.asList("a", "b");
    ObjectValue variableValue =
        MockObjectValue
            .fromObjectValue(Variables
                .objectValue(payload)
                .serializationDataFormat("application/json")
                .create())
            .objectTypeName(ArrayList.class.getName())
            .serializedValue("a serialized value"); // this should differ from the serialized json

    when(taskServiceMock.getVariableTyped(eq(EXAMPLE_TASK_ID), eq(variableKey), anyBoolean())).thenReturn(variableValue);

    // when
    given().pathParam("id", EXAMPLE_TASK_ID).pathParam("varId", variableKey)
      .then().expect().statusCode(Status.OK.getStatusCode())
      .body("value", equalTo(payload))
      .body("type", equalTo("Object"))
      .body("valueInfo." + ObjectTypeImpl.VALUE_INFO_SERIALIZATION_DATA_FORMAT, equalTo("application/json"))
      .body("valueInfo." + ObjectTypeImpl.VALUE_INFO_OBJECT_TYPE_NAME, equalTo(ArrayList.class.getName()))
      .when().get(SINGLE_TASK_SINGLE_VARIABLE_URL);

    // then
    verify(taskServiceMock).getVariableTyped(EXAMPLE_TASK_ID, variableKey, true);
  }

  @Test
  public void testGetSingleObjectVariableSerialized() {
    // given
    String variableKey = "aVariableId";

    ObjectValue variableValue =
        Variables
          .serializedObjectValue("a serialized value")
          .serializationDataFormat("application/json")
          .objectTypeName(ArrayList.class.getName())
          .create();

    when(taskServiceMock.getVariableTyped(eq(EXAMPLE_TASK_ID), eq(variableKey), anyBoolean())).thenReturn(variableValue);

    // when: deserializeValue=false must return the raw serialized form
    given()
      .pathParam("id", EXAMPLE_TASK_ID)
      .pathParam("varId", variableKey)
      .queryParam("deserializeValue", false)
    .then().expect().statusCode(Status.OK.getStatusCode())
      .body("value", equalTo("a serialized value"))
      .body("type", equalTo("Object"))
      .body("valueInfo." + ObjectTypeImpl.VALUE_INFO_SERIALIZATION_DATA_FORMAT, equalTo("application/json"))
      .body("valueInfo." + ObjectTypeImpl.VALUE_INFO_OBJECT_TYPE_NAME, equalTo(ArrayList.class.getName()))
      .when().get(SINGLE_TASK_SINGLE_VARIABLE_URL);

    // then
    verify(taskServiceMock).getVariableTyped(EXAMPLE_TASK_ID, variableKey, false);
  }

  @Test
  public void testNonExistingVariable() {
    String variableKey = "aVariableKey";

    when(taskServiceMock.getVariable(eq(EXAMPLE_TASK_ID), eq(variableKey))).thenReturn(null);

    given().pathParam("id", EXAMPLE_TASK_ID).pathParam("varId", variableKey)
      .header("accept", MediaType.APPLICATION_JSON)
      .then().expect().statusCode(Status.NOT_FOUND.getStatusCode())
      .body("type", is(InvalidRequestException.class.getSimpleName()))
      .body("message", is("task variable with name " + variableKey + " does not exist"))
      .when().get(SINGLE_TASK_SINGLE_VARIABLE_URL);
  }

  @Test
  public void testGetVariableForNonExistingTaskId() {
    String variableKey = "aVariableKey";

    when(taskServiceMock.getVariableTyped(eq(NON_EXISTING_ID), eq(variableKey), anyBoolean()))
      .thenThrow(new ProcessEngineException("task " +
NON_EXISTING_ID + " doesn't exist")); given().pathParam("id", NON_EXISTING_ID).pathParam("varId", variableKey) .header("accept", MediaType.APPLICATION_JSON) .then().expect().statusCode(Status.INTERNAL_SERVER_ERROR.getStatusCode()) .body("type", is(RestException.class.getSimpleName())) .body("message", is("Cannot get task variable " + variableKey + ": task " + NON_EXISTING_ID + " doesn't exist")) .when().get(SINGLE_TASK_SINGLE_VARIABLE_URL); } @Test public void testGetSingleVariableThrowsAuthorizationException() { String variableKey = "aVariableKey"; String message = "excpected exception"; when(taskServiceMock.getVariableTyped(anyString(), anyString(), anyBoolean())).thenThrow(new AuthorizationException(message)); given() .pathParam("id", EXAMPLE_TASK_ID) .pathParam("varId", variableKey) .then().expect() .statusCode(Status.FORBIDDEN.getStatusCode()) .body("type", is(AuthorizationException.class.getSimpleName())) .body("message", is(message)) .when() .get(SINGLE_TASK_SINGLE_VARIABLE_URL); } @Test public void testGetFileVariable() { String variableKey = "aVariableKey"; final byte[] byteContent = "some bytes".getBytes(); String filename = "test.txt"; String mimeType = "text/plain"; FileValue variableValue = Variables.fileValue(filename).file(byteContent).mimeType(mimeType).create(); when(taskServiceMock.getVariableTyped(eq(EXAMPLE_TASK_ID), eq(variableKey), anyBoolean())).thenReturn(variableValue); given().pathParam("id", EXAMPLE_TASK_ID).pathParam("varId", variableKey) .then().expect() .statusCode(Status.OK.getStatusCode()) .contentType(ContentType.JSON.toString()) .and() .body("valueInfo.mimeType", equalTo(mimeType)) .body("valueInfo.filename", equalTo(filename)) .body("value", nullValue()) .when().get(SINGLE_TASK_SINGLE_VARIABLE_URL); } @Test public void testGetNullFileVariable() { String variableKey = "aVariableKey"; String filename = "test.txt"; String mimeType = "text/plain"; FileValue variableValue = Variables.fileValue(filename).mimeType(mimeType).create(); 
when(taskServiceMock.getVariableTyped(eq(MockProvider.EXAMPLE_TASK_ID), eq(variableKey), anyBoolean())) .thenReturn(variableValue); given() .pathParam("id", MockProvider.EXAMPLE_TASK_ID) .pathParam("varId", variableKey) .then().expect() .statusCode(Status.OK.getStatusCode()) .contentType(ContentType.TEXT.toString()) .and() .body(is(equalTo(""))) .when().get(SINGLE_TASK_SINGLE_BINARY_VARIABLE_URL); } @Test public void testGetFileVariableDownloadWithType() { String variableKey = "aVariableKey"; final byte[] byteContent = "some bytes".getBytes(); String filename = "test.txt"; FileValue variableValue = Variables.fileValue(filename).file(byteContent).mimeType(ContentType.TEXT.toString()).create(); when(taskServiceMock.getVariableTyped(eq(EXAMPLE_TASK_ID), eq(variableKey), anyBoolean())).thenReturn(variableValue); given() .pathParam("id", EXAMPLE_TASK_ID) .pathParam("varId", variableKey) .then().expect() .statusCode(Status.OK.getStatusCode()) .contentType(ContentType.TEXT.toString()) .and() .body(is(equalTo(new String(byteContent)))) .when().get(SINGLE_TASK_SINGLE_BINARY_VARIABLE_URL); } @Test public void testGetFileVariableDownloadWithTypeAndEncoding() { String variableKey = "aVariableKey"; final byte[] byteContent = "some bytes".getBytes(); String filename = "test.txt"; String encoding = "UTF-8"; FileValue variableValue = Variables.fileValue(filename).file(byteContent).mimeType(ContentType.TEXT.toString()).encoding(encoding).create(); when(taskServiceMock.getVariableTyped(eq(EXAMPLE_TASK_ID), eq(variableKey), anyBoolean())).thenReturn(variableValue); given() .pathParam("id", EXAMPLE_TASK_ID) .pathParam("varId", variableKey) .then().expect() .statusCode(Status.OK.getStatusCode()) .contentType(either(CoreMatchers.<Object>equalTo(ContentType.TEXT.toString() + "; charset=UTF-8")).or(CoreMatchers.<Object>equalTo(ContentType.TEXT.toString() + ";charset=UTF-8"))) .and() .body(is(equalTo(new String(byteContent)))) .when().get(SINGLE_TASK_SINGLE_BINARY_VARIABLE_URL); } @Test 
public void testGetFileVariableDownloadWithoutType() { String variableKey = "aVariableKey"; final byte[] byteContent = "some bytes".getBytes(); String filename = "test.txt"; FileValue variableValue = Variables.fileValue(filename).file(byteContent).create(); when(taskServiceMock.getVariableTyped(eq(EXAMPLE_TASK_ID), eq(variableKey), anyBoolean())).thenReturn(variableValue); given() .pathParam("id", EXAMPLE_TASK_ID) .pathParam("varId", variableKey) .then().expect() .statusCode(Status.OK.getStatusCode()) .contentType(MediaType.APPLICATION_OCTET_STREAM) .and() .body(is(equalTo(new String(byteContent)))) .header("Content-Disposition", containsString(filename)) .when().get(SINGLE_TASK_SINGLE_BINARY_VARIABLE_URL); } @Test public void testCannotDownloadVariableOtherThanFile() { String variableKey = "aVariableKey"; BooleanValue variableValue = Variables.booleanValue(true); when(taskServiceMock.getVariableTyped(eq(EXAMPLE_TASK_ID), eq(variableKey), anyBoolean())).thenReturn(variableValue); given() .pathParam("id", EXAMPLE_TASK_ID) .pathParam("varId", variableKey) .then().expect() .statusCode(Status.BAD_REQUEST.getStatusCode()) .when().get(SINGLE_TASK_SINGLE_BINARY_VARIABLE_URL); } @Test public void testPutSingleVariable() { String variableKey = "aVariableKey"; String variableValue = "aVariableValue"; Map<String, Object> variableJson = VariablesBuilder.getVariableValueMap(variableValue); given().pathParam("id", EXAMPLE_TASK_ID).pathParam("varId", variableKey) .contentType(ContentType.JSON).body(variableJson) .header("accept", MediaType.APPLICATION_JSON) .then().expect().statusCode(Status.NO_CONTENT.getStatusCode()) .when().put(SINGLE_TASK_PUT_SINGLE_VARIABLE_URL); verify(taskServiceMock).setVariable(eq(EXAMPLE_TASK_ID), eq(variableKey), argThat(EqualsUntypedValue.matcher().value(variableValue))); } @Test public void testPutSingleVariableWithTypeInteger() { String variableKey = "aVariableKey"; Integer variableValue = 123; String type = "Integer"; Map<String, Object> 
variableJson = VariablesBuilder.getVariableValueMap(variableValue, type); given().pathParam("id", EXAMPLE_TASK_ID).pathParam("varId", variableKey) .contentType(ContentType.JSON).body(variableJson) .header("accept", MediaType.APPLICATION_JSON) .then().expect().statusCode(Status.NO_CONTENT.getStatusCode()) .when().put(SINGLE_TASK_PUT_SINGLE_VARIABLE_URL); verify(taskServiceMock).setVariable(eq(EXAMPLE_TASK_ID), eq(variableKey), argThat(EqualsPrimitiveValue.integerValue(variableValue))); } @Test public void testPutSingleVariableWithUnparseableInteger() { String variableKey = "aVariableKey"; String variableValue = "1abc"; String type = "Integer"; Map<String, Object> variableJson = VariablesBuilder.getVariableValueMap(variableValue, type); given().pathParam("id", EXAMPLE_TASK_ID).pathParam("varId", variableKey) .contentType(ContentType.JSON).body(variableJson) .header("accept", MediaType.APPLICATION_JSON) .then().expect().statusCode(Status.BAD_REQUEST.getStatusCode()) .body("type", equalTo(InvalidRequestException.class.getSimpleName())) .body("message", equalTo("Cannot put task variable " + variableKey + ": " + ErrorMessageHelper.getExpectedFailingConversionMessage(variableValue, type, Integer.class))) .when().put(SINGLE_TASK_PUT_SINGLE_VARIABLE_URL); } @Test public void testPutSingleVariableWithTypeShort() { String variableKey = "aVariableKey"; Short variableValue = 123; String type = "Short"; Map<String, Object> variableJson = VariablesBuilder.getVariableValueMap(variableValue, type); given().pathParam("id", EXAMPLE_TASK_ID).pathParam("varId", variableKey) .contentType(ContentType.JSON).body(variableJson) .header("accept", MediaType.APPLICATION_JSON) .then().expect().statusCode(Status.NO_CONTENT.getStatusCode()) .when().put(SINGLE_TASK_PUT_SINGLE_VARIABLE_URL); verify(taskServiceMock).setVariable(eq(EXAMPLE_TASK_ID), eq(variableKey), argThat(EqualsPrimitiveValue.shortValue(variableValue))); } @Test public void testPutSingleVariableWithUnparseableShort() { String 
variableKey = "aVariableKey"; String variableValue = "1abc"; String type = "Short"; Map<String, Object> variableJson = VariablesBuilder.getVariableValueMap(variableValue, type); given().pathParam("id", EXAMPLE_TASK_ID).pathParam("varId", variableKey) .contentType(ContentType.JSON).body(variableJson) .header("accept", MediaType.APPLICATION_JSON) .then().expect().statusCode(Status.BAD_REQUEST.getStatusCode()) .body("type", equalTo(InvalidRequestException.class.getSimpleName())) .body("message", equalTo("Cannot put task variable " + variableKey + ": " + ErrorMessageHelper.getExpectedFailingConversionMessage(variableValue, type, Short.class))) .when().put(SINGLE_TASK_PUT_SINGLE_VARIABLE_URL); } @Test public void testPutSingleVariableWithTypeLong() { String variableKey = "aVariableKey"; Long variableValue = Long.valueOf(123); String type = "Long"; Map<String, Object> variableJson = VariablesBuilder.getVariableValueMap(variableValue, type); given().pathParam("id", EXAMPLE_TASK_ID).pathParam("varId", variableKey) .contentType(ContentType.JSON).body(variableJson) .header("accept", MediaType.APPLICATION_JSON) .then().expect().statusCode(Status.NO_CONTENT.getStatusCode()) .when().put(SINGLE_TASK_PUT_SINGLE_VARIABLE_URL); verify(taskServiceMock).setVariable(eq(EXAMPLE_TASK_ID), eq(variableKey), argThat(EqualsPrimitiveValue.longValue(variableValue))); } @Test public void testPutSingleVariableWithUnparseableLong() { String variableKey = "aVariableKey"; String variableValue = "1abc"; String type = "Long"; Map<String, Object> variableJson = VariablesBuilder.getVariableValueMap(variableValue, type); given().pathParam("id", MockProvider.EXAMPLE_EXECUTION_ID).pathParam("varId", variableKey) .contentType(ContentType.JSON).body(variableJson) .header("accept", MediaType.APPLICATION_JSON) .then().expect().statusCode(Status.BAD_REQUEST.getStatusCode()) .body("type", equalTo(InvalidRequestException.class.getSimpleName())) .body("message", equalTo("Cannot put task variable " + variableKey 
+ ": " + ErrorMessageHelper.getExpectedFailingConversionMessage(variableValue, type, Long.class))) .when().put(SINGLE_TASK_PUT_SINGLE_VARIABLE_URL); } @Test public void testPutSingleVariableWithTypeDouble() { String variableKey = "aVariableKey"; Double variableValue = 123.456; String type = "Double"; Map<String, Object> variableJson = VariablesBuilder.getVariableValueMap(variableValue, type); given().pathParam("id", EXAMPLE_TASK_ID).pathParam("varId", variableKey) .contentType(ContentType.JSON).body(variableJson) .header("accept", MediaType.APPLICATION_JSON) .then().expect().statusCode(Status.NO_CONTENT.getStatusCode()) .when().put(SINGLE_TASK_PUT_SINGLE_VARIABLE_URL); verify(taskServiceMock).setVariable(eq(EXAMPLE_TASK_ID), eq(variableKey), argThat(EqualsPrimitiveValue.doubleValue(variableValue))); } @Test public void testPutSingleVariableWithUnparseableDouble() { String variableKey = "aVariableKey"; String variableValue = "1abc"; String type = "Double"; Map<String, Object> variableJson = VariablesBuilder.getVariableValueMap(variableValue, type); given().pathParam("id", EXAMPLE_TASK_ID).pathParam("varId", variableKey) .contentType(ContentType.JSON).body(variableJson) .header("accept", MediaType.APPLICATION_JSON) .then().expect().statusCode(Status.BAD_REQUEST.getStatusCode()) .body("type", equalTo(InvalidRequestException.class.getSimpleName())) .body("message", equalTo("Cannot put task variable " + variableKey + ": " + ErrorMessageHelper.getExpectedFailingConversionMessage(variableValue, type, Double.class))) .when().put(SINGLE_TASK_PUT_SINGLE_VARIABLE_URL); } @Test public void testPutSingleVariableWithTypeBoolean() { String variableKey = "aVariableKey"; Boolean variableValue = true; String type = "Boolean"; Map<String, Object> variableJson = VariablesBuilder.getVariableValueMap(variableValue, type); given().pathParam("id", EXAMPLE_TASK_ID).pathParam("varId", variableKey) .contentType(ContentType.JSON).body(variableJson) .header("accept", 
MediaType.APPLICATION_JSON)
    .then().expect().statusCode(Status.NO_CONTENT.getStatusCode())
    .when().put(SINGLE_TASK_PUT_SINGLE_VARIABLE_URL);

    verify(taskServiceMock).setVariable(eq(EXAMPLE_TASK_ID), eq(variableKey),
        argThat(EqualsPrimitiveValue.booleanValue(variableValue)));
  }

  /** PUT of a Date-typed variable forwards the parsed date to the task service. */
  @Test
  public void testPutSingleVariableWithTypeDate() throws Exception {
    Date now = new Date();
    SimpleDateFormat pattern = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss");

    String variableKey = "aVariableKey";
    String variableValue = pattern.format(now);
    String type = "Date";

    // round-trip through the format so the expectation has the same (second) precision
    Date expectedValue = pattern.parse(variableValue);

    Map<String, Object> variableJson = VariablesBuilder.getVariableValueMap(variableValue, type);

    given().pathParam("id", EXAMPLE_TASK_ID).pathParam("varId", variableKey)
      .contentType(ContentType.JSON).body(variableJson)
      .header("accept", MediaType.APPLICATION_JSON)
    .then().expect().statusCode(Status.NO_CONTENT.getStatusCode())
    .when().put(SINGLE_TASK_PUT_SINGLE_VARIABLE_URL);

    verify(taskServiceMock).setVariable(eq(EXAMPLE_TASK_ID), eq(variableKey),
        argThat(EqualsPrimitiveValue.dateValue(expectedValue)));
  }

  /** PUT of an unparseable Date value yields 400 Bad Request with a conversion error message. */
  @Test
  public void testPutSingleVariableWithUnparseableDate() {
    String variableKey = "aVariableKey";
    String variableValue = "1abc";
    String type = "Date";

    Map<String, Object> variableJson = VariablesBuilder.getVariableValueMap(variableValue, type);

    given().pathParam("id", EXAMPLE_TASK_ID).pathParam("varId", variableKey)
      .contentType(ContentType.JSON).body(variableJson)
      .header("accept", MediaType.APPLICATION_JSON)
    .then().expect().statusCode(Status.BAD_REQUEST.getStatusCode())
      .body("type", equalTo(InvalidRequestException.class.getSimpleName()))
      .body("message", equalTo("Cannot put task variable " + variableKey + ": "
          + ErrorMessageHelper.getExpectedFailingConversionMessage(variableValue, type, Date.class)))
    .when().put(SINGLE_TASK_PUT_SINGLE_VARIABLE_URL);
  }

  /** PUT with an unknown value type yields 400 Bad Request. */
  @Test
  public void testPutSingleVariableWithNotSupportedType() {
    String variableKey = "aVariableKey";
    String variableValue = "1abc";
    String type = "X";

    Map<String, Object> variableJson = VariablesBuilder.getVariableValueMap(variableValue, type);

    given().pathParam("id", EXAMPLE_TASK_ID).pathParam("varId", variableKey)
      .contentType(ContentType.JSON).body(variableJson)
      .header("accept", MediaType.APPLICATION_JSON)
    .then().expect().statusCode(Status.BAD_REQUEST.getStatusCode())
      .body("type", equalTo(InvalidRequestException.class.getSimpleName()))
      .body("message", equalTo("Cannot put task variable " + variableKey + ": Unsupported value type 'X'"))
    .when().put(SINGLE_TASK_PUT_SINGLE_VARIABLE_URL);
  }

  /** PUT without a value sets an untyped null variable. */
  @Test
  public void testPutSingleVariableWithNoValue() {
    String variableKey = "aVariableKey";

    given().pathParam("id", EXAMPLE_TASK_ID).pathParam("varId", variableKey)
      .contentType(ContentType.JSON).body(EMPTY_JSON_OBJECT)
      .header("accept", MediaType.APPLICATION_JSON)
    .then().expect().statusCode(Status.NO_CONTENT.getStatusCode())
    .when().put(SINGLE_TASK_PUT_SINGLE_VARIABLE_URL);

    verify(taskServiceMock).setVariable(eq(EXAMPLE_TASK_ID), eq(variableKey),
        argThat(EqualsNullValue.matcher()));
  }

  /** A ProcessEngineException while putting a variable surfaces as 500 with a wrapped message. */
  @Test
  public void testPutVariableForNonExistingTaskId() {
    String variableKey = "aVariableKey";
    String variableValue = "aVariableValue";

    Map<String, Object> variableJson = VariablesBuilder.getVariableValueMap(variableValue);

    doThrow(new ProcessEngineException("Cannot find task with id " + NON_EXISTING_ID))
      .when(taskServiceMock).setVariable(eq(NON_EXISTING_ID), eq(variableKey), any());

    given().pathParam("id", NON_EXISTING_ID).pathParam("varId", variableKey)
      .contentType(ContentType.JSON).body(variableJson)
      .header("accept", MediaType.APPLICATION_JSON)
    .then().expect().statusCode(Status.INTERNAL_SERVER_ERROR.getStatusCode())
      .body("type", is(RestException.class.getSimpleName()))
      .body("message", is("Cannot put task variable " + variableKey + ": Cannot find task with id " + NON_EXISTING_ID))
    .when().put(SINGLE_TASK_PUT_SINGLE_VARIABLE_URL);
  }

  /** An AuthorizationException while putting a variable surfaces as 403 Forbidden. */
  @Test
  public void testPutSingleVariableThrowsAuthorizationException() {
    String variableKey = "aVariableKey";
    String variableValue = "1abc";
    String type = "String";
    Map<String, Object> variableJson = VariablesBuilder.getVariableValueMap(variableValue, type);

    String message = "expected exception";
    doThrow(new AuthorizationException(message))
      .when(taskServiceMock).setVariable(anyString(), anyString(), any());

    given()
      .pathParam("id", EXAMPLE_TASK_ID)
      .pathParam("varId", variableKey)
      .contentType(ContentType.JSON)
      .body(variableJson)
    .then().expect()
      .statusCode(Status.FORBIDDEN.getStatusCode())
      .body("type", equalTo(AuthorizationException.class.getSimpleName()))
      .body("message", equalTo(message))
    .when()
      .put(SINGLE_TASK_PUT_SINGLE_VARIABLE_URL);
  }

  /** POSTing binary multipart data sets a bytes variable. */
  @Test
  public void testPostSingleBinaryVariable() throws Exception {
    byte[] bytes = "someContent".getBytes();

    String variableKey = "aVariableKey";

    given()
      .pathParam("id", EXAMPLE_TASK_ID).pathParam("varId", variableKey)
      .multiPart("data", null, bytes)
      .header("accept", MediaType.APPLICATION_JSON)
    .expect()
      .statusCode(Status.NO_CONTENT.getStatusCode())
    .when()
      .post(SINGLE_TASK_SINGLE_BINARY_VARIABLE_URL);

    verify(taskServiceMock).setVariable(eq(EXAMPLE_TASK_ID), eq(variableKey),
        argThat(EqualsPrimitiveValue.bytesValue(bytes)));
  }

  /** POSTing an empty binary payload sets an empty bytes variable. */
  @Test
  public void testPostSingleBinaryVariableWithNoValue() throws Exception {
    byte[] bytes = new byte[0];

    String variableKey = "aVariableKey";

    given()
      .pathParam("id", EXAMPLE_TASK_ID).pathParam("varId", variableKey)
      .multiPart("data", null, bytes)
      .header("accept", MediaType.APPLICATION_JSON)
    .expect()
      .statusCode(Status.NO_CONTENT.getStatusCode())
    .when()
      .post(SINGLE_TASK_SINGLE_BINARY_VARIABLE_URL);

    verify(taskServiceMock).setVariable(eq(EXAMPLE_TASK_ID), eq(variableKey),
        argThat(EqualsPrimitiveValue.bytesValue(bytes)));
  }

  /** An AuthorizationException while posting binary data surfaces as 403 Forbidden. */
  @Test
  public void testPutSingleBinaryVariableThrowsAuthorizationException() {
    byte[] bytes = "someContent".getBytes();
    String variableKey = "aVariableKey";

    String message = "expected exception";
    doThrow(new AuthorizationException(message))
      .when(taskServiceMock).setVariable(anyString(), anyString(), any());

    given()
      .pathParam("id", EXAMPLE_TASK_ID)
      .pathParam("varId", variableKey)
      .multiPart("data", "unspecified", bytes)
    .expect()
      .statusCode(Status.FORBIDDEN.getStatusCode())
      .contentType(ContentType.JSON)
      .body("type", equalTo(AuthorizationException.class.getSimpleName()))
      .body("message", equalTo(message))
    .when()
      .post(SINGLE_TASK_SINGLE_BINARY_VARIABLE_URL);
  }

  /** POSTing JSON multipart data with a type part sets a deserialized object variable. */
  @Test
  public void testPostSingleSerializableVariable() throws Exception {
    ArrayList<String> serializable = new ArrayList<String>();
    serializable.add("foo");

    ObjectMapper mapper = new ObjectMapper();
    String jsonBytes = mapper.writeValueAsString(serializable);
    String typeName = TypeFactory.defaultInstance().constructType(serializable.getClass()).toCanonical();

    String variableKey = "aVariableKey";

    given()
      .pathParam("id", EXAMPLE_TASK_ID).pathParam("varId", variableKey)
      .multiPart("data", jsonBytes, MediaType.APPLICATION_JSON)
      .multiPart("type", typeName, MediaType.TEXT_PLAIN)
      .header("accept", MediaType.APPLICATION_JSON)
    .expect()
      .statusCode(Status.NO_CONTENT.getStatusCode())
    .when()
      .post(SINGLE_TASK_SINGLE_BINARY_VARIABLE_URL);

    verify(taskServiceMock).setVariable(eq(MockProvider.EXAMPLE_TASK_ID), eq(variableKey),
        argThat(EqualsObjectValue.objectValueMatcher().isDeserialized().value(serializable)));
  }

  /** An unsupported content type for the data part yields 400 and no service interaction. */
  @Test
  public void testPostSingleSerializableVariableUnsupportedMediaType() throws Exception {
    ArrayList<String> serializable = new ArrayList<String>();
    serializable.add("foo");

    ObjectMapper mapper = new ObjectMapper();
    String jsonBytes = mapper.writeValueAsString(serializable);
    String typeName = TypeFactory.defaultInstance().constructType(serializable.getClass()).toCanonical();

    String variableKey = "aVariableKey";

    given()
      .pathParam("id", EXAMPLE_TASK_ID).pathParam("varId", variableKey)
      .multiPart("data", jsonBytes, "unsupported")
      .multiPart("type", typeName, MediaType.TEXT_PLAIN)
      .header("accept", MediaType.APPLICATION_JSON)
    .expect()
      .statusCode(Status.BAD_REQUEST.getStatusCode())
      .body(containsString("Unrecognized content type for serialized java type: unsupported"))
    .when()
      .post(SINGLE_TASK_SINGLE_BINARY_VARIABLE_URL);

    verify(taskServiceMock, never()).setVariable(eq(EXAMPLE_TASK_ID), eq(variableKey), eq(serializable));
  }

  /** POSTing a file part with mime type and encoding creates a fully described file variable. */
  @Test
  public void testPostSingleFileVariableWithEncodingAndMimeType() throws Exception {
    byte[] value = "some text".getBytes();
    String variableKey = "aVariableKey";
    String encoding = "utf-8";
    String filename = "test.txt";
    String mimetype = MediaType.TEXT_PLAIN;

    given()
      .pathParam("id", EXAMPLE_TASK_ID).pathParam("varId", variableKey)
      .multiPart("data", filename, value, mimetype + "; encoding="+encoding)
      .header("accept", MediaType.APPLICATION_JSON)
    .expect()
      .statusCode(Status.NO_CONTENT.getStatusCode())
    .when()
      .post(SINGLE_TASK_SINGLE_BINARY_VARIABLE_URL);

    ArgumentCaptor<FileValue> captor = ArgumentCaptor.forClass(FileValue.class);
    verify(taskServiceMock).setVariable(eq(MockProvider.EXAMPLE_TASK_ID), eq(variableKey),
        captor.capture());
    FileValue captured = captor.getValue();
    assertThat(captured.getEncoding(), is(encoding));
    assertThat(captured.getFilename(), is(filename));
    assertThat(captured.getMimeType(), is(mimetype));
    assertThat(IoUtil.readInputStream(captured.getValue(), null), is(value));
  }

  /**
   * POSTing a file part with only a mime type creates a file variable without encoding.
   *
   * FIX: removed the unused local {@code base64} (dead code; the raw bytes are posted directly).
   */
  @Test
  public void testPostSingleFileVariableWithMimeType() throws Exception {
    byte[] value = "some text".getBytes();
    String variableKey = "aVariableKey";
    String filename = "test.txt";
    String mimetype = MediaType.TEXT_PLAIN;

    given()
      .pathParam("id", EXAMPLE_TASK_ID).pathParam("varId", variableKey)
      .multiPart("data", filename, value, mimetype)
      .header("accept", MediaType.APPLICATION_JSON)
    .expect()
      .statusCode(Status.NO_CONTENT.getStatusCode())
    .when()
      .post(SINGLE_TASK_SINGLE_BINARY_VARIABLE_URL);

    ArgumentCaptor<FileValue> captor = ArgumentCaptor.forClass(FileValue.class);
    verify(taskServiceMock).setVariable(eq(MockProvider.EXAMPLE_TASK_ID), eq(variableKey),
        captor.capture());
    FileValue captured = captor.getValue();
    assertThat(captured.getEncoding(), is(nullValue()));
    assertThat(captured.getFilename(), is(filename));
    assertThat(captured.getMimeType(), is(mimetype));
    assertThat(IoUtil.readInputStream(captured.getValue(), null), is(value));
  }

  /** POSTing an encoding without a mime type is rejected with 400. */
  @Test
  public void testPostSingleFileVariableWithEncoding() throws Exception {
    byte[] value = "some text".getBytes();
    String variableKey = "aVariableKey";
    String encoding = "utf-8";
    String filename = "test.txt";

    given()
      .pathParam("id", EXAMPLE_TASK_ID).pathParam("varId", variableKey)
      .multiPart("data", filename, value, "encoding="+encoding)
      .header("accept", MediaType.APPLICATION_JSON)
    .expect()
      //when the user passes an encoding, he has to provide the type, too
      .statusCode(Status.BAD_REQUEST.getStatusCode())
    .when()
      .post(SINGLE_TASK_SINGLE_BINARY_VARIABLE_URL);
  }

  /** POSTing only a filename defaults the mime type to application/octet-stream. */
  @Test
  public void testPostSingleFileVariableOnlyFilename() throws Exception {
    String variableKey = "aVariableKey";
    String filename = "test.txt";

    given()
      .pathParam("id", EXAMPLE_TASK_ID).pathParam("varId", variableKey)
      .multiPart("data", filename, new byte[0])
      .header("accept", MediaType.APPLICATION_JSON)
    .expect()
      .statusCode(Status.NO_CONTENT.getStatusCode())
    .when()
      .post(SINGLE_TASK_SINGLE_BINARY_VARIABLE_URL);

    ArgumentCaptor<FileValue> captor = ArgumentCaptor.forClass(FileValue.class);
    verify(taskServiceMock).setVariable(eq(MockProvider.EXAMPLE_TASK_ID), eq(variableKey),
        captor.capture());
    FileValue captured = captor.getValue();
    assertThat(captured.getEncoding(), is(nullValue()));
    assertThat(captured.getFilename(), is(filename));
    assertThat(captured.getMimeType(), is(MediaType.APPLICATION_OCTET_STREAM));
    assertThat(captured.getValue().available(), is(0));
  }

  /** PUT of a serialized object value forwards format and root type to the task service. */
  @Test
  public void testPutSingleVariableFromSerialized() throws Exception {
    String serializedValue = "{\"prop\" : \"value\"}";
    Map<String, Object> requestJson = VariablesBuilder
        .getObjectValueMap(serializedValue, ValueType.OBJECT.getName(), "aDataFormat", "aRootType");

    String variableKey = "aVariableKey";

    given()
      .pathParam("id", MockProvider.EXAMPLE_TASK_ID).pathParam("varId", variableKey)
      .contentType(ContentType.JSON)
      .body(requestJson)
    .expect()
      .statusCode(Status.NO_CONTENT.getStatusCode())
    .when()
      .put(SINGLE_TASK_PUT_SINGLE_VARIABLE_URL);

    verify(taskServiceMock).setVariable(
        eq(MockProvider.EXAMPLE_TASK_ID), eq(variableKey),
        argThat(EqualsObjectValue.objectValueMatcher()
          .serializedValue(serializedValue)
          .serializationFormat("aDataFormat")
          .objectTypeName("aRootType")));
  }

  /** PUT of a serialized value with an unknown type yields 400 Bad Request. */
  @Test
  public void testPutSingleVariableFromInvalidSerialized() throws Exception {
    String serializedValue = "{\"prop\" : \"value\"}";

    Map<String, Object> requestJson = VariablesBuilder
        .getObjectValueMap(serializedValue, "aNonExistingType", null, null);

    String variableKey = "aVariableKey";

    given()
      .pathParam("id", MockProvider.EXAMPLE_TASK_ID).pathParam("varId", variableKey)
      .contentType(ContentType.JSON)
      .body(requestJson)
    .expect()
      .statusCode(Status.BAD_REQUEST.getStatusCode())
      .body("type", equalTo(InvalidRequestException.class.getSimpleName()))
      .body("message", equalTo("Cannot put task variable aVariableKey: Unsupported value type 'aNonExistingType'"))
    .when()
      .put(SINGLE_TASK_PUT_SINGLE_VARIABLE_URL);
  }

  /** PUT of a serialized object value without a value sets a null object variable. */
  @Test
  public void testPutSingleVariableFromSerializedWithNoValue() {
    String variableKey = "aVariableKey";
    Map<String, Object> requestJson = VariablesBuilder
        .getObjectValueMap(null, ValueType.OBJECT.getName(), null, null);

    given().pathParam("id", MockProvider.EXAMPLE_TASK_ID).pathParam("varId", variableKey)
      .contentType(ContentType.JSON).body(requestJson)
    .then().expect().statusCode(Status.NO_CONTENT.getStatusCode())
    .when().put(SINGLE_TASK_PUT_SINGLE_VARIABLE_URL);

    verify(taskServiceMock).setVariable(
        eq(MockProvider.EXAMPLE_TASK_ID), eq(variableKey),
        argThat(EqualsObjectValue.objectValueMatcher()));
  }

  /** DELETE of a variable removes it via the task service. */
  @Test
  public void testDeleteSingleVariable() {
    String variableKey = "aVariableKey";

    given().pathParam("id", EXAMPLE_TASK_ID).pathParam("varId", variableKey)
      .header("accept", MediaType.APPLICATION_JSON)
    .then().expect().statusCode(Status.NO_CONTENT.getStatusCode())
    .when().delete(SINGLE_TASK_DELETE_SINGLE_VARIABLE_URL);

    verify(taskServiceMock).removeVariable(eq(EXAMPLE_TASK_ID), eq(variableKey));
  }

  /** A ProcessEngineException while deleting a variable surfaces as 500 with a wrapped message. */
  @Test
  public void testDeleteVariableForNonExistingTaskId() {
    String variableKey = "aVariableKey";

    doThrow(new ProcessEngineException("Cannot find task with id " + NON_EXISTING_ID))
      .when(taskServiceMock).removeVariable(eq(NON_EXISTING_ID), eq(variableKey));

    given().pathParam("id", NON_EXISTING_ID).pathParam("varId", variableKey)
      .header("accept", MediaType.APPLICATION_JSON)
    .then().expect().statusCode(Status.INTERNAL_SERVER_ERROR.getStatusCode())
      .contentType(ContentType.JSON)
      .body("type", is(RestException.class.getSimpleName()))
      .body("message", is("Cannot delete task variable " + variableKey + ": Cannot find task with id " + NON_EXISTING_ID))
    .when().delete(SINGLE_TASK_DELETE_SINGLE_VARIABLE_URL);
  }

  /** An AuthorizationException while deleting a variable surfaces as 403 Forbidden. */
  @Test
  public void testDeleteVariableThrowsAuthorizationException() {
    String variableKey = "aVariableKey";
    String message = "expected exception";
    doThrow(new AuthorizationException(message))
      .when(taskServiceMock).removeVariable(anyString(), anyString());

    given()
      .pathParam("id", EXAMPLE_TASK_ID)
      .pathParam("varId", variableKey)
    .then().expect()
      .statusCode(Status.FORBIDDEN.getStatusCode())
      .contentType(ContentType.JSON)
      .body("type", is(AuthorizationException.class.getSimpleName()))
      .body("message", is(message))
    .when()
      .delete(SINGLE_TASK_DELETE_SINGLE_VARIABLE_URL);
  }

}
package test.de.uni_hildesheim.sse.vil.buildlang;

import static net.ssehub.easy.varModel.varModel.testSupport.TextTestUtils.*;

import java.io.File;
import java.io.IOException;

import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Ignore;
import org.junit.Test;

import net.ssehub.easy.instantiation.core.model.artifactModel.ArtifactFactory;
import net.ssehub.easy.instantiation.core.model.buildlangModel.Script;
import net.ssehub.easy.instantiation.core.model.common.VilException;
import net.ssehub.easy.instantiation.core.model.defaultInstantiators.RandomDouble;

/**
 * Tests for the basic language. Each test runs a named VIL test script:
 * {@code assertEqual} compares the execution trace against an expected trace
 * (optionally expecting a specific {@link VilException} id), while
 * {@code assertSelfInstantiate} executes a self-instantiation and may inspect
 * the produced artifacts through a {@code SelfInstantiationAsserterAdapter}.
 * All tests declare {@code throws IOException}; an actual exception indicates
 * a test-environment failure, not a functional one.
 *
 * @author Holger Eichelberger
 */
public class ExecutionTests extends AbstractExecutionTest<Script> {

    // singleton instance, kept so that shutDown() can clean temporary files
    private static ExecutionTests tests;

    // name of the default start rule used by the self-instantiation tests
    private static final String MAIN_RULE = "main";

    @Override
    protected ITestConfigurer<Script> createTestConfigurer() {
        return new BuildLangTestConfigurer("vil.buildlang.testdata.home");
    }

    /**
     * Starts up the test.
     */
    @BeforeClass
    public static void startUp() {
        tests = new ExecutionTests();
    }

    /**
     * Tears down the test (cleans temporary files created by the tests).
     */
    @AfterClass
    public static void shutDown() {
        if (null != tests) {
            tests.cleanTemp();
            tests = null;
        }
    }

    /** Tests the load properties element. */
    @Test
    public void testLoadProperties() throws IOException {
        assertEqual("loadProperties");
    }

    /** Tests loading OS specific properties (contributed by S. Bender). */
    @Test
    public void testLoadPropertiesOS() throws IOException {
        assertEqual("loadPropertiesOs");
    }

    /** Tests the load properties element (failing test due to overridden constant). */
    @Test
    public void testLoadPropertiesFail() throws IOException {
        assertEqual("loadPropertiesFail", VilException.ID_IS_CONSTANT);
    }

    /** Tests basic numerical functions. */
    @Test
    public void testNumbers() throws IOException {
        assertEqual("numbers");
    }

    /** Tests basic boolean functions. */
    @Test
    public void testBooleans() throws IOException {
        assertEqual("boolean");
    }

    /** Tests alternative expressions with blocks. */
    @Test
    public void testAlternative() throws IOException {
        assertEqual("alternative");
    }

    /** Tests a specific recursion case. */
    @Test
    public void testRecursion() throws IOException {
        assertEqual("recursion");
    }

    /** Tests the application of a "function pointer" as parameter. */
    @Test
    public void testApply() throws IOException {
        assertEqual("apply");
    }

    /** Tests the application of a "function pointer" as variable. */
    @Test
    public void testApply2() throws IOException {
        assertEqual("apply2");
    }

    /** Tests typedefs and "function pointer". */
    @Test
    public void testApply3() throws IOException {
        assertEqual("apply3");
    }

    /** Tests typedefs and "function pointer" with "Any". */
    @Test
    public void testApply4() throws IOException {
        assertEqual("apply4");
    }

    /** Tests a rule as function. */
    @Test
    public void testFunction() throws IOException {
        assertEqual("function");
    }

    /** Tests a rule as function with a boolean result (via a constant). */
    @Test
    public void testFunction2() throws IOException {
        assertEqual("function2");
    }

    /** Tests a rule as function with a boolean result (via a variable). */
    @Test
    public void testFunction3() throws IOException {
        assertEqual("function3");
    }

    /** Tests alternative expressions without blocks. */
    @Test
    public void testAlternative1() throws IOException {
        assertEqual("alternative1");
    }

    /** Tests alternative expressions with undefined variable. */
    @Test
    public void testAlternative2() throws IOException {
        assertEqual("alternative2");
    }

    /**
     * Tests the random "instantiators" (inspired by K. Schmid).
     * RandomDouble is switched into deterministic test mode for the duration
     * of the run and restored afterwards.
     */
    @Test
    public void testRandom() throws IOException {
        boolean inTests = RandomDouble.setInTests(true);
        assertEqual("random");
        RandomDouble.setInTests(inTests);
    }

    /** Tests basic string functions. */
    @Test
    public void testStrings() throws IOException {
        assertEqual("string");
    }

    /** Tests basic collection functions. */
    @Test
    public void testCollections() throws IOException {
        assertEqual("collections");
    }

    /** Tests basic collection functions (contributed by C. Qin, QM). */
    @Test
    public void testCollections2() throws IOException {
        assertEqual("collections2");
    }

    /** Tests basic map functions. */
    @Test
    public void testMap() throws IOException {
        assertEqual("mapTest");
    }

    /** Tests simplified map initialization (contributed by C. Qin, QM). */
    @Test
    public void testMap1() throws IOException {
        assertEqual("mapTest1");
    }

    /** Tests simple independent variable declarations. */
    @Test
    public void testVariableDeclarations() throws IOException {
        assertEqual("variableDeclarations");
    }

    /** Tests simple depending variable declarations. */
    @Test
    public void testVariableDeclarations2() throws IOException {
        assertEqual("variableDeclarations2");
    }

    /** Tests simple script parameter (without script parameter). */
    @Test
    public void testScriptParameter() throws IOException {
        assertEqual("scriptParameter");
    }

    /** Tests simple script parameter (with typical script parameter). */
    @Ignore("obsolete due to null-parameter")
    @Test
    public void testScriptParameter2() throws IOException {
        assertEqual("scriptParameter2", VilException.ID_RUNTIME_PARAMETER);
        assertEqualDefaultParam("scriptParameter2");
    }

    /** Tests simple rule execution (without script parameter). */
    @Test
    public void testSimpleRules() throws IOException {
        assertEqual("simpleRules");
    }

    /** Tests simple rule execution (with typical script parameter). */
    @Ignore("obsolete due to null-parameter")
    @Test
    public void testSimpleRules2() throws IOException {
        assertEqual("simpleRules2", VilException.ID_RUNTIME_PARAMETER);
        assertEqualDefaultParam("simpleRules2");
    }

    /** Tests simple rule execution (nested rule dependency). */
    @Test
    public void testSimpleRules3() throws IOException {
        assertEqual("simpleRules3");
    }

    /** Tests simple rule execution (co-occurring rule dependency). */
    @Test
    public void testSimpleRules4() throws IOException {
        assertEqual("simpleRules4");
    }

    /** Tests simple rule execution (boolean pre/postcondition). */
    @Test
    public void testSimpleRules5() throws IOException {
        assertEqual("simpleRules5");
    }

    /** Tests simple rule execution (boolean pre/postcondition, one failing precondition). */
    @Test
    public void testSimpleRules6() throws IOException {
        assertEqual("simpleRules6");
    }

    /** Tests simple rule execution (boolean pre/postcondition, one failing precondition). */
    @Test
    public void testSimpleRules7() throws IOException {
        assertEqual("simpleRules7");
    }

    /** Tests the functionality of sequences. */
    @Test
    public void testSequences() throws IOException {
        assertSelfInstantiate("sequences");
    }

    /** Tests the functionality of enumerating sequences. */
    @Test
    public void testSequences1() throws IOException {
        assertSelfInstantiate("sequences1");
    }

    /** Tests the functionality of a simple map. */
    @Test
    public void testMapSimple() throws IOException {
        assertSelfInstantiate("mapSimple");
    }

    /** Tests the functionality of a for loop. */
    @Test
    public void testForIterator() throws IOException {
        assertSelfInstantiate("forIterator");
    }

    /** Tests complex generics. */
    @Test
    public void testGenerics() throws IOException {
        assertSelfInstantiate("generics");
    }

    /** Tests the functionality of a while loop. */
    @Test
    public void testWhileLoop() throws IOException {
        assertSelfInstantiate("whileLoop");
    }

    /** Tests the functionality of a while loop (not entering the loop body). */
    @Test
    public void testWhileLoop1() throws IOException {
        assertSelfInstantiate("whileLoop1");
    }

    /** Tests the functionality of a map using an iterator. */
    @Test
    public void testMapIterator() throws IOException {
        assertSelfInstantiate("mapIterator");
    }

    /** Tests the functionality of a nested map. */
    @Test
    public void testMapNested() throws IOException {
        assertSelfInstantiate("mapNested");
    }

    /** Tests the instantiation of an artifact via a constructor. */
    @Test
    public void testConstructor() throws IOException {
        assertSelfInstantiate("constructor");
    }

    /** Tests a simple double-sided join. */
    @Test
    public void testJoin1() throws IOException {
        assertSelfInstantiate("join1");
    }

    /** Tests a simple double-sided join with condition. */
    @Test
    public void testJoin2() throws IOException {
        assertSelfInstantiate("join2");
    }

    /** Tests a simple left-sided join with condition. */
    @Test
    public void testJoin3() throws IOException {
        assertSelfInstantiate("join3");
    }

    /**
     * Tests a simple right-sided join with condition (consider that just
     * the names not the paths shall be equal).
     */
    @Test
    public void testJoin4() throws IOException {
        assertSelfInstantiate("join4");
    }

    /**
     * The execution of a protected rule: only the default/public start rules
     * may be used as entry points.
     */
    @Test
    public void testProtected() throws IOException {
        assertSelfInstantiate("protectedRules");
        // the same as before, just explicitly using the default start rule
        assertSelfInstantiate("protectedRules", MAIN_RULE);
        // shall not work, protected
        assertSelfInstantiate("protectedRules", "compile", VilException.ID_RUNTIME_STARTRULE);
        // shall not work, does not exist
        assertSelfInstantiate("protectedRules", "bla", VilException.ID_RUNTIME_STARTRULE);
    }

    /** The execution of VTL templates; compares all generated files with their .expected counterparts. */
    @Test
    public void testVtl() throws IOException {
        assertSelfInstantiate("vtl", MAIN_RULE, new SelfInstantiationAsserterAdapter() {

            @Override
            public void assertIn(File base) {
                File file = new File(base, "test.txt");
                File expected = new File(base, "templates/test.vtl.expected");
                assertFileEqualitySafe(file, expected);
                file = new File(base, "test1.txt");
                assertFileEqualitySafe(file, expected);
                file = new File(base, "init_res.sql");
                expected = new File(base, "init.sql.expected");
                assertFileEqualitySafe(file, expected);
                file = new File(base, "init1_res.sql");
                expected = new File(base, "init1.sql.expected");
                assertFileEqualitySafe(file, expected);
                expected = new File(base, "templates/test1.vtl.expected");
                file = new File(base, "test_1.txt");
                assertFileEqualitySafe(file, expected);
                expected = new File(base, "templates/test4.vtl.expected");
                file = new File(base, "test4.xml");
                assertFileEqualitySafe(file, expected);
            }
        });
    }

    /** The execution of VTL templates. */
    @Test
    public void testVtl1() throws IOException {
        assertSelfInstantiate("vtl1", MAIN_RULE, new SelfInstantiationAsserterAdapter() {

            @Override
            public void assertIn(File base) {
                File file = new File(base, "test2.txt");
                File expected = new File(base, "templates/test2.vtl.expected");
                assertFileEqualitySafe(file, expected);
            }
        });
    }

    /** The execution of a Velocity template. */
    @Test
    public void testFileArtifact() throws IOException {
        assertSelfInstantiate("FileArtifactTest", MAIN_RULE, new SelfInstantiationAsserterAdapter() {

            @Override
            public void assertIn(File base) {
                File file = new File(base, "Mapping.tmp");
                File expected = new File(base, "Mapping.tmp.expected");
                assertFileEqualitySafe(file, expected);
            }
        });
    }

    /** The execution of conditional rules with explicit dependencies. */
    @Test
    public void testConditionalRules() throws IOException {
        assertSelfInstantiate("conditionalRules");
    }

    /** The execution of conditional rules with implicit dependencies. */
    @Test
    public void testConditionalRules2() throws IOException {
        assertSelfInstantiate("conditionalRules2");
    }

    /** The execution of zip operations. */
    @Test
    public void testZip() throws IOException {
        assertSelfInstantiate("zip");
    }

    /** The use of a temporary folder. */
    @Test
    public void testTmpFolder() throws IOException {
        assertSelfInstantiate("tmpFolder");
    }

    /** Tests simple operations on the textual representation. */
    @Test
    public void testText() throws IOException {
        assertSelfInstantiate("text");
    }

    /** Tests implicit variables. */
    @Test
    public void testImplicitVars() throws IOException {
        assertSelfInstantiate("implicitVars");
    }

    /**
     * Tests the instantiation command. However, we cannot test all variants of
     * the call as currently the main script cannot easily be determined in
     * tests (projects are fed in as files).
     */
    @Test
    public void testInstantiate() throws IOException {
        assertSelfInstantiate("instantiate");
    }

    /** Tests a main rule call to a base PL from an extended PL (contributed by Sascha). */
    @Test
    public void testExtendedPL() throws IOException {
        assertSelfInstantiate("extendedPL");
    }

    /** Tests type selects (actually a part of an existing test used for debugging). */
    @Test
    public void testTypeSelect() throws IOException {
        assertSelfInstantiate("typeSelect");
    }

    /** Tests creation of a new text file (inspired by S. Bender). */
    @Test
    public void testNewText() throws IOException {
        assertSelfInstantiate("newText", MAIN_RULE, new SelfInstantiationAsserterAdapter() {

            @Override
            public void assertIn(File base) {
                File file = new File(base, "newText.txt");
                File expected = new File(base, "newText.txt.expected");
                assertFileEqualitySafe(file, expected);
            }
        });
    }

    /**
     * Tests the selection of the right VTL script from a VIL script (contributed by Sebastian Bender).
     * The correct scripts are nested in the <code>PL_3</code> folder, but the same are also present
     * in the <code>PL_2</code> folder.
     */
    @Test
    public void testPL3() throws IOException {
        assertSelfInstantiate("PL_3/EASy/PL_3");
    }

    /** Tests some XML cases. */
    @Test
    public void testXML1() throws IOException {
        assertSelfInstantiate("xml1"); // file not created
    }

    /** Tests some XML cases; removes the generated file between repeated runs. */
    @Test
    public void testXML2() throws IOException {
        assertSelfInstantiate("xml2", MAIN_RULE, new SelfInstantiationAsserterAdapter() {

            @Override
            public void assertIn(File base) {
                File file = new File(base, "xml2File.xml");
                File expected = new File(base, "xml2File.xml.expected");
                assertFileEqualitySafe(file, expected);
            }

            @Override
            public void deleteBetween(File base) {
                File file = new File(base, "xml2File.xml");
                try {
                    ArtifactFactory.createArtifact(file).delete();
                } catch (VilException e) {
                    e.printStackTrace();
                }
            }
        });
    }

    /** Tests the rename operation for file artifacts. */
    @Test
    public void testRename() throws IOException {
        assertSelfInstantiate("rename", MAIN_RULE, new SelfInstantiationAsserterAdapter() {

            @Override
            public void assertIn(File base) {
                File file = new File(base, "rename.txt");
                Assert.assertFalse(file.exists());
                file = new File(base, "rename_0.txt");
                Assert.assertTrue(file.exists());
            }

            @Override
            public void deleteBetween(File base) {
                // reset test situation
                File fileAfter = new File(base, "rename_0.txt");
                File fileBefore = new File(base, "rename.txt");
                try {
                    ArtifactFactory.createArtifact(fileAfter).rename(fileBefore.getPath());
                } catch (VilException e) {
                    e.printStackTrace();
                }
            }
        });
    }

    /** Tests the rename operation for file artifacts. */
    @Test
    public void testRename1() throws IOException {
        assertSelfInstantiate("rename1", MAIN_RULE, new SelfInstantiationAsserterAdapter() {

            @Override
            public void assertIn(File base) {
                File file = new File(base, "rename1.txt");
                Assert.assertFalse(file.exists());
                file = new File(base, "rename_1.txt");
                Assert.assertTrue(file.exists());
            }

            @Override
            public void deleteBetween(File base) {
                // reset test situation
                File fileAfter = new File(base, "rename_1.txt");
                File fileBefore = new File(base, "rename1.txt");
                try {
                    ArtifactFactory.createArtifact(fileAfter).rename(fileBefore.getPath());
                } catch (VilException e) {
                    e.printStackTrace();
                }
            }
        });
    }

    /** Tests auto-storing artifacts (rename after). */
    @Test
    public void testStoreArtifact1() throws IOException {
        assertSelfInstantiate("storeArtifact1", MAIN_RULE, new SelfInstantiationAsserterAdapter() {

            @Override
            public void assertIn(File base) {
                File file = new File(base, "renStoreArtifact1.txt");
                Assert.assertTrue(file.exists());
                File expected = new File(base, "renStoreArtifact1.txt.expected");
                assertFileEqualitySafe(file, expected);
            }

            @Override
            public void deleteBetween(File base) {
                // reset test situation
                File file = new File(base, "renStoreArtifact1.txt");
                try {
                    ArtifactFactory.createArtifact(file).delete();
                } catch (VilException e) {
                    e.printStackTrace();
                }
            }
        });
    }

    /** Tests auto-storing artifacts (rename after). */
    @Test
    public void testStoreArtifact2() throws IOException {
        assertSelfInstantiate("storeArtifact2", MAIN_RULE, new SelfInstantiationAsserterAdapter() {

            @Override
            public void assertIn(File base) {
                File file = new File(base, "storeArtifact2.txt");
                Assert.assertTrue(file.exists());
                File expected = new File(base, "storeArtifact2.txt.expected");
                assertFileEqualitySafe(file, expected);
            }

            @Override
            public void deleteBetween(File base) {
                // reset test situation
                File file = new File(base, "storeArtifact2.txt");
                try {
                    ArtifactFactory.createArtifact(file).delete();
                } catch (VilException e) {
                    e.printStackTrace();
                }
            }
        });
    }

    /** Tests some VTL cases (passing IVML instances). */
    @Test
    public void testVtl2() throws IOException {
        assertSelfInstantiate("vtl2", MAIN_RULE, "adviceTestVIL1", new SelfInstantiationAsserterAdapter() {

            @Override
            public void assertIn(File base) {
                File file = new File(base, "test3.txt");
                File expected = new File(base, "templates/test3.vtl.expected");
                assertFileEqualitySafe(file, expected);
            }
        });
    }

    /** Tests some VTL cases (passing IVML instances of convertible types, contributed by QualiMaster). */
    @Test
    public void testVtl3() throws IOException {
        assertSelfInstantiate("vtl3", MAIN_RULE, "vtl3", new SelfInstantiationAsserterAdapter() {

            @Override
            public void assertIn(File base) {
                File file = new File(base, "test3.txt");
                File expected = new File(base, "templates/vtl3.vtl.expected");
                assertFileEqualitySafe(file, expected);
            }
        });
    }

    /** Tests some VTL cases (re-using VTL in VIL). */
    @Test
    public void testVtl4() throws IOException {
        assertSelfInstantiate("vtl4", MAIN_RULE, "expressionTest", null);
    }

    /** Tests some VTL cases (renaming artifact in VIL). */
    @Test
    public void testVtl5() throws IOException {
        assertSelfInstantiate("vtl5", MAIN_RULE, null, new SelfInstantiationAsserterAdapter() {

            @Override
            public void assertIn(File base) {
                File renamedFile = new File(base, "vtl5.tip");
                Assert.assertTrue("Error: File was not renamed \"" + renamedFile.getAbsolutePath() + "\"",
                    renamedFile.exists());
            }
        });
    }

    /** Tests whether files are copied recursively if a file pattern was used. */
    @Test
    public void testRecursiveCopyOnInclusionPattern() throws IOException {
        assertSelfInstantiate("RecursiveCopyTestProject/RecursiceCopyIncludePattern", MAIN_RULE, null,
            new SelfInstantiationAsserterAdapter() {

                @Override
                public void assertIn(File base) {
                    File fileToCopy = new File(base, "trgForRecursiveCopy/base/FileToCopy.txt");
                    Assert.assertTrue("Error: File was not recursively copied to \""
                        + fileToCopy.getAbsolutePath() + "\"", fileToCopy.exists());
                    File fileToBeIgnored = new File(base, "trgForRecursiveCopy/ignored/ToBeIgnored.txt");
                    Assert.assertFalse("Error: File was copied to \"" + fileToBeIgnored.getAbsolutePath()
                        + "\", but this must not happen.", fileToBeIgnored.exists());
                }
            });
    }

    /** Tests null values (passing IVML instances). */
    @Test
    public void testNull() throws IOException {
        assertSelfInstantiate("nullTest", MAIN_RULE, "nullTest", null);
    }

    /** Tests returning collections. */
    @Test
    public void testCollectionReturn() throws IOException {
        assertSelfInstantiate("collectionReturnTest", MAIN_RULE, "nullTest", null);
    }

    /** Tests returning collections. */
    @Test
    public void testCollectionReturn2() throws IOException {
        assertSelfInstantiate("collectionReturnTest2", MAIN_RULE, "nullTest", null);
    }

    /** Tests implicit conversion of IVML elements. */
    @Test
    public void testImplicitConversion() throws IOException {
        assertSelfInstantiate("implicitConversion", MAIN_RULE, "IC", null);
    }

    /** Tests defining a boolean rule based on a Boolean IVML element. */
    @Test
    public void testBooleanRule() throws IOException {
        assertSelfInstantiate("booleanRule", MAIN_RULE, "booleanRule", null);
    }

    /** Tests boolean preconditions via IVML. */
    @Test
    public void testBooleanRule2() throws IOException {
        assertSelfInstantiate("booleanRule2", MAIN_RULE, "booleanRule2", null);
    }

    /** Tests defining default parameters. */
    @Test
    public void testRules2() throws IOException {
        assertEqual("rules2");
    }

    /** Tests defining default script parameters. */
    @Test
    public void testRules4() throws IOException {
        assertEqual("rules4");
    }

    /** Tests implicit casting. */
    @Test
    public void testCast1() throws IOException {
        assertSelfInstantiate("cast1", MAIN_RULE, "cast1", null);
    }

    /** Tests type select over IVML types. */
    @Test
    public void testTypeSelect2() throws IOException {
        assertSelfInstantiate("typeSelect2", MAIN_RULE, "typeSelectTest", null);
    }

    /** Tests type select over IVML types. */
    @Test
    public void testTypeSelect3() throws IOException {
        assertSelfInstantiate("typeSelect3", MAIN_RULE, "typeSelectTest", null);
    }

    /** Tests the all instances operation. */
    @Test
    public void testAllInstances() throws IOException {
        assertSelfInstantiate("allInstances", MAIN_RULE, "typeSelectTest", null);
    }

    /** Tests type operations with advice. */
    @Test
    public void testTypes() throws IOException {
        assertSelfInstantiate("types", MAIN_RULE, "typeSelectTest", null);
    }

    /** Tests type operations without advice. */
    @Test
    public void testTypes2() throws IOException {
        assertSelfInstantiate("types2", MAIN_RULE, "typeSelectTest", null);
    }

    /** Tests the copy "results". */
    @Test
    public void testCopy() throws IOException {
        assertSelfInstantiate("copy", MAIN_RULE, null, null);
    }

    /** Tests the copy "results". */
    @Test
    public void testCopy2() throws IOException {
        assertSelfInstantiate("copy2", MAIN_RULE, null, null);
    }

    /** Tests defining two simple graphs and calculating their closure and whether they are acyclic. */
    @Test
    public void testGraph1() throws IOException {
        assertSelfInstantiate("graph1", MAIN_RULE, "graph1", null);
    }

    /** Tests enums and enum types. */
    @Test
    public void testEnum() throws IOException {
        assertSelfInstantiate("enumTest", MAIN_RULE, "enumTest", null);
    }

    /** Tests chained expressions against IVML. */
    @Test
    public void testExpression() throws IOException {
        assertSelfInstantiate("expressionTest", MAIN_RULE, "expressionTest", null);
    }

    /** Tests sorting. */
    @Test
    public void testSorting() throws IOException {
        assertSelfInstantiate("sorting", MAIN_RULE, "sorting", null);
    }

    /** Refined compound slots. */
    @Test
    public void testCompounds3() throws IOException {
        assertSelfInstantiate("compounds3", MAIN_RULE, "compounds3", null);
    }

    /** Dynamic dispatch. */
    @Test
    public void testCarShop() throws IOException {
        assertSelfInstantiate("carShop", MAIN_RULE, "CarShop", null);
    }

}
/*----------------------------------------------------------------------------*/
/* Copyright (c) FIRST 2008-2012. All Rights Reserved.                        */
/* Open Source Software - may be modified and shared by FRC teams. The code   */
/* must be accompanied by the FIRST BSD license file in the root directory of */
/* the project.                                                               */
/*----------------------------------------------------------------------------*/
package org.lunatecs316.frc2014.lib;

import edu.wpi.first.wpilibj.AccumulatorResult;
import edu.wpi.first.wpilibj.AnalogChannel;
import edu.wpi.first.wpilibj.PIDSource;
import edu.wpi.first.wpilibj.SensorBase;
import edu.wpi.first.wpilibj.Timer;
import edu.wpi.first.wpilibj.communication.UsageReporting;
import edu.wpi.first.wpilibj.livewindow.LiveWindow;
import edu.wpi.first.wpilibj.livewindow.LiveWindowSendable;
import edu.wpi.first.wpilibj.parsing.ISensor;
import edu.wpi.first.wpilibj.tables.ITable;

/**
 * Use a rate gyro to return the robot's heading relative to a starting position.
 * The Gyro class tracks the robot's heading based on the starting position. As the robot
 * rotates the new heading is computed by integrating the rate of rotation returned
 * by the sensor. When the class is instantiated, it does a short calibration routine
 * where it samples the gyro while at rest to determine the default offset. This is
 * subtracted from each sample to determine the heading.
 *
 * NOTE(review): this is a team-local copy of the WPILib Gyro class; the stated
 * difference from stock WPILib is that {@link #initGyro()} is public so the
 * sensor can be recalibrated at runtime.
 */
public class Gyro extends SensorBase implements PIDSource, ISensor, LiveWindowSendable {

    static final int kOversampleBits = 10;
    static final int kAverageBits = 0;
    static final double kSamplesPerSecond = 50.0;
    // duration (seconds) of the at-rest calibration in initGyro()
    static final double kCalibrationSampleTime = 5.0;
    // sensitivity default; override via setSensitivity() for other gyro parts
    static final double kDefaultVoltsPerDegreePerSecond = 0.007;

    AnalogChannel m_analog;
    double m_voltsPerDegreePerSecond;
    double m_offset;             // sub-LSB remainder of the calibration center
    int m_center;                // accumulator center value determined at calibration
    boolean m_channelAllocated;  // true when this class created (and must free) m_analog
    AccumulatorResult result;

    /**
     * Initialize the gyro.
     * Calibrate the gyro by running for a number of samples and computing the center value for this
     * part. Then use the center value as the Accumulator center value for subsequent measurements.
     * It's important to make sure that the robot is not moving while the centering calculations are
     * in progress, this is typically done when the robot is first turned on while it's sitting at
     * rest before the competition starts.
     *
     * The WPILib implementation has this method private. We change it to public so that the Gyro
     * can be recalibrated.
     */
    public void initGyro() {
        result = new AccumulatorResult();
        if (m_analog == null) {
            // NOTE(review): execution continues after this message and will NPE
            // below; callers must supply a valid channel
            System.out.println("Null m_analog");
        }
        m_voltsPerDegreePerSecond = kDefaultVoltsPerDegreePerSecond;
        m_analog.setAverageBits(kAverageBits);
        m_analog.setOversampleBits(kOversampleBits);
        double sampleRate = kSamplesPerSecond * (1 << (kAverageBits + kOversampleBits));
        m_analog.getModule().setSampleRate(sampleRate);

        // let the sample rate settle before starting the accumulator
        Timer.delay(1.0);
        m_analog.initAccumulator();

        // sample the at-rest sensor for the calibration period
        Timer.delay(kCalibrationSampleTime);

        m_analog.getAccumulatorOutput(result);

        // integer center (rounded mean) plus fractional remainder kept in m_offset
        m_center = (int) ((double)result.value / (double)result.count + .5);

        m_offset = ((double)result.value / (double)result.count) - (double)m_center;

        m_analog.setAccumulatorCenter(m_center);

        m_analog.setAccumulatorDeadband(0); ///< TODO: compute / parameterize this
        m_analog.resetAccumulator();

        UsageReporting.report(UsageReporting.kResourceType_Gyro, m_analog.getChannel(), m_analog.getModuleNumber()-1);
        LiveWindow.addSensor("Gyro", m_analog.getModuleNumber(), m_analog.getChannel(), this);
    }

    /**
     * Gyro constructor given a slot and a channel.
     * @param slot The cRIO slot for the analog module the gyro is connected to.
     * @param channel The analog channel the gyro is connected to.
     */
    public Gyro(int slot, int channel) {
        m_analog = new AnalogChannel(slot, channel);
        m_channelAllocated = true;
        initGyro();
    }

    /**
     * Gyro constructor with only a channel.
     *
     * Use the default analog module slot.
     *
     * @param channel The analog channel the gyro is connected to.
     */
    public Gyro(int channel) {
        m_analog = new AnalogChannel(channel);
        m_channelAllocated = true;
        initGyro();
    }

    /**
     * Gyro constructor with a precreated analog channel object.
     * Use this constructor when the analog channel needs to be shared. There
     * is no reference counting when an AnalogChannel is passed to the gyro.
     * @param channel The AnalogChannel object that the gyro is connected to.
     */
    public Gyro(AnalogChannel channel) {
        m_analog = channel;
        if (m_analog == null) {
            System.err.println("Analog channel supplied to Gyro constructor is null");
        } else {
            // shared channel: do not free it in free()
            m_channelAllocated = false;
            initGyro();
        }
    }

    /**
     * Reset the gyro.
     * Resets the gyro to a heading of zero. This can be used if there is significant
     * drift in the gyro and it needs to be recalibrated after it has been running.
     */
    public void reset() {
        if (m_analog != null) {
            m_analog.resetAccumulator();
        }
    }

    /**
     * Delete (free) the accumulator and the analog components used for the gyro.
     * Only frees the analog channel if this instance allocated it.
     */
    public void free() {
        if (m_analog != null && m_channelAllocated) {
            m_analog.free();
        }
        m_analog = null;
    }

    /**
     * Return the actual angle in degrees that the robot is currently facing.
     *
     * The angle is based on the current accumulator value corrected by the oversampling rate, the
     * gyro type and the A/D calibration values.
     * The angle is continuous, that is it can go beyond 360 degrees. This helps algorithms that
     * would otherwise see a discontinuity in the gyro output as it sweeps past 0 on the second
     * time around.
     *
     * @return the current heading of the robot in degrees (0.0 if the channel was freed). This
     *         heading is based on integration of the returned rate from the gyro.
     */
    public double getAngle() {
        if (m_analog == null) {
            return 0.0;
        } else {
            m_analog.getAccumulatorOutput(result);

            // remove the accumulated fractional-center drift before scaling
            long value = result.value - (long) (result.count * m_offset);

            double scaledValue = value * 1e-9 * m_analog.getLSBWeight() * (1 << m_analog.getAverageBits())
                    / (m_analog.getModule().getSampleRate() * m_voltsPerDegreePerSecond);

            return scaledValue;
        }
    }

    /**
     * Return the rate of rotation of the gyro.
     *
     * The rate is based on the most recent reading of the gyro analog value.
     *
     * @return the current rate in degrees per second (0.0 if the channel was freed)
     */
    public double getRate() {
        if (m_analog == null) {
            return 0.0;
        } else {
            return (m_analog.getAverageValue() - ((double)m_center + m_offset))
                    * 1e-9
                    * m_analog.getLSBWeight()
                    / ((1 << m_analog.getOversampleBits()) * m_voltsPerDegreePerSecond);
        }
    }

    /**
     * Set the gyro type based on the sensitivity.
     * This takes the number of volts/degree/second sensitivity of the gyro and uses it in subsequent
     * calculations to allow the code to work with multiple gyros.
     *
     * @param voltsPerDegreePerSecond The type of gyro specified as the voltage that represents one degree/second.
     */
    public void setSensitivity(double voltsPerDegreePerSecond) {
        m_voltsPerDegreePerSecond = voltsPerDegreePerSecond;
    }

    /**
     * Get the angle of the gyro for use with PIDControllers.
     * @return the current angle according to the gyro
     */
    public double pidGet() {
        return getAngle();
    }

    /*
     * Live Window code, only does anything if live window is activated.
     */
    public String getSmartDashboardType(){
        return "Gyro";
    }

    private ITable m_table;

    /**
     * {@inheritDoc}
     */
    public void initTable(ITable subtable) {
        m_table = subtable;
        updateTable();
    }

    /**
     * {@inheritDoc}
     */
    public ITable getTable(){
        return m_table;
    }

    /**
     * {@inheritDoc}
     */
    public void updateTable() {
        if (m_table != null) {
            m_table.putNumber("Value", getAngle());
        }
    }

    /**
     * {@inheritDoc}
     */
    public void startLiveWindowMode() {}

    /**
     * {@inheritDoc}
     */
    public void stopLiveWindowMode() {}
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.kafka.streams.integration;

import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.Config;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.config.ConfigResource;
import org.apache.kafka.common.config.TopicConfig;
import org.apache.kafka.common.requests.DescribeLogDirsResponse;
import org.apache.kafka.common.serialization.IntegerSerializer;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.utils.Time;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.integration.utils.EmbeddedKafkaCluster;
import org.apache.kafka.streams.integration.utils.IntegrationTestUtils;
import org.apache.kafka.test.IntegrationTest;
import org.apache.kafka.test.MockMapper;
import org.apache.kafka.test.TestCondition;
import org.apache.kafka.test.TestUtils;
import org.junit.After;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;

import java.time.Duration;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Properties;
import java.util.Set;

/**
 * Integration test verifying that a Streams application's internal repartition
 * topic is created with a delete cleanup policy and aggressive segment settings,
 * receives data, and is subsequently purged by the broker down to at most one
 * segment's worth of bytes.
 */
@Category({IntegrationTest.class})
public class PurgeRepartitionTopicIntegrationTest {

    private static final int NUM_BROKERS = 1;

    private static final String INPUT_TOPIC = "input-stream";
    private static final String APPLICATION_ID = "restore-test";
    // Internal repartition topic name derived from the (deterministic) topology node id.
    private static final String REPARTITION_TOPIC = APPLICATION_ID + "-KSTREAM-AGGREGATE-STATE-STORE-0000000002-repartition";

    private static AdminClient adminClient;
    private static KafkaStreams kafkaStreams;
    // Small interval/segment sizes so the broker purges quickly during the test.
    private static final Integer PURGE_INTERVAL_MS = 10;
    private static final Integer PURGE_SEGMENT_BYTES = 2000;

    // Double-brace initialization is intentional here: it builds the broker
    // Properties inline for the @ClassRule cluster.
    @ClassRule
    public static final EmbeddedKafkaCluster CLUSTER = new EmbeddedKafkaCluster(NUM_BROKERS, new Properties() {
        {
            put("log.retention.check.interval.ms", PURGE_INTERVAL_MS);
            put(TopicConfig.FILE_DELETE_DELAY_MS_CONFIG, 0);
        }
    });

    private final Time time = CLUSTER.time;

    /**
     * Condition that holds once the repartition topic exists and carries the
     * expected cleanup.policy, segment.ms and segment.bytes configs.
     */
    private class RepartitionTopicCreatedWithExpectedConfigs implements TestCondition {
        @Override
        final public boolean conditionMet() {
            try {
                final Set<String> topics = adminClient.listTopics().names().get();
                if (!topics.contains(REPARTITION_TOPIC)) {
                    return false;
                }
            } catch (final Exception e) {
                // Not created yet (or transient admin error); keep polling.
                return false;
            }

            try {
                final ConfigResource resource = new ConfigResource(ConfigResource.Type.TOPIC, REPARTITION_TOPIC);
                final Config config = adminClient
                        .describeConfigs(Collections.singleton(resource))
                        .values()
                        .get(resource)
                        .get();
                return config.get(TopicConfig.CLEANUP_POLICY_CONFIG).value().equals(TopicConfig.CLEANUP_POLICY_DELETE)
                        && config.get(TopicConfig.SEGMENT_MS_CONFIG).value().equals(PURGE_INTERVAL_MS.toString())
                        && config.get(TopicConfig.SEGMENT_BYTES_CONFIG).value().equals(PURGE_SEGMENT_BYTES.toString());
            } catch (final Exception e) {
                // Config not yet readable; keep polling.
                return false;
            }
        }
    }

    /** Predicate over the repartition topic's on-disk size, in bytes. */
    private interface TopicSizeVerifier {
        boolean verify(long currentSize);
    }

    /**
     * Condition that advances the mock clock (to trigger the broker's retention
     * check) and then checks the repartition topic's log size with a verifier.
     */
    private class RepartitionTopicVerified implements TestCondition {
        private final TopicSizeVerifier verifier;

        RepartitionTopicVerified(final TopicSizeVerifier verifier) {
            this.verifier = verifier;
        }

        @Override
        public final boolean conditionMet() {
            time.sleep(PURGE_INTERVAL_MS);

            try {
                final Collection<DescribeLogDirsResponse.LogDirInfo> logDirInfo =
                        adminClient.describeLogDirs(Collections.singleton(0)).values().get(0).get().values();

                for (final DescribeLogDirsResponse.LogDirInfo partitionInfo : logDirInfo) {
                    final DescribeLogDirsResponse.ReplicaInfo replicaInfo =
                            partitionInfo.replicaInfos.get(new TopicPartition(REPARTITION_TOPIC, 0));
                    if (replicaInfo != null && verifier.verify(replicaInfo.size)) {
                        return true;
                    }
                }
            } catch (final Exception e) {
                // swallow
            }

            return false;
        }
    }

    @BeforeClass
    public static void createTopics() throws Exception {
        CLUSTER.createTopic(INPUT_TOPIC, 1, 1);
    }

    @Before
    public void setup() {
        // create admin client for verification
        final Properties adminConfig = new Properties();
        adminConfig.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers());
        adminClient = AdminClient.create(adminConfig);

        final Properties streamsConfiguration = new Properties();
        streamsConfiguration.put(StreamsConfig.APPLICATION_ID_CONFIG, APPLICATION_ID);
        // Frequent commits so purge requests are issued quickly.
        streamsConfiguration.put(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG, PURGE_INTERVAL_MS);
        streamsConfiguration.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers());
        streamsConfiguration.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.Integer().getClass());
        streamsConfiguration.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.Integer().getClass());
        streamsConfiguration.put(StreamsConfig.STATE_DIR_CONFIG, TestUtils.tempDirectory(APPLICATION_ID).getPath());
        // Tight segment settings applied to internal topics so rolling happens fast.
        streamsConfiguration.put(StreamsConfig.topicPrefix(TopicConfig.SEGMENT_MS_CONFIG), PURGE_INTERVAL_MS);
        streamsConfiguration.put(StreamsConfig.topicPrefix(TopicConfig.SEGMENT_BYTES_CONFIG), PURGE_SEGMENT_BYTES);
        streamsConfiguration.put(StreamsConfig.producerPrefix(ProducerConfig.BATCH_SIZE_CONFIG), PURGE_SEGMENT_BYTES / 2);    // we cannot allow batch size larger than segment size
        streamsConfiguration.put(IntegrationTestUtils.INTERNAL_LEAVE_GROUP_ON_CLOSE, true);

        // groupBy forces a repartition topic; count materializes the aggregate store.
        final StreamsBuilder builder = new StreamsBuilder();
        builder.stream(INPUT_TOPIC)
               .groupBy(MockMapper.selectKeyKeyValueMapper())
               .count();

        kafkaStreams = new KafkaStreams(builder.build(), streamsConfiguration, time);
    }

    @After
    public void shutdown() {
        if (kafkaStreams != null) {
            kafkaStreams.close(Duration.ofSeconds(30));
        }
    }

    // NOTE(review): the method name says "restore" but the assertions below verify
    // repartition-topic purging; consider renaming to shouldPurgeRepartitionTopic.
    @Test
    public void shouldRestoreState() throws Exception {
        // produce some data to input topic
        final List<KeyValue<Integer, Integer>> messages = new ArrayList<>();
        for (int i = 0; i < 1000; i++) {
            messages.add(new KeyValue<>(i, i));
        }
        IntegrationTestUtils.produceKeyValuesSynchronouslyWithTimestamp(INPUT_TOPIC,
                messages,
                TestUtils.producerConfig(CLUSTER.bootstrapServers(),
                        IntegerSerializer.class,
                        IntegerSerializer.class),
                time.milliseconds());

        kafkaStreams.start();

        TestUtils.waitForCondition(new RepartitionTopicCreatedWithExpectedConfigs(), 60000,
                "Repartition topic " + REPARTITION_TOPIC + " not created with the expected configs after 60000 ms.");

        TestUtils.waitForCondition(
            new RepartitionTopicVerified(currentSize -> currentSize > 0),
            60000,
            "Repartition topic " + REPARTITION_TOPIC + " not received data after 60000 ms."
        );

        // we need long enough timeout to by-pass the log manager's InitialTaskDelayMs, which is hard-coded on server side
        TestUtils.waitForCondition(
            new RepartitionTopicVerified(currentSize -> currentSize <= PURGE_SEGMENT_BYTES),
            60000,
            "Repartition topic " + REPARTITION_TOPIC + " not purged data after 60000 ms."
        );
    }
}
package io.github.mike10004.seleniumcapture;

import com.browserup.bup.BrowserUpProxy;
import com.browserup.bup.BrowserUpProxyServer;
import com.browserup.bup.mitm.CertificateAndKeySource;
import org.littleshoot.proxy.HttpFiltersSource;
import org.openqa.selenium.WebDriverException;

import javax.annotation.Nullable;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.function.Supplier;

import static java.util.Objects.requireNonNull;

/**
 * Interface of a service that enables collection of HTTP traffic driven by a web-browsing session.
 */
public interface TrafficCollector {

    /**
     * Collects traffic generated by the given generator into a HAR. This invokes
     * {@link #collect(TrafficGenerator, TrafficMonitor)} with a null monitor reference.
     * @param generator the generator
     * @param <R> type of result the generator returns
     * @return the HAR containing all traffic generated
     * @throws IOException if something I/O related goes awry
     * @throws WebDriverException if the web driver could not be created or the generator throws one
     */
    <R> HarPlus<R> collect(TrafficGenerator<R> generator) throws IOException, WebDriverException;

    /**
     * Collects traffic generated by the given generator into a HAR. Notifications of request/response
     * interactions can be sent to the given monitor, optionally.
     * @param generator the generator
     * @param <R> type of result the generator returns
     * @param monitor a monitor, or null
     * @return the HAR containing all traffic generated
     * @throws IOException on I/O error
     * @throws WebDriverException if the web driver could not be created or the generator throws one
     */
    <R> HarPlus<R> collect(TrafficGenerator<R> generator, @Nullable TrafficMonitor monitor) throws IOException, WebDriverException;

    /**
     * Monitors traffic generated by the given generator. This like
     * {@link #collect(TrafficGenerator, TrafficMonitor)} but without capturing a HAR.
     * @param generator the traffic generator
     * @param monitor the monitor
     * @param <R> generator result type
     * @return the generator result
     * @throws IOException on I/O error
     * @throws WebDriverException on webdriver error
     */
    <R> R monitor(TrafficGenerator<R> generator, TrafficMonitor monitor) throws IOException, WebDriverException;

    /**
     * Causes traffic to be generated by the given generator and returns the result.
     * This is essentially {@link #monitor(TrafficGenerator, TrafficMonitor)} without
     * notifying a monitor of request/response interactions.
     * @param generator the traffic generator
     * @param <R> generator result type
     * @return the generator result
     * @throws IOException on I/O error
     * @throws WebDriverException on webdriver error
     */
    <R> R drive(TrafficGenerator<R> generator) throws IOException, WebDriverException;

    /**
     * Returns a new builder instance.
     * @param webDriverFactory the webdriver factory
     * @return a builder
     */
    static TrafficCollectorImpl.Builder builder(WebDriverFactory webDriverFactory) {
        return new TrafficCollectorImpl.Builder(webDriverFactory);
    }

    /**
     * Builder of traffic collector instances. Mutable and not thread-safe;
     * configure it fully on one thread, then call {@link #build()}.
     */
    final class Builder {

        private final WebDriverFactory webDriverFactory;
        // Null means HTTPS interception is disabled unless collectHttps() is called.
        private CertificateAndKeySource certificateAndKeySource = null;
        private final List<HttpFiltersSource> httpFiltersSources = new ArrayList<>();
        private InterceptedWebdrivingConfigurator upstreamConfigurator = defaultInterceptedWebdrivingConfigurator();
        private Supplier<? extends BrowserUpProxy> interceptingProxyInstantiator = BrowserUpProxyServer::new;
        private final List<HarPostProcessor> harPostProcessors = new ArrayList<>();
        private ExceptionReactor exceptionReactor = ExceptionReactor.PROPAGATE;

        Builder(WebDriverFactory webDriverFactory) {
            this.webDriverFactory = requireNonNull(webDriverFactory);
            // Anonymizing filter is installed by default; nonAnonymizing() removes it.
            httpFiltersSources.add(AnonymizingFiltersSource.getInstance());
        }

        /**
         * Configures the collector to react to exceptions during collection with the specified service instance.
         * By default, exceptions are re-thrown.
         * @param exceptionReactor the reactor service
         * @return this builder instance
         */
        public Builder onException(ExceptionReactor exceptionReactor) {
            this.exceptionReactor = requireNonNull(exceptionReactor);
            return this;
        }

        // Default configurator: no upstream proxy, default host-bypass rules, empty bypass list.
        private static InterceptedWebdrivingConfigurator defaultInterceptedWebdrivingConfigurator() {
            return new BasicInterceptedWebdrivingConfigurator(NoProxyDefinition.noUpstreamProxy(),
                    HostBypassRuleFactory.createDefault(),
                    Collections.emptyList());
        }

        /**
         * Configures the collector to collect HTTPS traffic by accepting TLS socket connections
         * using the given trust data.
         * @param certificateAndKeySource the certificate and key source
         * @return this builder instance
         */
        public Builder collectHttps(CertificateAndKeySource certificateAndKeySource) {
            this.certificateAndKeySource = requireNonNull(certificateAndKeySource);
            return this;
        }

        /**
         * Sets the supplier of the proxy server instance that is used to intercept and collect traffic.
         * The default is a supplier of the proxy that uses the default constructor.
         * @param interceptingProxyInstantiator the instantiator
         * @return this builder instance
         */
        public Builder interceptingProxyInstantiator(Supplier<? extends BrowserUpProxy> interceptingProxyInstantiator) {
            this.interceptingProxyInstantiator = requireNonNull(interceptingProxyInstantiator);
            return this;
        }

        /**
         * Configures the collector not to remove headers that indicate traffic is passing through a proxy.
         * By default, the intercepting proxy does not add/remove/change any headers, and this negates that
         * behavior.
         * @return this builder instance
         */
        public Builder nonAnonymizing() {
            httpFiltersSources.remove(AnonymizingFiltersSource.getInstance());
            return this;
        }

        /**
         * Adds a filter source to the list of filter sources.
         * @param filter a filter source
         * @return this builder instance
         */
        public Builder filter(HttpFiltersSource filter) {
            httpFiltersSources.add(filter);
            return this;
        }

        /**
         * Adds all argument filters sources to this builder's filters list.
         * @param val the filters sources to add
         * @return this builder instance
         */
        public Builder filters(Collection<? extends HttpFiltersSource> val) {
            httpFiltersSources.addAll(val);
            return this;
        }

        /**
         * Configures the collector instance not to use an upstream proxy.
         * This is the default.
         * @param webdrivingBypassList hosts for which the webdriving proxy is bypassed
         * @return this builder instance
         */
        public Builder noUpstreamProxy(List<String> webdrivingBypassList) {
            return upstreamProxy(new BasicInterceptedWebdrivingConfigurator(NoProxyDefinition.noUpstreamProxy(),
                    HostBypassRuleFactory.createDefault(), webdrivingBypassList));
        }

        // Common sink for both the "no proxy" and "with proxy" configurations.
        private Builder upstreamProxy(InterceptedWebdrivingConfigurator configurator) {
            this.upstreamConfigurator = requireNonNull(configurator);
            return this;
        }

        /**
         * Configures the collector to use an upstream proxy, with default host-bypass rules.
         * @param proxySpecification definition of the upstream proxy to route traffic through
         * @param webdrivingProxyBypassList hosts for which the webdriving proxy is bypassed
         * @return this builder instance
         */
        @SuppressWarnings("UnusedReturnValue")
        public Builder upstreamProxy(UpstreamProxyDefinition proxySpecification, List<String> webdrivingProxyBypassList) {
            return upstreamProxy(proxySpecification, HostBypassRuleFactory.createDefault(), webdrivingProxyBypassList);
        }

        /**
         * Configures the collector to use an upstream proxy with explicit host-bypass rules.
         * @param proxySpecification definition of the upstream proxy to route traffic through
         * @param hostBypassRuleFactory factory producing rules for hosts that bypass the upstream proxy
         * @param webdrivingProxyBypassList hosts for which the webdriving proxy is bypassed
         * @return this builder instance
         */
        public Builder upstreamProxy(UpstreamProxyDefinition proxySpecification, HostBypassRuleFactory hostBypassRuleFactory, List<String> webdrivingProxyBypassList) {
            requireNonNull(proxySpecification);
            this.upstreamConfigurator = new BasicInterceptedWebdrivingConfigurator(proxySpecification, hostBypassRuleFactory, webdrivingProxyBypassList);
            return this;
        }

        /**
         * Configures the collector to post-process the captured HAR file with the given instance.
         * @param harPostProcessor processor applied to the HAR after capture
         * @return this builder instance
         */
        public Builder harPostProcessor(HarPostProcessor harPostProcessor) {
            harPostProcessors.add(requireNonNull(harPostProcessor, "harPostProcessor"));
            return this;
        }

        /**
         * Builds a collector instance.
         * @return a new collector instance
         */
        public TrafficCollector build() {
            return new TrafficCollectorImpl(webDriverFactory, certificateAndKeySource, upstreamConfigurator,
                    httpFiltersSources, interceptingProxyInstantiator, harPostProcessors, exceptionReactor);
        }
    }
}
package org.realityforge.gwt.websockets.example.client; import com.google.gwt.core.client.EntryPoint; import com.google.gwt.core.client.GWT; import com.google.gwt.dom.client.DivElement; import com.google.gwt.dom.client.Document; import com.google.gwt.event.dom.client.ClickEvent; import com.google.gwt.event.dom.client.ClickHandler; import com.google.gwt.typedarrays.shared.ArrayBuffer; import com.google.gwt.typedarrays.shared.Int8Array; import com.google.gwt.typedarrays.shared.TypedArrays; import com.google.gwt.user.client.Window; import com.google.gwt.user.client.ui.Button; import com.google.gwt.user.client.ui.CheckBox; import com.google.gwt.user.client.ui.FlowPanel; import com.google.gwt.user.client.ui.HTML; import com.google.gwt.user.client.ui.RootPanel; import com.google.gwt.user.client.ui.ScrollPanel; import com.google.gwt.user.client.ui.TextBox; import java.util.logging.Logger; import javax.annotation.Nonnull; import javax.annotation.Nullable; import org.realityforge.gwt.websockets.client.WebSocket; import org.realityforge.gwt.websockets.client.WebSocketListener; public final class Example implements EntryPoint, WebSocketListener { private static final Logger LOG = Logger.getLogger( Example.class.getName() ); private HTML _messages; private ScrollPanel _scrollPanel; private Button _disconnect; private Button _connect; private Button _send; private CheckBox _subProtocol; public void onModuleLoad() { final WebSocket webSocket = WebSocket.newWebSocketIfSupported(); if ( null == webSocket ) { Window.alert( "WebSocket not available!" ); } else { webSocket.setListener( this ); final TextBox input = new TextBox(); input.setValue( "Greetings!" ); final CheckBox checkBox = new CheckBox( "Binary?" ); _subProtocol = new CheckBox( "Use sub-protocol?" 
); _connect = new Button( "Connect", new ClickHandler() { @Override public void onClick( final ClickEvent event ) { _connect.setEnabled( false ); _subProtocol.setEnabled( false ); if ( Boolean.TRUE == _subProtocol.getValue() ) { webSocket.connect( getWebSocketURL(), "superchat" ); } else { webSocket.connect( getWebSocketURL() ); } } } ); _disconnect = new Button( "Disconnect", new ClickHandler() { @Override public void onClick( ClickEvent event ) { webSocket.close(); _disconnect.setEnabled( false ); } } ); _disconnect.setEnabled( false ); _send = new Button( "Send", new ClickHandler() { @Override public void onClick( ClickEvent event ) { send( webSocket, input.getValue(), Boolean.TRUE == checkBox.getValue() ); } } ); _send.setEnabled( false ); _messages = new HTML(); _scrollPanel = new ScrollPanel(); _scrollPanel.setHeight( "250px" ); _scrollPanel.add( _messages ); RootPanel.get().add( _scrollPanel ); { final FlowPanel controls = new FlowPanel(); controls.add( _connect ); controls.add( _disconnect ); controls.add( _subProtocol ); RootPanel.get().add( controls ); } { final FlowPanel controls = new FlowPanel(); controls.add( input ); controls.add( checkBox ); controls.add( _send ); RootPanel.get().add( controls ); } } } private String getWebSocketURL() { final String moduleBaseURL = GWT.getHostPageBaseURL(); return moduleBaseURL.replaceFirst( "^http\\:", "ws:" ) + "chat"; } private void send( final WebSocket webSocket, final String message, final boolean binary ) { if ( binary ) { final Int8Array arrayBuffer = TypedArrays.createInt8Array( message.length() ); arrayBuffer.set( message.getBytes() ); webSocket.send( arrayBuffer ); } else { webSocket.send( message ); } } public void onMessage( @Nonnull final WebSocket webSocket, @Nonnull final ArrayBuffer data ) { logStatus( "Message", webSocket ); final Int8Array arrayBuffer = TypedArrays.createInt8Array( data ); final StringBuilder sb = new StringBuilder(); for ( int i = 0; i < arrayBuffer.length(); i++ ) { sb.append( 
(char) arrayBuffer.get( i ) ); } appendText( "binary message: " + sb, "black" ); } public void onMessage( @Nonnull final WebSocket webSocket, @Nonnull final String textData ) { logStatus( "Message", webSocket ); appendText( "message: " + textData, "black" ); } public void onError( @Nonnull final WebSocket webSocket ) { logStatus( "Error", webSocket ); appendText( "error", "red" ); _connect.setEnabled( false ); _subProtocol.setEnabled( false ); _disconnect.setEnabled( false ); _send.setEnabled( false ); } @Override public void onClose( @Nonnull final WebSocket webSocket, final boolean wasClean, final int code, @Nullable final String reason ) { logStatus( "Close", webSocket ); appendText( "close", "silver" ); _connect.setEnabled( true ); _subProtocol.setEnabled( true ); _disconnect.setEnabled( false ); _send.setEnabled( false ); } public void onOpen( @Nonnull final WebSocket webSocket ) { logStatus( "Open", webSocket ); appendText( "open", "silver" ); _disconnect.setEnabled( true ); _send.setEnabled( true ); } private void logStatus( @Nonnull final String section, @Nonnull final WebSocket webSocket ) { final String suffix = !webSocket.isConnected() ? "" : "URL:" + webSocket.getURL() + "\n" + "BinaryType:" + webSocket.getBinaryType() + "\n" + "BufferedAmount:" + webSocket.getBufferedAmount() + "\n" + "Extensions:" + webSocket.getExtensions() + "\n" + "Protocol:" + webSocket.getProtocol(); LOG.warning( "WebSocket @ " + section + "\n" + "ReadyState:" + webSocket.getReadyState() + "\n" + suffix ); } private void appendText( final String text, final String color ) { final DivElement div = Document.get().createDivElement(); div.setInnerText( text ); div.setAttribute( "style", "color:" + color ); _messages.getElement().appendChild( div ); _scrollPanel.scrollToBottom(); } }
package net.johnewart.gearman.server.persistence;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.jolbox.bonecp.BoneCP;
import com.jolbox.bonecp.BoneCPConfig;
import net.johnewart.gearman.constants.JobPriority;
import net.johnewart.gearman.common.Job;
import net.johnewart.gearman.server.core.QueuedJob;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.sql.*;
import java.util.Collection;
import java.util.LinkedList;

/**
 * PersistenceEngine backed by a PostgreSQL "jobs" table, using a BoneCP
 * connection pool. Jobs are stored as JSON (Jackson) alongside indexed
 * columns for lookup by unique id, function name and job handle.
 */
public class PostgresQueue implements PersistenceEngine {
    private static Logger LOG = LoggerFactory.getLogger(PostgresQueue.class);
    private final String url;
    private final String user;
    private final String password;
    private BoneCP connectionPool;
    // Page size for the chunked full-table scan in readAll().
    private final int jobsPerPage = 5000;

    /**
     * Builds the JDBC URL, initializes the connection pool and ensures the
     * jobs table exists.
     *
     * NOTE(review): on failure the SQLException is printed and swallowed,
     * leaving connectionPool null — every later method will then NPE on
     * connectionPool.getConnection(). Consider failing fast instead.
     */
    public PostgresQueue(String hostname, int port, String database, String user, String password) {
        this.url = "jdbc:postgresql://" + hostname + ":" + port + "/" + database;
        this.user = user;
        this.password = password;

        try {
            BoneCPConfig config = new BoneCPConfig();
            config.setJdbcUrl(this.url);
            config.setUsername(this.user);
            config.setPassword(this.password);
            config.setMinConnectionsPerPartition(10);
            config.setMaxConnectionsPerPartition(20);
            config.setPartitionCount(1);
            connectionPool = new BoneCP(config);

            if(!validateOrCreateTable()) {
                throw new SQLException("Unable to validate or create jobs table. Check credentials.");
            }
        } catch (SQLException se) {
            se.printStackTrace();
            connectionPool = null;
        }
    }

    /**
     * Returns a human-readable identifier combining database product/version
     * and the JDBC URL; falls back to the bare URL on error.
     *
     * NOTE(review): the Connection taken from the pool is never closed here,
     * leaking one pooled connection per call.
     */
    @Override
    public String getIdentifier() {
        String result = url;
        try {
            Connection connection = connectionPool.getConnection();
            DatabaseMetaData metaData = connection.getMetaData();
            int majorVersion, minorVersion;
            String productName, productVersion;
            majorVersion = metaData.getDatabaseMajorVersion();
            minorVersion = metaData.getDatabaseMinorVersion();
            productName = metaData.getDatabaseProductName();
            productVersion = metaData.getDatabaseProductVersion();
            result = String.format("%s (%s v%d.%d) - %s", productName, productVersion, majorVersion, minorVersion, url);
        } catch (SQLException e) {
            e.printStackTrace();
        }
        return result;
    }

    /**
     * Upserts a job keyed by (unique_id, function_name): tries an UPDATE
     * first and falls back to INSERT when no row was updated.
     *
     * NOTE(review): the UPDATE PreparedStatement is not closed before st is
     * reassigned to the INSERT statement; only the last one is closed in the
     * finally block. The UPDATE-then-INSERT pair is also not atomic.
     */
    @Override
    public void write(Job job) {
        PreparedStatement st = null;
        ResultSet rs = null;
        Connection conn = null;
        ObjectMapper mapper = new ObjectMapper();

        try {
            conn = connectionPool.getConnection();
            if(conn != null) {
                String jobJSON = mapper.writeValueAsString(job);

                // Update an existing job if one exists based on unique id
                st = conn.prepareStatement("UPDATE jobs SET job_handle = ?, priority = ?, time_to_run = ?, json_data = ? WHERE unique_id = ? AND function_name = ?");
                st.setString(1, job.getJobHandle());
                st.setString(2, job.getPriority().toString());
                st.setLong  (3, job.getTimeToRun());
                st.setString(4, jobJSON);
                st.setString(5, job.getUniqueID());
                st.setString(6, job.getFunctionName());
                int updated = st.executeUpdate();

                // No updates, insert a new record.
                if(updated == 0) {
                    st = conn.prepareStatement("INSERT INTO jobs (unique_id, function_name, time_to_run, priority, job_handle, json_data) VALUES (?, ?, ?, ?, ?, ?)");
                    st.setString(1, job.getUniqueID());
                    st.setString(2, job.getFunctionName());
                    st.setLong(3, job.getTimeToRun());
                    st.setString(4, job.getPriority().toString());
                    st.setString(5, job.getJobHandle());
                    st.setString(6, jobJSON);
                    int inserted = st.executeUpdate();
                    LOG.debug("Inserted " + inserted + " records for UUID " + job.getUniqueID());
                }
            }
        } catch (SQLException se) {
            se.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            try {
                if(st != null)
                    st.close();
                if(conn != null)
                    conn.close();
            } catch (SQLException innerEx) {
                // NOTE(review): cleanup failure silently swallowed.
            }
        }
    }

    // NOTE(review): intentionally(?) a no-op — deletion is only implemented via
    // delete(functionName, uniqueID) below. TODO confirm against the interface contract.
    @Override
    public void delete(Job job) {

    }

    /**
     * Deletes the row matching (function_name, unique_id).
     *
     * NOTE(review): the outer SQLException catch is empty — failures to delete
     * are invisible even in logs.
     */
    @Override
    public void delete(String functionName, String uniqueID) {
        PreparedStatement st = null;
        Connection conn = null;

        try {
            conn = connectionPool.getConnection();
            if(conn != null) {
                st = conn.prepareStatement("DELETE FROM jobs WHERE function_name = ? AND unique_id = ?");
                st.setString(1, functionName);
                st.setString(2, uniqueID);
                int deleted = st.executeUpdate();
                LOG.debug("Deleted " + deleted + " records for " + functionName + "/" +uniqueID);
            }
        } catch (SQLException se) {
            // NOTE(review): swallowed without logging.
        } finally {
            try {
                if(st != null)
                    st.close();
                if(conn != null)
                    conn.close();
            } catch (SQLException innerEx) {
                // NOTE(review): cleanup failure silently swallowed.
            }
        }
    }

    /**
     * Deletes every row in the jobs table.
     *
     * NOTE(review): the outer SQLException catch is empty, like delete() above.
     */
    @Override
    public void deleteAll() {
        Statement st = null;
        ResultSet rs = null;
        Connection conn = null;

        try {
            conn = connectionPool.getConnection();
            if(conn != null) {
                st = conn.createStatement();
                int deleted = st.executeUpdate("DELETE FROM jobs");
                LOG.debug("Deleted " + deleted + " jobs...");
            }
        } catch (SQLException se) {
            // NOTE(review): swallowed without logging.
        } finally {
            try {
                if(rs != null)
                    rs.close();
                if(st != null)
                    st.close();
                if(conn != null)
                    conn.close();
            } catch (SQLException innerEx) {
                LOG.debug("Error cleaning up: " + innerEx);
            }
        }
    }

    /**
     * Loads and deserializes one job by (function_name, unique_id), or returns
     * null when missing or on error.
     */
    @Override
    public Job findJob(String functionName, String uniqueID) {
        PreparedStatement st = null;
        ResultSet rs = null;
        Connection conn = null;
        Job job = null;
        try {
            conn = connectionPool.getConnection();
            if(conn != null) {
                st = conn.prepareStatement("SELECT * FROM jobs WHERE function_name = ? AND unique_id = ?");
                st.setString(1, functionName);
                st.setString(2, uniqueID);
                ObjectMapper mapper = new ObjectMapper();
                rs = st.executeQuery();
                if(rs.next()) {
                    String jobJSON = rs.getString("json_data");
                    job = mapper.readValue(jobJSON, Job.class);
                } else {
                    LOG.warn("No job for unique ID: " + uniqueID + " -- this could be an internal consistency problem...");
                }
            }
        } catch (SQLException se) {
            LOG.debug(se.toString());
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            try {
                if(rs != null)
                    rs.close();
                if(st != null)
                    st.close();
                if(conn != null)
                    conn.close();
            } catch (SQLException innerEx) {
                LOG.debug("Error cleaning up: " + innerEx);
            }
        }
        return job;
    }

    /**
     * Reads queue metadata (not the JSON payload) for every job, paging
     * through the table jobsPerPage rows at a time.
     *
     * NOTE(review): the COUNT ResultSet is overwritten by the page queries
     * without being closed, and the do/while terminates only when exactly
     * totalJobs rows have been fetched — if rows are inserted/removed between
     * the COUNT and the page scans this can loop forever or stop early.
     */
    @Override
    public Collection<QueuedJob> readAll() {
        LinkedList<QueuedJob> jobs = new LinkedList<>();
        Statement st = null;
        ResultSet rs = null;
        Connection conn = null;

        // Which page of results are we on?
        int pageNum = 0;

        try {
            conn = connectionPool.getConnection();
            if(conn != null) {
                st = conn.createStatement();
                st.setFetchSize(jobsPerPage);
                st.setMaxRows(jobsPerPage);
                LOG.debug("Reading all job data from PostgreSQL");
                rs = st.executeQuery("SELECT COUNT(*) AS jobCount FROM jobs");
                if(rs.next()) {
                    int totalJobs = rs.getInt("jobCount");
                    int fetchedJobs = 0;
                    LOG.debug("Reading " + totalJobs + " jobs from PostgreSQL");
                    QueuedJob currentJob;

                    do {
                        rs = st.executeQuery("SELECT function_name, priority, unique_id, time_to_run FROM jobs LIMIT " + jobsPerPage + " OFFSET " + (pageNum * jobsPerPage));

                        while(rs.next()) {
                            try {
                                currentJob = new QueuedJob(rs.getString("unique_id"), rs.getLong("time_to_run"), JobPriority.valueOf(rs.getString("priority")), rs.getString("function_name"));
                                jobs.add(currentJob);
                            } catch (Exception e) {
                                // Bad row (e.g. unknown priority); skip it but keep counting.
                                LOG.error("Unable to load job '" + rs.getString("unique_id") + "'");
                            }
                            fetchedJobs += 1;
                        }
                        pageNum += 1;
                        LOG.debug("Loaded " + fetchedJobs + "...");
                    } while(fetchedJobs != totalJobs);
                }
            }
        } catch (SQLException se) {
            LOG.debug(se.toString());
        } finally {
            try {
                if(rs != null)
                    rs.close();
                if(st != null)
                    st.close();
                if(conn != null)
                    conn.close();
            } catch (SQLException innerEx) {
                LOG.debug("Error cleaning up: " + innerEx);
            }
        }

        return jobs;
    }

    /**
     * Returns queue metadata for every job registered under the given
     * function name.
     */
    @Override
    public Collection<QueuedJob> getAllForFunction(String functionName) {
        LinkedList<QueuedJob> jobs = new LinkedList<>();
        PreparedStatement st = null;
        ResultSet rs = null;
        Connection conn = null;
        QueuedJob job;

        try {
            conn = connectionPool.getConnection();
            if(conn != null) {
                st = conn.prepareStatement("SELECT unique_id, time_to_run, priority FROM jobs WHERE function_name = ?");
                st.setString(1, functionName);
                ObjectMapper mapper = new ObjectMapper();
                rs = st.executeQuery();
                while(rs.next()) {
                    job = new QueuedJob(rs.getString("unique_id"), rs.getLong("time_to_run"), JobPriority.valueOf(rs.getString("priority")), functionName);
                    jobs.add(job);
                }
            }
        } catch (SQLException se) {
            LOG.debug(se.toString());
        } finally {
            try {
                if(rs != null)
                    rs.close();
                if(st != null)
                    st.close();
                if(conn != null)
                    conn.close();
            } catch (SQLException innerEx) {
                LOG.debug("Error cleaning up: " + innerEx);
            }
        }

        return jobs;
    }

    /**
     * Loads and deserializes one job by its job handle, or returns null when
     * missing or on error. Mirrors findJob() but keyed on job_handle.
     */
    @Override
    public Job findJobByHandle(String jobHandle) {
        PreparedStatement st = null;
        ResultSet rs = null;
        Connection conn = null;
        Job job = null;
        try {
            conn = connectionPool.getConnection();
            if(conn != null) {
                st = conn.prepareStatement("SELECT * FROM jobs WHERE job_handle = ?");
                st.setString(1, jobHandle);
                ObjectMapper mapper = new ObjectMapper();
                rs = st.executeQuery();
                if(rs.next()) {
                    String jobJSON = rs.getString("json_data");
                    job = mapper.readValue(jobJSON, Job.class);
                } else {
                    LOG.warn("No job for job handle: " + jobHandle + " -- this could be an internal consistency problem...");
                }
            }
        } catch (SQLException se) {
            LOG.debug(se.toString());
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            try {
                if(rs != null)
                    rs.close();
                if(st != null)
                    st.close();
                if(conn != null)
                    conn.close();
            } catch (SQLException innerEx) {
                LOG.debug("Error cleaning up: " + innerEx);
            }
        }
        return job;
    }

    /**
     * Ensures the jobs table (and its unique_id / job_handle indexes) exists,
     * creating it if absent.
     *
     * NOTE(review): the `tables` ResultSet from getTables() is never closed;
     * `mapper` is unused; the index-creation update counts are ignored.
     */
    private boolean validateOrCreateTable() {
        PreparedStatement st = null;
        ResultSet rs = null;
        Connection conn = null;
        ObjectMapper mapper = new ObjectMapper();
        boolean success = false;

        try {
            conn = connectionPool.getConnection();
            if(conn != null) {
                DatabaseMetaData dbm = conn.getMetaData();
                ResultSet tables = dbm.getTables(null, null, "jobs", null);
                if(!tables.next()) {
                    st = conn.prepareStatement("CREATE TABLE jobs(id bigserial, unique_id varchar(255), priority varchar(50), function_name varchar(255), time_to_run bigint, job_handle text, json_data text)");
                    int created = st.executeUpdate();
                    st = conn.prepareStatement("CREATE INDEX jobs_unique_id ON jobs(unique_id)");
                    int createdUniqueIDIndex = st.executeUpdate();
                    st = conn.prepareStatement("CREATE INDEX jobs_job_handle ON jobs(job_handle)");
                    int createdJobHandleIndex = st.executeUpdate();

                    if(created > 0) {
                        LOG.debug("Created jobs table");
                        success = true;
                    } else {
                        LOG.debug("Unable to create jobs table.");
                        success = false;
                    }
                } else {
                    LOG.debug("Jobs table already exists.");
                    success = true;
                }
            }
        } catch (SQLException se) {
            se.printStackTrace();
        } finally {
            try {
                if(st != null)
                    st.close();
                if(conn != null)
                    conn.close();
            } catch (SQLException innerEx) {
                innerEx.printStackTrace();
            }
        }

        return success;
    }
}
/*
 * Copyright 2013 The Closure Compiler Authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.javascript.jscomp.newtypes;

import com.google.common.base.Objects;
import com.google.common.base.Preconditions;
import com.google.common.collect.Multimap;

import java.util.List;
import java.util.Map;

/**
 * A single object property: the type inferred for it, an optional declared
 * type, and an attribute recording whether the property is constant,
 * required, or optional.
 *
 * <p>Instances are immutable; all "with"/combination methods return new
 * Property objects.
 *
 * @author blickly@google.com (Ben Lickly)
 * @author dimvar@google.com (Dimitris Vardoulakis)
 */
class Property {
  /** The inferred type of the property; never null. */
  private final JSType inferredType;
  /** The declared type, or null if the property was never declared. */
  private final JSType declaredType;

  // Attributes are ordered: constant <= required <= optional
  private enum Attribute {
    CONSTANT, // For required props only
    OPTIONAL,
    REQUIRED;
  }

  private final Attribute attribute;

  /**
   * @param inferredType must be non-null
   * @param declaredType may be null (property has no declaration)
   */
  private Property(
      JSType inferredType, JSType declaredType, Attribute attribute) {
    Preconditions.checkArgument(inferredType != null);
    this.inferredType = inferredType;
    this.declaredType = declaredType;
    this.attribute = attribute;
  }

  /** Creates a required (plain) property. */
  static Property make(JSType inferredType, JSType declaredType) {
    return new Property(inferredType, declaredType, Attribute.REQUIRED);
  }

  /** Creates a constant property (implies required). */
  static Property makeConstant(JSType inferredType, JSType declaredType) {
    return new Property(inferredType, declaredType, Attribute.CONSTANT);
  }

  /** Creates an optional property. */
  static Property makeOptional(JSType inferredType, JSType declaredType) {
    return new Property(inferredType, declaredType, Attribute.OPTIONAL);
  }

  boolean isOptional() {
    return attribute == Attribute.OPTIONAL;
  }

  boolean isConstant() {
    return attribute == Attribute.CONSTANT;
  }

  boolean isDeclared() {
    return declaredType != null;
  }

  /** Returns the inferred type; never null. */
  JSType getType() {
    return inferredType;
  }

  /** Returns the declared type, or null if the property was never declared. */
  JSType getDeclaredType() {
    return declaredType;
  }

  /** Returns a copy of this property with both types tagged with the location. */
  Property withLocation(String location) {
    return new Property(
        inferredType.withLocation(location),
        declaredType == null ? null : declaredType.withLocation(location),
        attribute);
  }

  /** Returns a copy of this property marked optional. */
  Property withOptional() {
    return new Property(inferredType, declaredType, Attribute.OPTIONAL);
  }

  /** Returns a copy of this property marked required. */
  Property withRequired() {
    return new Property(inferredType, declaredType, Attribute.REQUIRED);
  }

  // Attribute lattice is constant <= required <= optional; meet picks the
  // smaller (stricter) of the two attributes.
  private static Attribute meetAttributes(Attribute a1, Attribute a2) {
    if (a1 == Attribute.CONSTANT || a2 == Attribute.CONSTANT) {
      return Attribute.CONSTANT;
    }
    if (a1 == Attribute.REQUIRED || a2 == Attribute.REQUIRED) {
      return Attribute.REQUIRED;
    }
    return Attribute.OPTIONAL;
  }

  // Dual of meetAttributes: picks the larger (more permissive) attribute.
  private static Attribute joinAttributes(Attribute a1, Attribute a2) {
    if (a1 == Attribute.OPTIONAL || a2 == Attribute.OPTIONAL) {
      return Attribute.OPTIONAL;
    }
    if (a1 == Attribute.REQUIRED || a2 == Attribute.REQUIRED) {
      return Attribute.REQUIRED;
    }
    return Attribute.CONSTANT;
  }

  /**
   * Specializes this property's inferred type by the other property's
   * inferred type; the declared type is kept from {@code this}.
   */
  Property specialize(Property other) {
    return new Property(
        this.inferredType.specialize(other.inferredType),
        this.declaredType,
        meetAttributes(this.attribute, other.attribute));
  }

  /** Meet of two properties; the result has no declared type. */
  static Property meet(Property p1, Property p2) {
    return new Property(
        JSType.meet(p1.inferredType, p2.inferredType),
        null,
        meetAttributes(p1.attribute, p2.attribute));
  }

  /**
   * Join of two properties. The declared type is kept only when both
   * properties declare the same type; otherwise it is dropped.
   */
  static Property join(Property p1, Property p2) {
    JSType declType;
    JSType p1decl = p1.declaredType;
    JSType p2decl = p2.declaredType;
    if (p1decl == null || p2decl == null) {
      declType = null;
    } else if (p1decl.equals(p2decl)) {
      declType = p1decl;
    } else {
      declType = null;
    }
    return new Property(
        JSType.join(p1.inferredType, p2.inferredType),
        declType,
        joinAttributes(p1.attribute, p2.attribute));
  }

  /**
   * Unify the two types bidirectionally, ignoring type variables, but
   * treating JSType.UNKNOWN as a "hole" to be filled.
   * @return The unified type, or null if unification fails
   */
  static Property unifyUnknowns(Property p1, Property p2) {
    JSType unifiedDeclaredType = null;
    if (p1.declaredType != null && p2.declaredType != null) {
      unifiedDeclaredType =
          JSType.unifyUnknowns(p1.declaredType, p2.declaredType);
      if (unifiedDeclaredType == null) {
        return null;
      }
    }
    JSType unifiedInferredType =
        JSType.unifyUnknowns(p1.inferredType, p2.inferredType);
    if (unifiedInferredType == null) {
      return null;
    }
    return new Property(
        unifiedInferredType, unifiedDeclaredType,
        meetAttributes(p1.attribute, p2.attribute));
  }

  /** Returns whether unification succeeded */
  boolean unifyWith(
      Property other,
      List<String> typeParameters,
      Multimap<String, JSType> typeMultimap) {
    if (!inferredType.unifyWith(
        other.inferredType, typeParameters, typeMultimap)) {
      return false;
    } else if (declaredType != null && other.declaredType != null
        && !declaredType.unifyWith(
            other.declaredType, typeParameters, typeMultimap)) {
      return false;
    }
    return true;
  }

  /** Substitutes concrete types for type variables in both types. */
  Property substituteGenerics(Map<String, JSType> concreteTypes) {
    return new Property(
        inferredType.substituteGenerics(concreteTypes),
        declaredType == null
            ? null : declaredType.substituteGenerics(concreteTypes),
        attribute);
  }

  @Override
  public String toString() {
    return appendTo(new StringBuilder()).toString();
  }

  /**
   * Appends a printable form: the inferred type, suffixed with '^' for
   * constant and '=' for optional properties.
   */
  public StringBuilder appendTo(StringBuilder builder) {
    switch (attribute) {
      case CONSTANT:
        return inferredType.appendTo(builder).append('^');
      case REQUIRED:
        return inferredType.appendTo(builder);
      case OPTIONAL:
        return inferredType.appendTo(builder).append('=');
      default:
        throw new RuntimeException("Unknown Attribute value " + attribute);
    }
  }

  // BUG FIX: the previous implementation asserted the argument was a
  // Property via Preconditions.checkArgument, so equals(null) and
  // equals(someOtherType) threw IllegalArgumentException instead of
  // returning false, violating the Object.equals contract. Equality is
  // still based on the inferred type and attribute only, consistent with
  // hashCode below.
  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (!(o instanceof Property)) {
      return false;
    }
    Property p2 = (Property) o;
    return inferredType.equals(p2.inferredType) && attribute == p2.attribute;
  }

  @Override
  public int hashCode() {
    return Objects.hashCode(inferredType, attribute);
  }
}
package org.ihtsdo.otf.security.xml.base;

// Imported TraX classes
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.util.Enumeration;
import java.util.Properties;
import java.util.logging.Level;
import java.util.logging.Logger;

import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.transform.OutputKeys;
import javax.xml.transform.Result;
import javax.xml.transform.Source;
import javax.xml.transform.Templates;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerConfigurationException;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.dom.DOMResult;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
import javax.xml.transform.stream.StreamSource;

import org.ihtsdo.otf.security.objectcache.ObjectCache;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.xml.sax.InputSource;

/**
 * XSLT helper built on the JAXP (TraX) API. Compiled stylesheets
 * ({@link Templates}) are cached in {@link ObjectCache} keyed by file name,
 * so each stylesheet is compiled at most once per process.
 *
 * @author Adam Flinton , 2001
 * @version 1.1
 */
public class ProcessXSLT {

	private static final Logger LOG = Logger.getLogger(ProcessXSLT.class
			.getName());

	/**
	 * Create a new ProcessXSLT object.
	 */
	public ProcessXSLT() {
		super();
	}

	/**
	 * Returns the compiled stylesheet for the given file name, compiling and
	 * caching it on first use (double-checked against the cache under a class
	 * lock so concurrent callers compile it only once).
	 *
	 * @param fileName name of the XSLT stylesheet
	 * @return the cached {@link Templates}
	 */
	public static Templates getSheet(final String fileName) throws Exception {
		Templates s = (Templates) ObjectCache.INSTANCE.get(fileName);
		if (s == null) {
			synchronized (ProcessXSLT.class) {
				// Re-check: another thread may have compiled it while we
				// waited for the lock.
				s = (Templates) ObjectCache.INSTANCE.get(fileName);
				if (s == null) {
					String full = fullqual(fileName);
					TransformerFactory tFactory = TransformerFactory
							.newInstance();
					s = tFactory.newTemplates(new StreamSource(full));
					ObjectCache.INSTANCE.put(fileName, s);
				}
			}
		}
		return s; // return the cached copy.
	}

	/**
	 * Transforms an XML document stored in a file.
	 *
	 * @param xmlfileName
	 *            The name of the file
	 * @param xslfileName
	 *            The name of the XSLT stylesheet to use
	 * @return The output of the transformation
	 */
	public final String transformFiles(final String xmlfileName,
			final String xslfileName) throws Exception {
		String fullfn = fullqual(xslfileName);
		InputSource src1 = new InputSource(fullfn);
		String srcURL1 = src1.getSystemId();
		Templates s = getSheet(srcURL1);

		String full = fullqual(xmlfileName);
		InputSource src = new InputSource(full);
		String srcURL = src.getSystemId();

		java.io.StringWriter sw = new java.io.StringWriter();
		// Templates is thread-safe; each Transformer obtained from it is not,
		// so a fresh one is created per call.
		Transformer trans = s.newTransformer();
		trans.transform(new StreamSource(srcURL), new StreamResult(sw));
		return sw.toString();
	}

	/**
	 * Transforms an XML document stored in an InputStream.
	 *
	 * @param srcStream
	 *            The InputStream
	 * @param xslfileName
	 *            The name of the XSLT stylesheet to use
	 * @return The output of the transformation
	 */
	public final String transformStream(final java.io.InputStream srcStream,
			final String xslfileName) throws Exception {
		String fullfn = fullqual(xslfileName);
		InputSource src1 = new InputSource(fullfn);
		String srcURL1 = src1.getSystemId();
		Templates s = getSheet(srcURL1);

		java.io.StringWriter sw = new java.io.StringWriter();
		Transformer trans = s.newTransformer();
		trans.transform(new StreamSource(srcStream), new StreamResult(sw));
		return sw.toString();
	}

	/**
	 * Transforms an XML document stored in an InputStream and returns the
	 * result as a new InputStream.
	 *
	 * @param srcStream
	 *            The InputStream
	 * @param xslfileName
	 *            The name of the XSLT stylesheet to use
	 * @return a stream over the transformation output bytes
	 */
	public final InputStream transformStream2Stream(
			final java.io.InputStream srcStream, final String xslfileName)
			throws Exception {
		String fullfn = fullqual(xslfileName);
		InputSource src1 = new InputSource(fullfn);
		String srcURL1 = src1.getSystemId();
		Templates s = getSheet(srcURL1);
		Transformer trans = s.newTransformer();
		ByteArrayOutputStream out = new ByteArrayOutputStream();
		trans.transform(new StreamSource(srcStream), new StreamResult(out));
		return new ByteArrayInputStream(out.toByteArray());
	}

	/**
	 * Transforms the XML document located by {@code inputxml} (a file name,
	 * resource, or URL — see {@link #fullqual}) using the stylesheet
	 * associated with the document itself (xml-stylesheet processing
	 * instruction). Best-effort: any failure is logged and an empty string is
	 * returned.
	 *
	 * @param inputxml
	 *            location of the XML document
	 * @return The output of the transformation, or "" on failure
	 */
	public static String transformString(final String inputxml)
			throws Exception {
		String media = null, title = null, charset = null;
		String result = "";
		String full = fullqual(inputxml);
		InputSource src = new InputSource(full);
		String srcURL = src.getSystemId();
		try {
			TransformerFactory tFactory = TransformerFactory.newInstance();
			Source stylesheet = tFactory.getAssociatedStylesheet(
					new StreamSource(srcURL), media, title, charset);
			Transformer transformer = tFactory.newTransformer(stylesheet);
			java.io.StringWriter sw = new java.io.StringWriter();
			transformer.transform(new StreamSource(srcURL),
					new StreamResult(sw));
			result = sw.toString();
		} catch (Exception e) {
			// Deliberately best-effort: log and fall through to "".
			LOG.log(Level.SEVERE, "An exception has occurred", e);
		}
		return result;
	}

	/**
	 * Transforms an XML document stored in a string.
	 *
	 * @param inputxml
	 *            The string
	 * @param xslfileName
	 *            The name of the XSLT stylesheet to use
	 * @return The output of the transformation
	 */
	public final String transformString(final String inputxml,
			final String xslfileName) throws Exception {
		String fullfn = fullqual(xslfileName);
		InputSource src1 = new InputSource(fullfn);
		String srcURL1 = src1.getSystemId();
		Templates s = getSheet(srcURL1);

		// BUG FIX: previously used inputxml.getBytes() with the platform
		// default charset, which corrupts non-ASCII content on non-UTF-8
		// platforms. XML without an encoding declaration is UTF-8 by
		// default, so encode explicitly as UTF-8.
		byte[] buf = inputxml.getBytes(StandardCharsets.UTF_8);
		InputStream is = new ByteArrayInputStream(buf);
		java.io.StringWriter sw = new java.io.StringWriter();
		Transformer trans = s.newTransformer();
		trans.transform(new StreamSource(is), new StreamResult(sw));
		return sw.toString();
	}

	/**
	 * Transforms a DOM node with the given stylesheet into a new DOM
	 * {@link Document}.
	 *
	 * @param inputxml
	 *            the source node
	 * @param xslfileName
	 *            The name of the XSLT stylesheet to use
	 * @return the transformation result as a new Document
	 */
	public final Document transformDoc(final Node inputxml,
			final String xslfileName) throws Exception {
		DOMSource domSource = new DOMSource(inputxml);
		DocumentBuilder builder = DocumentBuilderFactory.newInstance()
				.newDocumentBuilder();
		Document resultD = builder.newDocument();
		Result result = new DOMResult(resultD);
		String fullfn = fullqual(xslfileName);
		InputSource src1 = new InputSource(fullfn);
		String srcURL1 = src1.getSystemId();
		Templates s = getSheet(srcURL1);
		Transformer trans = s.newTransformer();
		trans.transform(domSource, result);
		return resultD;
	}

	/**
	 * Like {@link #transformDoc(Node, String)}, but additionally applies the
	 * given output properties to the transformer.
	 *
	 * @param inputxml
	 *            the source node
	 * @param xslfileName
	 *            The name of the XSLT stylesheet to use
	 * @param props
	 *            transformer output properties
	 * @return the transformation result as a new Document
	 */
	public final Document transformNodeProps(final Node inputxml,
			final String xslfileName, final Properties props) throws Exception {
		DOMSource domSource = new DOMSource(inputxml);
		DocumentBuilder builder = DocumentBuilderFactory.newInstance()
				.newDocumentBuilder();
		Document resultD = builder.newDocument();
		Result result = new DOMResult(resultD);
		String fullfn = fullqual(xslfileName);
		InputSource src1 = new InputSource(fullfn);
		String srcURL1 = src1.getSystemId();
		Templates s = getSheet(srcURL1);
		Transformer trans = s.newTransformer();
		trans.setOutputProperties(props);
		trans.transform(domSource, result);
		return resultD;
	}

	/**
	 * Transforms a node in the context of its owning document. When
	 * {@code inputxml} is not itself a document, the whole owner document is
	 * used as the source and the node is identified to the stylesheet via a
	 * UUID attribute parameter; any extra {@code props} are passed through as
	 * stylesheet parameters as well.
	 *
	 * @param inputxml
	 *            the source node (element or document)
	 * @param xslfileName
	 *            The name of the XSLT stylesheet to use
	 * @param xMLFilename
	 *            passed to the stylesheet as the XML-filename parameter
	 * @param props
	 *            additional stylesheet parameters (may be null/empty)
	 * @return the transformation result as a new Document
	 */
	public final Document transformNodeIncDoc(final Node inputxml,
			final String xslfileName, final String xMLFilename,
			final Properties props) throws Exception {
		boolean isNode = false;
		if (inputxml.getNodeType() != Node.DOCUMENT_NODE) {
			isNode = true;
		}
		String fullfn = fullqual(xslfileName);
		InputSource src1 = new InputSource(fullfn);
		String srcURL1 = src1.getSystemId();
		Templates s = getSheet(srcURL1);
		Transformer trans = s.newTransformer();
		DOMSource domSource = null;
		DocumentBuilder builder = DocumentBuilderFactory.newInstance()
				.newDocumentBuilder();
		Document resultD = builder.newDocument();
		Result result = new DOMResult(resultD);

		if (isNode) {
			// Transform the whole owner document, telling the stylesheet
			// which node to locate via its secret-UUID attribute.
			Document parent = inputxml.getOwnerDocument();
			domSource = new DOMSource(parent);
			Element inputE = (Element) inputxml;
			String uuid = inputE.getAttribute(CommonXMLStatics.SECRETUUID);
			trans.setParameter(CommonXMLStatics.ACTION_PARAM_NODELOCATE, uuid);
			if (props != null && !props.isEmpty()) {
				for (Enumeration<Object> e = props.keys(); e
						.hasMoreElements();) {
					String key = (String) e.nextElement();
					String value = props.getProperty(key);
					trans.setParameter(key, value);
				}
			}
		} else {
			domSource = new DOMSource(inputxml);
		}
		trans.setParameter(CommonXMLStatics.ACTION_PARAM_XMLFILENAME,
				xMLFilename);
		trans.transform(domSource, result);
		return resultD;
	}

	/**
	 * Returns a fresh transformer for the given (cached) stylesheet.
	 *
	 * @param xslfilename
	 *            The name of the XSLT stylesheet
	 */
	public final Transformer getTransformer(final String xslfilename)
			throws Exception {
		Templates s = getSheet(xslfilename);
		return s.newTransformer();
	}

	/**
	 * Returns a cached identity transformer configured for pretty-printed XML
	 * without an XML declaration.
	 *
	 * <p>NOTE(review): {@link Transformer} instances are not thread-safe, yet
	 * this single instance is shared process-wide via the cache — confirm all
	 * callers are single-threaded before relying on this.
	 */
	public static Transformer getEmptyTransformer()
			throws TransformerConfigurationException {
		String def = "DEFAULTTRANSFORMER";
		Transformer transformer = null;
		transformer = (Transformer) ObjectCache.INSTANCE.get(def);
		if (transformer == null) {
			synchronized (ProcessXSLT.class) {
				transformer = (Transformer) ObjectCache.INSTANCE.get(def);
				if (transformer == null) {
					// may have changed between first if and synch call...
					transformer = TransformerFactory.newInstance()
							.newTransformer();
					Properties props = new Properties();
					props.put(OutputKeys.METHOD, "xml");
					props.put(OutputKeys.OMIT_XML_DECLARATION, "yes");
					props.put(OutputKeys.INDENT, "yes");
					transformer.setOutputProperties(props);
					ObjectCache.INSTANCE.put(def, transformer);
				}
			}
		}
		return transformer;
	}

	/**
	 * Resolves a name to something a {@link StreamSource} can open: an
	 * existing local file (as a file: URL), else a classpath resource URL,
	 * else the name unchanged (assumed to already be a URL).
	 *
	 * @param filename
	 *            file name, classpath resource path, or URL
	 * @return a resolved location string
	 */
	public static String fullqual(final String filename) throws Exception {
		java.net.URL fins = ProcessXSLT.class.getResource(filename);
		File f1 = new File(filename);
		if (f1.exists()) {
			try {
				// BUG FIX: File.toURL() is deprecated because it does not
				// escape special characters (spaces etc.); toURI().toURL()
				// produces a correctly escaped URL.
				return f1.toURI().toURL().toString();
			} catch (java.net.MalformedURLException mfu) {
				LOG.log(Level.SEVERE,
						"java.Net.MalfromedURLException caught in GetFileURL.fullqual Error = ",
						mfu);
				return filename;
			}
		} // end checking if local file or URL
		else if (fins != null) {
			return fins.toString();
		} else {
			return filename;
		}
	}
}
package mat.model;

import com.google.gwt.user.client.rpc.IsSerializable;

/**
 * DTO describing a QDM (Quality Data Model) element: its code list, data
 * type, occurrence, VSAC metadata and related flags.
 *
 * <p>Identity is based solely on {@link #getId()}; see {@link #equals} and
 * {@link #hashCode}.
 */
public class QualityDataSetDTO implements IsSerializable {

	/**
	 * Comparator ordering DTOs by their QDM element display string
	 * ("codeListName: dataType").
	 */
	public static class Comparator implements
			java.util.Comparator<QualityDataSetDTO>, IsSerializable {
		/* (non-Javadoc)
		 * @see java.util.Comparator#compare(java.lang.Object, java.lang.Object)
		 */
		@Override
		public int compare(QualityDataSetDTO o1, QualityDataSetDTO o2) {
			return o1.getQDMElement().compareTo(o2.getQDMElement());
		}
	}

	/** The code list name. */
	private String codeListName;

	/** The code system name. */
	private String codeSystemName;

	/** The data type. */
	private String dataType;

	/** The effective date. */
	private String effectiveDate;

	/** QDM Modified At VSAC. */
	private boolean hasModifiedAtVSAC;

	/** The id (identity of this DTO — see equals/hashCode). */
	private String id;

	/** Whether this element is in use. */
	private boolean isUsed;

	/** QDM is not available in VSAC. */
	private boolean notFoundInVSAC;

	/** The occurrence text. */
	private String occurrenceText;

	/** The Specific Occurrence flag. */
	private boolean specificOccurrence;

	/** The oid. */
	private String oid;

	/** Whether this is a supplemental data element. */
	private boolean suppDataElement;

	/** The taxonomy. */
	private String taxonomy;

	/** The type. */
	private String type;

	/** The uuid. */
	private String uuid;

	/** The version. */
	private String version;

	/** Whether the data type has been removed. */
	private boolean dataTypeHasRemoved;

	/** The expansion identifier (expansion profile). */
	private String expansionIdentifier;

	/** The VSAC expansion identifier. */
	private String vsacExpIdentifier;

	/** The value set type. */
	private String valueSetType;

	/**
	 * Equality is based on {@link #getId()} only.
	 *
	 * <p>BUG FIX: the previous implementation cast the argument without an
	 * instanceof check (ClassCastException for foreign types, NPE for null
	 * arguments or null ids). It now follows the Object.equals contract and
	 * returns false in those cases instead of throwing.
	 */
	@Override
	public boolean equals(Object o) {
		if (this == o) {
			return true;
		}
		if (!(o instanceof QualityDataSetDTO)) {
			return false;
		}
		final QualityDataSetDTO temp = (QualityDataSetDTO) o;
		return (id != null) && id.equals(temp.getId());
	}

	/**
	 * Consistent with {@link #equals}: hash of the id (0 when id is null).
	 * Added because overriding equals without hashCode breaks hash-based
	 * collections.
	 */
	@Override
	public int hashCode() {
		return (id == null) ? 0 : id.hashCode();
	}

	/** @return the code list name */
	public String getCodeListName() {
		return codeListName;
	}

	/** @return the code system name */
	public String getCodeSystemName() {
		return codeSystemName;
	}

	/** @return the data type */
	public String getDataType() {
		return dataType;
	}

	/** @return the effective date */
	public String getEffectiveDate() {
		return effectiveDate;
	}

	/** @return whether the QDM was modified at VSAC */
	public boolean getHasModifiedAtVSAC() {
		return hasModifiedAtVSAC;
	}

	/** @return the id */
	public String getId() {
		return id;
	}

	/** @return the occurrence text */
	public String getOccurrenceText() {
		return occurrenceText;
	}

	/** @return the oid */
	public String getOid() {
		return oid;
	}

	/** @return the QDM element display string "codeListName: dataType" */
	public String getQDMElement() {
		return codeListName + ": " + dataType;
	}

	/** @return the taxonomy */
	public String getTaxonomy() {
		return taxonomy;
	}

	/** @return the type */
	public String getType() {
		return type;
	}

	/** @return the uuid */
	public String getUuid() {
		return uuid;
	}

	/** @return the version */
	public String getVersion() {
		return version;
	}

	/** @return true if the QDM is not available in VSAC */
	public boolean isNotFoundInVSAC() {
		return notFoundInVSAC;
	}

	/** @return true if this is a supplemental data element */
	public boolean isSuppDataElement() {
		return suppDataElement;
	}

	/** @return true if this element is in use */
	public boolean isUsed() {
		return isUsed;
	}

	/** @param codeListName the new code list name */
	public void setCodeListName(String codeListName) {
		this.codeListName = codeListName;
	}

	/** @param codeSystemName the new code system name */
	public void setCodeSystemName(String codeSystemName) {
		this.codeSystemName = codeSystemName;
	}

	/** @param dataType the new data type */
	public void setDataType(String dataType) {
		this.dataType = dataType;
	}

	/** @param effectiveDate the effective date to set */
	public void setEffectiveDate(String effectiveDate) {
		this.effectiveDate = effectiveDate;
	}

	/** @param hasModifiedAtVSAC the hasModifiedAtVSAC to set */
	public void setHasModifiedAtVSAC(boolean hasModifiedAtVSAC) {
		this.hasModifiedAtVSAC = hasModifiedAtVSAC;
	}

	/** @param id the new id */
	public void setId(String id) {
		this.id = id;
	}

	/** @param notFoundInVSAC the notFoundInVSAC to set */
	public void setNotFoundInVSAC(boolean notFoundInVSAC) {
		this.notFoundInVSAC = notFoundInVSAC;
	}

	/** @param occurrenceText the new occurrence text */
	public void setOccurrenceText(String occurrenceText) {
		this.occurrenceText = occurrenceText;
	}

	/** @param oid the new oid */
	public void setOid(String oid) {
		this.oid = oid;
	}

	/** @param suppDataElement the new supplemental-data-element flag */
	public void setSuppDataElement(boolean suppDataElement) {
		this.suppDataElement = suppDataElement;
	}

	/** @param taxonomy the new taxonomy */
	public void setTaxonomy(String taxonomy) {
		this.taxonomy = taxonomy;
	}

	/** @param type the type to set */
	public void setType(String type) {
		this.type = type;
	}

	/** @param isUsed the new used flag */
	public void setUsed(boolean isUsed) {
		this.isUsed = isUsed;
	}

	/** @param uuid the new uuid */
	public void setUuid(String uuid) {
		this.uuid = uuid;
	}

	/** @param version the new version */
	public void setVersion(String version) {
		this.version = version;
	}

	/** @return true if the data type has been removed */
	public boolean isDataTypeHasRemoved() {
		return dataTypeHasRemoved;
	}

	/** @param dataTypeHasRemoved the new data-type-has-removed flag */
	public void setDataTypeHasRemoved(boolean dataTypeHasRemoved) {
		this.dataTypeHasRemoved = dataTypeHasRemoved;
	}

	/* (non-Javadoc)
	 * @see java.lang.Object#toString()
	 */
	@Override
	public String toString() {
		if ((occurrenceText != null) && !occurrenceText.equals("")) {
			return occurrenceText + " of " + codeListName + ": " + dataType
					+ "-" + getOid();
		} else {
			return codeListName + ": " + dataType + "-" + getOid();
		}
	}

	/**
	 * Compares two DTOs by uuid. Note: this is a plain method on the DTO,
	 * distinct from the nested {@link Comparator} (which orders by QDM
	 * element).
	 *
	 * @param o1 the first DTO
	 * @param o2 the second DTO
	 * @return the comparison result of the uuids
	 */
	public int compare(QualityDataSetDTO o1, QualityDataSetDTO o2) {
		final int num = o1.getUuid().compareTo(o2.getUuid());
		return num;
	}

	/** @return the specificOccurrence flag */
	public boolean isSpecificOccurrence() {
		return specificOccurrence;
	}

	/** @param specificOccurrence the specificOccurrence to set */
	public void setSpecificOccurrence(boolean specificOccurrence) {
		this.specificOccurrence = specificOccurrence;
	}

	/** @return the expansion identifier */
	public String getExpansionIdentifier() {
		return expansionIdentifier;
	}

	/** @param expansionIdentifier the new expansion identifier */
	public void setExpansionIdentifier(String expansionIdentifier) {
		this.expansionIdentifier = expansionIdentifier;
	}

	/** @return the VSAC expansion identifier */
	public String getVsacExpIdentifier() {
		return vsacExpIdentifier;
	}

	/** @param vsacExpIdentifier the new VSAC expansion identifier */
	public void setVsacExpIdentifier(String vsacExpIdentifier) {
		this.vsacExpIdentifier = vsacExpIdentifier;
	}

	/** @return the value set type */
	public String getValueSetType() {
		return valueSetType;
	}

	/** @param valueSetType the new value set type */
	public void setValueSetType(String valueSetType) {
		this.valueSetType = valueSetType;
	}

	/** Default constructor (required for GWT serialization). */
	public QualityDataSetDTO() {
		super();
	}

	/**
	 * Convenience constructor.
	 *
	 * @param id the id
	 * @param dataType the data type
	 * @param codeListName the code list name
	 * @param occurrenceText the occurrence text
	 */
	public QualityDataSetDTO(String id, String dataType, String codeListName,
			String occurrenceText) {
		super();
		this.id = id;
		this.dataType = dataType;
		this.codeListName = codeListName;
		this.occurrenceText = occurrenceText;
	}

	/**
	 * Convenience constructor.
	 *
	 * @param id the id
	 * @param dataType the data type
	 * @param codeListName the code list name
	 * @param occurrenceText the occurrence text
	 * @param oid the oid
	 * @param suppDataElement the supplemental-data-element flag
	 */
	public QualityDataSetDTO(String id, String dataType, String codeListName,
			String occurrenceText, String oid, boolean suppDataElement) {
		super();
		this.id = id;
		this.dataType = dataType;
		this.codeListName = codeListName;
		this.occurrenceText = occurrenceText;
		this.oid = oid;
		this.suppDataElement = suppDataElement;
	}
}
package apple.naturallanguage;

import apple.NSObject;
import apple.foundation.NSArray;
import apple.foundation.NSMethodSignature;
import apple.foundation.NSSet;
import apple.foundation.NSValue;
import apple.foundation.struct.NSRange;
import org.moe.natj.c.ann.FunctionPtr;
import org.moe.natj.general.NatJ;
import org.moe.natj.general.Pointer;
import org.moe.natj.general.ann.ByValue;
import org.moe.natj.general.ann.Generated;
import org.moe.natj.general.ann.Library;
import org.moe.natj.general.ann.Mapped;
import org.moe.natj.general.ann.NInt;
import org.moe.natj.general.ann.NUInt;
import org.moe.natj.general.ann.Owned;
import org.moe.natj.general.ann.Runtime;
import org.moe.natj.general.ptr.BoolPtr;
import org.moe.natj.general.ptr.VoidPtr;
import org.moe.natj.objc.Class;
import org.moe.natj.objc.ObjCRuntime;
import org.moe.natj.objc.SEL;
import org.moe.natj.objc.ann.ObjCBlock;
import org.moe.natj.objc.ann.ObjCClassBinding;
import org.moe.natj.objc.ann.Selector;
import org.moe.natj.objc.map.ObjCObjectMapper;

/**
 * Generated NatJ (Multi-OS Engine) Java binding for the Objective-C class
 * {@code NLTokenizer} in Apple's NaturalLanguage framework. Every method is a
 * native bridge to the Objective-C selector named in its {@code @Selector}
 * annotation; this file declares bindings only and contains no Java logic.
 *
 * <p>NOTE(review): generated code — regenerate rather than hand-edit bridge
 * declarations.
 */
@Generated
@Library("NaturalLanguage")
@Runtime(ObjCRuntime.class)
@ObjCClassBinding
public class NLTokenizer extends NSObject {
    // Registers this binding class with the NatJ runtime on class load.
    static {
        NatJ.register();
    }

    // Wraps an existing native Objective-C instance; used by the runtime.
    @Generated
    protected NLTokenizer(Pointer peer) {
        super(peer);
    }

    @Generated
    @Selector("accessInstanceVariablesDirectly")
    public static native boolean accessInstanceVariablesDirectly();

    @Generated
    @Owned
    @Selector("alloc")
    public static native NLTokenizer alloc();

    @Owned
    @Generated
    @Selector("allocWithZone:")
    public static native NLTokenizer allocWithZone(VoidPtr zone);

    @Generated
    @Selector("automaticallyNotifiesObserversForKey:")
    public static native boolean automaticallyNotifiesObserversForKey(String key);

    @Generated
    @Selector("cancelPreviousPerformRequestsWithTarget:")
    public static native void cancelPreviousPerformRequestsWithTarget(@Mapped(ObjCObjectMapper.class) Object aTarget);

    @Generated
    @Selector("cancelPreviousPerformRequestsWithTarget:selector:object:")
    public static native void cancelPreviousPerformRequestsWithTargetSelectorObject(
            @Mapped(ObjCObjectMapper.class) Object aTarget, SEL aSelector,
            @Mapped(ObjCObjectMapper.class) Object anArgument);

    @Generated
    @Selector("classFallbacksForKeyedArchiver")
    public static native NSArray<String> classFallbacksForKeyedArchiver();

    @Generated
    @Selector("classForKeyedUnarchiver")
    public static native Class classForKeyedUnarchiver();

    @Generated
    @Selector("debugDescription")
    public static native String debugDescription_static();

    @Generated
    @Selector("description")
    public static native String description_static();

    /**
     * The tokenizer will segment the string as needed into tokens for the given unit, and return those ranges via a block iterator, that iterates over all tokens intersecting a given range, supplying token ranges and flags. The range passed in must not extend beyond the end of the tokenizer's string, or the method will raise an exception. Note that a given instance of NLTokenizer should not be used from more than one thread simultaneously.
     */
    @Generated
    @Selector("enumerateTokensInRange:usingBlock:")
    public native void enumerateTokensInRangeUsingBlock(@ByValue NSRange range,
            @ObjCBlock(name = "call_enumerateTokensInRangeUsingBlock") Block_enumerateTokensInRangeUsingBlock block);

    // Callback interface for enumerateTokensInRangeUsingBlock; set *stop to
    // true from the callback to end enumeration early (Objective-C block
    // convention).
    @Runtime(ObjCRuntime.class)
    @Generated
    public interface Block_enumerateTokensInRangeUsingBlock {
        @Generated
        void call_enumerateTokensInRangeUsingBlock(@ByValue NSRange tokenRange, @NUInt long flags, BoolPtr stop);
    }

    @Generated
    @Selector("hash")
    @NUInt
    public static native long hash_static();

    @Generated
    @Selector("init")
    public native NLTokenizer init();

    // assumes `unit` is an NLTokenUnit raw value — TODO confirm against the
    // framework headers.
    @Generated
    @Selector("initWithUnit:")
    public native NLTokenizer initWithUnit(@NInt long unit);

    @Generated
    @Selector("instanceMethodForSelector:")
    @FunctionPtr(name = "call_instanceMethodForSelector_ret")
    public static native NSObject.Function_instanceMethodForSelector_ret instanceMethodForSelector(SEL aSelector);

    @Generated
    @Selector("instanceMethodSignatureForSelector:")
    public static native NSMethodSignature instanceMethodSignatureForSelector(SEL aSelector);

    @Generated
    @Selector("instancesRespondToSelector:")
    public static native boolean instancesRespondToSelector(SEL aSelector);

    @Generated
    @Selector("isSubclassOfClass:")
    public static native boolean isSubclassOfClass(Class aClass);

    @Generated
    @Selector("keyPathsForValuesAffectingValueForKey:")
    public static native NSSet<String> keyPathsForValuesAffectingValueForKey(String key);

    // Named new_objc because `new` is a reserved word in Java.
    @Generated
    @Owned
    @Selector("new")
    public static native NLTokenizer new_objc();

    @Generated
    @Selector("resolveClassMethod:")
    public static native boolean resolveClassMethod(SEL sel);

    @Generated
    @Selector("resolveInstanceMethod:")
    public static native boolean resolveInstanceMethod(SEL sel);

    /**
     * Clients may specify the language of the string, if it is known; otherwise it will be determined from the text.
     */
    @Generated
    @Selector("setLanguage:")
    public native void setLanguage(String language);

    /**
     * An NLTokenizer instance must be assigned a string to tokenize, and clients can then obtain ranges for tokens in that string appropriate to the tokenizer's unit.
     */
    @Generated
    @Selector("setString:")
    public native void setString(String value);

    @Generated
    @Selector("setVersion:")
    public static native void setVersion_static(@NInt long aVersion);

    /**
     * An NLTokenizer instance must be assigned a string to tokenize, and clients can then obtain ranges for tokens in that string appropriate to the tokenizer's unit.
     */
    @Generated
    @Selector("string")
    public native String string();

    @Generated
    @Selector("superclass")
    public static native Class superclass_static();

    /**
     * Returns the range corresponding to the token for the tokenizer's unit that contains the given character index. The index must not extend beyond the end of the tokenizer's string, or the method will raise an exception.
     */
    @Generated
    @Selector("tokenRangeAtIndex:")
    @ByValue
    public native NSRange tokenRangeAtIndex(@NUInt long characterIndex);

    /**
     * Returns the ranges corresponding to the tokens for the tokenizer's unit that intersect the given range.
     */
    @Generated
    @Selector("tokensForRange:")
    public native NSArray<? extends NSValue> tokensForRange(@ByValue NSRange range);

    @Generated
    @Selector("unit")
    @NInt
    public native long unit();

    @Generated
    @Selector("version")
    @NInt
    public static native long version_static();

    /**
     * Returns the smallest range covering all tokens for the tokenizer's unit intersecting the given range. If range.length == 0, this is equivalent to tokenRangeAtIndex:.
     */
    @Generated
    @Selector("tokenRangeForRange:")
    @ByValue
    public native NSRange tokenRangeForRange(@ByValue NSRange range);
}
// Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University
// Copyright (c) 2011, 2012 Open Networking Foundation
// Copyright (c) 2012, 2013 Big Switch Networks, Inc.
// This library was generated by the LoxiGen Compiler.
// See the file LICENSE.txt which should have been included in the source distribution
// Automatically generated by LOXI from template of_class.java
// Do not modify
package org.projectfloodlight.openflow.protocol.ver14;

import org.projectfloodlight.openflow.protocol.*;
import org.projectfloodlight.openflow.protocol.action.*;
import org.projectfloodlight.openflow.protocol.actionid.*;
import org.projectfloodlight.openflow.protocol.bsntlv.*;
import org.projectfloodlight.openflow.protocol.errormsg.*;
import org.projectfloodlight.openflow.protocol.meterband.*;
import org.projectfloodlight.openflow.protocol.instruction.*;
import org.projectfloodlight.openflow.protocol.instructionid.*;
import org.projectfloodlight.openflow.protocol.match.*;
import org.projectfloodlight.openflow.protocol.stat.*;
import org.projectfloodlight.openflow.protocol.oxm.*;
import org.projectfloodlight.openflow.protocol.oxs.*;
import org.projectfloodlight.openflow.protocol.queueprop.*;
import org.projectfloodlight.openflow.types.*;
import org.projectfloodlight.openflow.util.*;
import org.projectfloodlight.openflow.exceptions.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Set;
import io.netty.buffer.ByteBuf;
import com.google.common.hash.PrimitiveSink;
import com.google.common.hash.Funnel;

/**
 * Immutable OpenFlow 1.4 OXM TLV for the masked TCP source port match field
 * ({@code MatchField.TCP_SRC} with an explicit 2-byte mask).
 *
 * <p>Wire layout (8 bytes total): a fixed 4-byte type/length header
 * ({@code 0x80001b04}) followed by the 2-byte port value and the 2-byte mask.
 *
 * <p>Instances are immutable; use {@link #createBuilder()} or the standalone
 * {@link Builder} to derive modified copies. Reading and writing to the wire
 * is done through the package-visible {@code READER}/{@code WRITER} singletons.
 */
class OFOxmTcpSrcMaskedVer14 implements OFOxmTcpSrcMasked {
    private static final Logger logger = LoggerFactory.getLogger(OFOxmTcpSrcMaskedVer14.class);
    // version: 1.4
    final static byte WIRE_VERSION = 5;
    // total serialized size in bytes: 4 (header) + 2 (value) + 2 (mask)
    final static int LENGTH = 8;

    private final static TransportPort DEFAULT_VALUE = TransportPort.NONE;
    private final static TransportPort DEFAULT_VALUE_MASK = TransportPort.NONE;

    // OF message fields
    private final TransportPort value;
    private final TransportPort mask;

    // Immutable default instance
    final static OFOxmTcpSrcMaskedVer14 DEFAULT = new OFOxmTcpSrcMaskedVer14(
        DEFAULT_VALUE, DEFAULT_VALUE_MASK
    );

    // package private constructor - used by readers, builders, and factory;
    // both properties are mandatory and rejected when null
    OFOxmTcpSrcMaskedVer14(TransportPort value, TransportPort mask) {
        if(value == null) {
            throw new NullPointerException("OFOxmTcpSrcMaskedVer14: property value cannot be null");
        }
        if(mask == null) {
            throw new NullPointerException("OFOxmTcpSrcMaskedVer14: property mask cannot be null");
        }
        this.value = value;
        this.mask = mask;
    }

    // Accessors for OF message fields

    /** Fixed OXM type/length header for this entry. */
    @Override
    public long getTypeLen() {
        return 0x80001b04L;
    }

    /** The TCP source port value to match. */
    @Override
    public TransportPort getValue() {
        return value;
    }

    /** The bit mask applied to the port value. */
    @Override
    public TransportPort getMask() {
        return mask;
    }

    @Override
    public MatchField<TransportPort> getMatchField() {
        return MatchField.TCP_SRC;
    }

    /** Always true: this is the masked variant of the TCP_SRC OXM. */
    @Override
    public boolean isMasked() {
        return true;
    }

    // Canonical form: a NO_MASK mask degenerates to the unmasked OXM variant,
    // a FULL_MASK mask has no canonical representation (returns null),
    // anything else is already canonical.
    public OFOxm<TransportPort> getCanonical() {
        if (TransportPort.NO_MASK.equals(mask)) {
            return new OFOxmTcpSrcVer14(value);
        } else if(TransportPort.FULL_MASK.equals(mask)) {
            return null;
        } else {
            return this;
        }
    }

    @Override
    public OFVersion getVersion() {
        return OFVersion.OF_14;
    }

    /** Returns a builder pre-populated with this instance's properties. */
    public OFOxmTcpSrcMasked.Builder createBuilder() {
        return new BuilderWithParent(this);
    }

    /**
     * Builder seeded from an existing instance: any property not explicitly
     * set falls back to the parent message's value in {@link #build()}.
     */
    static class BuilderWithParent implements OFOxmTcpSrcMasked.Builder {
        final OFOxmTcpSrcMaskedVer14 parentMessage;

        // OF message fields
        private boolean valueSet;
        private TransportPort value;
        private boolean maskSet;
        private TransportPort mask;

        BuilderWithParent(OFOxmTcpSrcMaskedVer14 parentMessage) {
            this.parentMessage = parentMessage;
        }

        @Override
        public long getTypeLen() {
            return 0x80001b04L;
        }

        @Override
        public TransportPort getValue() {
            return value;
        }

        @Override
        public OFOxmTcpSrcMasked.Builder setValue(TransportPort value) {
            this.value = value;
            this.valueSet = true;
            return this;
        }

        @Override
        public TransportPort getMask() {
            return mask;
        }

        @Override
        public OFOxmTcpSrcMasked.Builder setMask(TransportPort mask) {
            this.mask = mask;
            this.maskSet = true;
            return this;
        }

        @Override
        public MatchField<TransportPort> getMatchField() {
            return MatchField.TCP_SRC;
        }

        @Override
        public boolean isMasked() {
            return true;
        }

        // Canonicalization is only defined on built instances, not builders.
        @Override
        public OFOxm<TransportPort> getCanonical()throws UnsupportedOperationException {
            throw new UnsupportedOperationException("Property canonical not supported in version 1.4");
        }

        @Override
        public OFVersion getVersion() {
            return OFVersion.OF_14;
        }

        // Unset properties inherit from the parent message.
        @Override
        public OFOxmTcpSrcMasked build() {
            TransportPort value = this.valueSet ? this.value : parentMessage.value;
            if(value == null)
                throw new NullPointerException("Property value must not be null");
            TransportPort mask = this.maskSet ? this.mask : parentMessage.mask;
            if(mask == null)
                throw new NullPointerException("Property mask must not be null");
            return new OFOxmTcpSrcMaskedVer14(
                    value,
                    mask
                );
        }
    }

    /**
     * Standalone builder: any property not explicitly set falls back to the
     * class-level defaults ({@code DEFAULT_VALUE}/{@code DEFAULT_VALUE_MASK}).
     */
    static class Builder implements OFOxmTcpSrcMasked.Builder {
        // OF message fields
        private boolean valueSet;
        private TransportPort value;
        private boolean maskSet;
        private TransportPort mask;

        @Override
        public long getTypeLen() {
            return 0x80001b04L;
        }

        @Override
        public TransportPort getValue() {
            return value;
        }

        @Override
        public OFOxmTcpSrcMasked.Builder setValue(TransportPort value) {
            this.value = value;
            this.valueSet = true;
            return this;
        }

        @Override
        public TransportPort getMask() {
            return mask;
        }

        @Override
        public OFOxmTcpSrcMasked.Builder setMask(TransportPort mask) {
            this.mask = mask;
            this.maskSet = true;
            return this;
        }

        @Override
        public MatchField<TransportPort> getMatchField() {
            return MatchField.TCP_SRC;
        }

        @Override
        public boolean isMasked() {
            return true;
        }

        // Canonicalization is only defined on built instances, not builders.
        @Override
        public OFOxm<TransportPort> getCanonical()throws UnsupportedOperationException {
            throw new UnsupportedOperationException("Property canonical not supported in version 1.4");
        }

        @Override
        public OFVersion getVersion() {
            return OFVersion.OF_14;
        }

        // Unset properties fall back to the documented defaults.
        @Override
        public OFOxmTcpSrcMasked build() {
            TransportPort value = this.valueSet ? this.value : DEFAULT_VALUE;
            if(value == null)
                throw new NullPointerException("Property value must not be null");
            TransportPort mask = this.maskSet ? this.mask : DEFAULT_VALUE_MASK;
            if(mask == null)
                throw new NullPointerException("Property mask must not be null");
            return new OFOxmTcpSrcMaskedVer14(
                    value,
                    mask
                );
        }
    }

    final static Reader READER = new Reader();

    /** Deserializer: validates the fixed header, then reads value and mask. */
    static class Reader implements OFMessageReader<OFOxmTcpSrcMasked> {
        @Override
        public OFOxmTcpSrcMasked readFrom(ByteBuf bb) throws OFParseError {
            // fixed value property typeLen == 0x80001b04L
            int typeLen = bb.readInt();
            if(typeLen != (int) 0x80001b04)
                throw new OFParseError("Wrong typeLen: Expected=0x80001b04L(0x80001b04L), got="+typeLen);
            TransportPort value = TransportPort.read2Bytes(bb);
            TransportPort mask = TransportPort.read2Bytes(bb);

            OFOxmTcpSrcMaskedVer14 oxmTcpSrcMaskedVer14 = new OFOxmTcpSrcMaskedVer14(
                    value,
                    mask
                    );
            if(logger.isTraceEnabled())
                logger.trace("readFrom - read={}", oxmTcpSrcMaskedVer14);
            return oxmTcpSrcMaskedVer14;
        }
    }

    /** Feeds this message's wire-relevant fields into a Guava hashing sink. */
    public void putTo(PrimitiveSink sink) {
        FUNNEL.funnel(this, sink);
    }

    final static OFOxmTcpSrcMaskedVer14Funnel FUNNEL = new OFOxmTcpSrcMaskedVer14Funnel();

    static class OFOxmTcpSrcMaskedVer14Funnel implements Funnel<OFOxmTcpSrcMaskedVer14> {
        // Guava's Funnel extends Serializable; keep a fixed serialVersionUID
        private static final long serialVersionUID = 1L;

        @Override
        public void funnel(OFOxmTcpSrcMaskedVer14 message, PrimitiveSink sink) {
            // fixed value property typeLen = 0x80001b04L
            sink.putInt((int) 0x80001b04);
            message.value.putTo(sink);
            message.mask.putTo(sink);
        }
    }

    /** Serializes this message via the shared {@code WRITER} singleton. */
    public void writeTo(ByteBuf bb) {
        WRITER.write(bb, this);
    }

    final static Writer WRITER = new Writer();

    /** Serializer: fixed 4-byte header, then 2-byte value and 2-byte mask. */
    static class Writer implements OFMessageWriter<OFOxmTcpSrcMaskedVer14> {
        @Override
        public void write(ByteBuf bb, OFOxmTcpSrcMaskedVer14 message) {
            // fixed value property typeLen = 0x80001b04L
            bb.writeInt((int) 0x80001b04);
            message.value.write2Bytes(bb);
            message.mask.write2Bytes(bb);
        }
    }

    @Override
    public String toString() {
        StringBuilder b = new StringBuilder("OFOxmTcpSrcMaskedVer14(");
        b.append("value=").append(value);
        b.append(", ");
        b.append("mask=").append(mask);
        b.append(")");
        return b.toString();
    }

    // Equality and hashing are defined over both OF message fields.
    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        OFOxmTcpSrcMaskedVer14 other = (OFOxmTcpSrcMaskedVer14) obj;

        if (value == null) {
            if (other.value != null)
                return false;
        } else if (!value.equals(other.value))
            return false;
        if (mask == null) {
            if (other.mask != null)
                return false;
        } else if (!mask.equals(other.mask))
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;

        result = prime * result + ((value == null) ? 0 : value.hashCode());
        result = prime * result + ((mask == null) ? 0 : mask.hashCode());
        return result;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.felix.utils.manifest;

import java.util.ArrayList;
import java.util.List;

/**
 * Parser for OSGi-style manifest header values.
 *
 * <p>A header is a comma-separated list of clauses; each clause is a
 * semicolon-separated list of paths followed by directives
 * ({@code key:=value}) and attributes ({@code key=value}). Quoted values
 * may contain delimiter characters.
 *
 * <p>Utility class: not instantiable.
 */
public final class Parser
{

    private Parser()
    {
    }

    /**
     * Parses a full manifest header into its clauses.
     *
     * @param header the header value; may be null.
     * @return the parsed clauses; never null (empty array for a null header).
     * @throws IllegalArgumentException if the header is an empty string or a
     *         clause is malformed.
     */
    public static Clause[] parseHeader(String header) throws IllegalArgumentException
    {
        Clause[] clauses = null;
        if (header != null)
        {
            if (header.length() == 0)
            {
                throw new IllegalArgumentException("The header cannot be an empty string.");
            }
            // Clauses are comma-delimited at the top level (quote-aware split).
            String[] ss = parseDelimitedString(header, ",");
            clauses = parseClauses(ss);
        }
        return (clauses == null) ? new Clause[0] : clauses;
    }

    /**
     * Parses pre-split clause strings into {@link Clause} objects.
     *
     * @param ss the clause strings; may be null.
     * @return the parsed clauses, or null if {@code ss} was null.
     * @throws IllegalArgumentException if a clause has no path or a piece after
     *         the paths is neither a directive nor an attribute.
     */
    public static Clause[] parseClauses(String[] ss) throws IllegalArgumentException
    {
        if (ss == null)
        {
            return null;
        }

        List<Clause> completeList = new ArrayList<Clause>();
        for (int ssIdx = 0; ssIdx < ss.length; ssIdx++)
        {
            // Break string into semi-colon delimited pieces.
            String[] pieces = parseDelimitedString(ss[ssIdx], ";");

            // Count the number of different paths; paths
            // will not have an '=' in their string. This assumes
            // that paths come first, before directives and
            // attributes.
            int pathCount = 0;
            for (int pieceIdx = 0; pieceIdx < pieces.length; pieceIdx++)
            {
                if (pieces[pieceIdx].indexOf('=') >= 0)
                {
                    break;
                }
                pathCount++;
            }

            // Error if no paths were specified.
            if (pathCount == 0)
            {
                throw new IllegalArgumentException("No path specified on clause: " + ss[ssIdx]);
            }

            // Parse the directives/attributes.
            Directive[] dirs = new Directive[pieces.length - pathCount];
            Attribute[] attrs = new Attribute[pieces.length - pathCount];
            int dirCount = 0, attrCount = 0;
            int idx = -1;
            String sep = null;
            for (int pieceIdx = pathCount; pieceIdx < pieces.length; pieceIdx++)
            {
                // Check if it is a directive (":=" must be tested before "=",
                // since ":=" contains "=").
                if ((idx = pieces[pieceIdx].indexOf(":=")) >= 0)
                {
                    sep = ":=";
                }
                // Check if it is an attribute.
                else if ((idx = pieces[pieceIdx].indexOf("=")) >= 0)
                {
                    sep = "=";
                }
                // It is an error.
                else
                {
                    throw new IllegalArgumentException("Not a directive/attribute: " + ss[ssIdx]);
                }

                String key = pieces[pieceIdx].substring(0, idx).trim();
                String value = pieces[pieceIdx].substring(idx + sep.length()).trim();

                // Remove quotes, if value is quoted.
                if (value.startsWith("\"") && value.endsWith("\""))
                {
                    value = value.substring(1, value.length() - 1);
                }

                // Save the directive/attribute in the appropriate array.
                if (sep.equals(":="))
                {
                    dirs[dirCount++] = new Directive(key, value);
                }
                else
                {
                    attrs[attrCount++] = new Attribute(key, value);
                }
            }

            // Shrink directive array.
            Directive[] dirsFinal = new Directive[dirCount];
            System.arraycopy(dirs, 0, dirsFinal, 0, dirCount);
            // Shrink attribute array.
            Attribute[] attrsFinal = new Attribute[attrCount];
            System.arraycopy(attrs, 0, attrsFinal, 0, attrCount);

            // Create one clause per path, all sharing the same
            // directives/attributes, and add each to the complete list.
            for (int pkgIdx = 0; pkgIdx < pathCount; pkgIdx++)
            {
                completeList.add(new Clause(pieces[pkgIdx], dirsFinal, attrsFinal));
            }
        }

        return completeList.toArray(new Clause[completeList.size()]);
    }

    /**
     * Parses delimited string and returns an array containing the tokens. This
     * parser obeys quotes, so the delimiter character will be ignored if it is
     * inside of a quote. This method assumes that the quote character is not
     * included in the set of delimiter characters.
     *
     * @param value the delimited string to parse; null is treated as "".
     * @param delim the characters delimiting the tokens.
     * @return an array of string tokens; never null (empty array if there
     *         were no tokens).
     * @throws IllegalArgumentException if the string has unbalanced quoting.
     **/
    public static String[] parseDelimitedString(String value, String delim)
    {
        if (value == null)
        {
            value = "";
        }

        List<String> list = new ArrayList<String>();

        // Simple state machine: which character classes are legal next.
        int CHAR = 1;
        int DELIMITER = 2;
        int STARTQUOTE = 4;
        int ENDQUOTE = 8;

        StringBuilder sb = new StringBuilder();

        int expecting = (CHAR | DELIMITER | STARTQUOTE);

        for (int i = 0; i < value.length(); i++)
        {
            char c = value.charAt(i);

            boolean isDelimiter = (delim.indexOf(c) >= 0);
            boolean isQuote = (c == '"');

            if (isDelimiter && ((expecting & DELIMITER) > 0))
            {
                // Token boundary (only honored outside quotes).
                list.add(sb.toString().trim());
                sb.delete(0, sb.length());
                expecting = (CHAR | DELIMITER | STARTQUOTE);
            }
            else if (isQuote && ((expecting & STARTQUOTE) > 0))
            {
                sb.append(c);
                expecting = CHAR | ENDQUOTE;
            }
            else if (isQuote && ((expecting & ENDQUOTE) > 0))
            {
                sb.append(c);
                expecting = (CHAR | STARTQUOTE | DELIMITER);
            }
            else if ((expecting & CHAR) > 0)
            {
                // Inside a quote, delimiters fall through to here and are kept.
                sb.append(c);
            }
            else
            {
                throw new IllegalArgumentException("Invalid delimited string: " + value);
            }
        }

        String s = sb.toString().trim();
        if (s.length() > 0)
        {
            list.add(s);
        }

        return list.toArray(new String[list.size()]);
    }
}
package eu.modernmt.api.framework;

import com.google.gson.JsonArray;
import com.google.gson.JsonObject;
import com.google.gson.JsonParseException;
import com.google.gson.JsonParser;
import eu.modernmt.api.framework.actions.Action;
import eu.modernmt.api.framework.routing.RouteTemplate;
import eu.modernmt.api.framework.routing.RouteTree;
import eu.modernmt.api.framework.routing.TemplateException;
import org.apache.commons.io.IOUtils;

import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.Part;
import java.io.IOException;
import java.io.Reader;
import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import java.nio.charset.Charset;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.UUID;

/**
 * Wrapper around an {@link HttpServletRequest} that lazily parses and caches
 * the request path, HTTP method (honoring the X-HTTP-Method-Override header),
 * query/form/multipart parameters and JSON body, and resolves the matching
 * {@link RouteTemplate} from a {@link RouteTree}.
 *
 * <p>Not thread-safe: all caching fields are plain and unsynchronized.
 */
public class RESTRequest {

    public static final String HTTP_METHOD_HEADER = "X-HTTP-Method-Override";

    // Sentinels used to cache "body present but not valid JSON of this kind".
    private static final JsonObject NULL_OBJECT = new JsonObject();
    private static final JsonArray NULL_ARRAY = new JsonArray();

    private HttpServletRequest request;

    // Lazily-computed caches.
    private String path = null;
    private List<String> pathTokens = null;
    private RouteTemplate template = null;
    private HttpMethod method = null;
    private String queryString = null;
    private HashMap<String, Object> parameters = null;
    private JsonObject jsonObject = null;
    private JsonArray jsonArray = null;
    private String json = null;
    private String toString = null;

    public RESTRequest(HttpServletRequest request, RouteTree routes) {
        // 'request' must be assigned before getHttpMethod()/getPath() read it.
        this.request = request;
        this.template = routes.get(getHttpMethod(), getPath());
    }

    public String getRemoteAddr() {
        return request.getRemoteAddr();
    }

    /**
     * Returns the request path without the leading slash; never null.
     * {@code HttpServletRequest.getPathInfo()} may return null (no extra path
     * info), which previously caused an NPE here - it is now treated as "".
     */
    public String getPath() {
        if (path == null) {
            String p = request.getPathInfo();
            if (p == null)
                p = "";
            if (p.length() > 0 && p.charAt(0) == '/')
                p = p.substring(1);
            path = p;
        }

        return path;
    }

    public RouteTemplate getTemplate() {
        return template;
    }

    /** The action class of the matched route, or null if no route matched. */
    public Class<? extends Action> getActionClass() {
        if (template != null)
            return template.getActionClass();
        else
            return null;
    }

    /**
     * Resolves the HTTP method, preferring the X-HTTP-Method-Override header
     * over the actual method. Unknown method names fall back to GET.
     */
    public HttpMethod getHttpMethod() {
        if (method == null) {
            String actual = request.getMethod();
            String header = request.getHeader(HTTP_METHOD_HEADER);

            String strMethod = header == null ? actual : header;

            try {
                this.method = HttpMethod.valueOf(strMethod.toUpperCase());
            } catch (Throwable e) {
                // Unparseable/unsupported method name: default to GET.
                this.method = HttpMethod.GET;
            }
        }

        return method;
    }

    // True if any ';'-separated token of the Content-Type header equals
    // 'contentType' (case-insensitive), e.g. "application/json; charset=UTF-8".
    private boolean isContentType(String contentType) {
        String found = request.getContentType();
        if (found == null || found.isEmpty())
            return false;

        for (String token : found.split(";"))
            if (contentType.equalsIgnoreCase(token))
                return true;

        return false;
    }

    public int getContentLength() {
        return request.getContentLength();
    }

    /** Returns the body reader for text/plain requests, null otherwise. */
    public Reader getPlainTextContent() throws IOException {
        if (isContentType("text/plain")) {
            return request.getReader();
        } else {
            return null;
        }
    }

    /**
     * Returns the query string; never null once computed. If the URL carries
     * no query string and the body is application/x-www-form-urlencoded, the
     * body is read as the query string (best effort - read errors yield "").
     */
    public String getQueryString() {
        if (queryString == null) {
            String qs = request.getQueryString();
            String encoding = request.getCharacterEncoding();

            if (qs == null || qs.isEmpty()) {
                qs = "";

                if (isContentType("application/x-www-form-urlencoded"))
                    try {
                        qs = IOUtils.toString(request.getInputStream(), encoding);
                    } catch (IOException ignored) {
                        // Best effort: an unreadable body behaves like an
                        // empty query string.
                    }
            }

            this.queryString = qs;
        }

        return queryString;
    }

    // Reads and caches the raw JSON body ("" when absent or unreadable).
    private String getJSONContent() {
        if (json == null) {
            String encoding = request.getCharacterEncoding();

            this.json = "";

            if (isContentType("application/json")) {
                try {
                    this.json = IOUtils.toString(request.getInputStream(), encoding);
                } catch (IOException ignored) {
                    // Best effort: an unreadable body behaves like no body.
                }
            }
        }

        return this.json;
    }

    /**
     * Populates the parameter map from the query string (or form body), or
     * from multipart/form-data parts. String parts become String values,
     * application/octet-stream file parts become {@link FileParameter}s.
     */
    private void ensureParameters() throws Parameters.ParameterParsingException {
        this.parameters = new HashMap<>();

        String encoding = request.getCharacterEncoding();
        String queryString = this.getQueryString();

        if (!queryString.isEmpty()) {
            String[] params = queryString.split("&");

            for (String param : params) {
                String[] tokens = param.split("=");

                if (tokens.length == 2) {
                    // NOTE(review): a null character encoding is passed
                    // straight to URLDecoder.decode - confirm callers always
                    // set one, or this may NPE on exotic requests.
                    try {
                        this.parameters.put(tokens[0], URLDecoder.decode(tokens[1], encoding).trim());
                    } catch (UnsupportedEncodingException e) {
                        // This should never happen
                    }
                } else {
                    // No value (or a value containing '='): recorded as "".
                    this.parameters.put(tokens[0], "");
                }
            }
        } else if (isContentType("multipart/form-data")) {
            Collection<Part> parts = null;

            try {
                parts = request.getParts();
            } catch (IOException e) {
                throw new Parameters.ParameterParsingException("Failed to retrieve multipart/form-data request parts", e);
            } catch (ServletException e) {
                // thrown if this request is not of type "multipart/form-data"
                // ignore it
            }

            if (parts != null) {
                for (Part part : parts) {
                    String contentType = part.getContentType();
                    String name = part.getName();
                    String file = part.getSubmittedFileName();

                    Object value = null;

                    if (contentType == null && file == null) {
                        // Assume parameter is a string
                        try {
                            value = IOUtils.toString(part.getInputStream(), Charset.defaultCharset());
                        } catch (IOException e) {
                            throw new Parameters.ParameterParsingException("Unable to read parameter '" + name + "'");
                        }
                    } else if ("application/octet-stream".equals(contentType) && file != null) {
                        value = new FileParameter(part);
                    }

                    if (value != null)
                        this.parameters.put(name, value);
                }
            }
        }
    }

    /** Returns the named String parameter, or null if absent or not a String. */
    public String getParameter(String name) throws Parameters.ParameterParsingException {
        if (parameters == null)
            this.ensureParameters();

        Object value = parameters.get(name);
        // instanceof is null-safe; the explicit null check was redundant.
        return (value instanceof String) ? (String) value : null;
    }

    /** Returns the named file parameter, or null if absent or not a file. */
    public FileParameter getFile(String name) throws Parameters.ParameterParsingException {
        if (parameters == null)
            this.ensureParameters();

        Object value = parameters.get(name);
        return (value instanceof FileParameter) ? (FileParameter) value : null;
    }

    /**
     * Returns the path token bound to the template variable {@code :varname}.
     *
     * @throws TemplateException if the template has no such variable or the
     *         bound token is empty.
     */
    public String getPathParameter(String varname) throws TemplateException {
        if (pathTokens == null) {
            String path = getPath();
            pathTokens = RouteTemplate.tokenize(path);
        }

        String varName = ':' + varname;
        int i = template.indexOfToken(varName);
        if (i < 0)
            throw new TemplateException(varName);

        String value = pathTokens.get(i).trim();
        if (value.isEmpty())
            throw new TemplateException(varName);

        return value;
    }

    public long getPathParameterAsLong(String varname) throws TemplateException {
        try {
            return Long.parseLong(getPathParameter(varname));
        } catch (NumberFormatException e) {
            throw new TemplateException(varname);
        }
    }

    public int getPathParameterAsInt(String varname) throws TemplateException {
        try {
            return Integer.parseInt(getPathParameter(varname));
        } catch (NumberFormatException e) {
            throw new TemplateException(varname);
        }
    }

    public UUID getPathParameterAsUUID(String varname) throws TemplateException {
        try {
            return UUID.fromString(getPathParameter(varname));
        } catch (IllegalArgumentException e) {
            throw new TemplateException(varname);
        }
    }

    /** Parses the body as a JSON object; null if absent or not an object. */
    public JsonObject getJSONObject() {
        if (jsonObject == null) {
            String json = getJSONContent();
            JsonObject parsed = null;

            try {
                JsonParser parser = new JsonParser();
                parsed = parser.parse(json).getAsJsonObject();
            } catch (JsonParseException | IllegalStateException ignored) {
                // Not a JSON object: cache the NULL_OBJECT sentinel below.
            }

            this.jsonObject = (parsed == null) ? NULL_OBJECT : parsed;
        }

        return (jsonObject == NULL_OBJECT) ? null : jsonObject;
    }

    /** Parses the body as a JSON array; null if absent or not an array. */
    public JsonArray getJSONArray() {
        if (jsonArray == null) {
            String json = getJSONContent();
            JsonArray parsed = null;

            try {
                JsonParser parser = new JsonParser();
                parsed = parser.parse(json).getAsJsonArray();
            } catch (JsonParseException | IllegalStateException ignored) {
                // Not a JSON array: cache the NULL_ARRAY sentinel below.
            }

            this.jsonArray = (parsed == null) ? NULL_ARRAY : parsed;
        }

        return (jsonArray == NULL_ARRAY) ? null : jsonArray;
    }

    @Override
    public String toString() {
        if (toString == null) {
            StringBuilder builder = new StringBuilder();
            builder.append(getHttpMethod());
            builder.append(" /");
            builder.append(getPath());

            String query = getQueryString();
            if (!query.isEmpty()) {
                builder.append('?');
                builder.append(query);
            }

            toString = builder.toString();
        }

        return toString;
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.activemq.artemis.core.postoffice.impl; import java.util.ArrayList; import java.util.Collections; import java.util.EnumSet; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.Executor; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import org.apache.activemq.artemis.api.core.ActiveMQAddressFullException; import org.apache.activemq.artemis.api.core.ActiveMQDuplicateIdException; import org.apache.activemq.artemis.api.core.ActiveMQNonExistentQueueException; import org.apache.activemq.artemis.api.core.Message; import org.apache.activemq.artemis.api.core.Pair; import org.apache.activemq.artemis.api.core.RoutingType; import org.apache.activemq.artemis.api.core.SimpleString; import org.apache.activemq.artemis.api.core.management.CoreNotificationType; import org.apache.activemq.artemis.api.core.management.ManagementHelper; import 
org.apache.activemq.artemis.api.core.management.NotificationType; import org.apache.activemq.artemis.core.config.WildcardConfiguration; import org.apache.activemq.artemis.core.filter.Filter; import org.apache.activemq.artemis.core.io.IOCallback; import org.apache.activemq.artemis.core.message.impl.CoreMessage; import org.apache.activemq.artemis.core.paging.PagingManager; import org.apache.activemq.artemis.core.paging.PagingStore; import org.apache.activemq.artemis.core.persistence.StorageManager; import org.apache.activemq.artemis.core.postoffice.AddressManager; import org.apache.activemq.artemis.core.postoffice.Binding; import org.apache.activemq.artemis.core.postoffice.BindingType; import org.apache.activemq.artemis.core.postoffice.Bindings; import org.apache.activemq.artemis.core.postoffice.BindingsFactory; import org.apache.activemq.artemis.core.postoffice.DuplicateIDCache; import org.apache.activemq.artemis.core.postoffice.PostOffice; import org.apache.activemq.artemis.core.postoffice.QueueBinding; import org.apache.activemq.artemis.core.postoffice.QueueInfo; import org.apache.activemq.artemis.core.postoffice.RoutingStatus; import org.apache.activemq.artemis.core.server.ActiveMQMessageBundle; import org.apache.activemq.artemis.core.server.ActiveMQScheduledComponent; import org.apache.activemq.artemis.core.server.ActiveMQServer; import org.apache.activemq.artemis.core.server.ActiveMQServerLogger; import org.apache.activemq.artemis.core.server.LargeServerMessage; import org.apache.activemq.artemis.core.server.MessageReference; import org.apache.activemq.artemis.core.server.Queue; import org.apache.activemq.artemis.core.server.QueueFactory; import org.apache.activemq.artemis.core.server.RouteContextList; import org.apache.activemq.artemis.core.server.RoutingContext; import org.apache.activemq.artemis.core.server.group.GroupingHandler; import org.apache.activemq.artemis.core.server.impl.AddressInfo; import 
org.apache.activemq.artemis.core.server.impl.RoutingContextImpl; import org.apache.activemq.artemis.core.server.management.ManagementService; import org.apache.activemq.artemis.core.server.management.Notification; import org.apache.activemq.artemis.core.server.management.NotificationListener; import org.apache.activemq.artemis.core.settings.HierarchicalRepository; import org.apache.activemq.artemis.core.settings.impl.AddressSettings; import org.apache.activemq.artemis.core.transaction.Transaction; import org.apache.activemq.artemis.core.transaction.TransactionOperation; import org.apache.activemq.artemis.core.transaction.TransactionOperationAbstract; import org.apache.activemq.artemis.core.transaction.TransactionPropertyIndexes; import org.apache.activemq.artemis.core.transaction.impl.TransactionImpl; import org.apache.activemq.artemis.utils.UUIDGenerator; import org.apache.activemq.artemis.utils.collections.TypedProperties; import org.jboss.logging.Logger; /** * This is the class that will make the routing to Queues and decide which consumer will get the messages * It's the queue component on distributing the messages * * */ public class PostOfficeImpl implements PostOffice, NotificationListener, BindingsFactory { private static final Logger logger = Logger.getLogger(PostOfficeImpl.class); public static final SimpleString HDR_RESET_QUEUE_DATA = new SimpleString("_AMQ_RESET_QUEUE_DATA"); public static final SimpleString HDR_RESET_QUEUE_DATA_COMPLETE = new SimpleString("_AMQ_RESET_QUEUE_DATA_COMPLETE"); public static final SimpleString BRIDGE_CACHE_STR = new SimpleString("BRIDGE."); private final AddressManager addressManager; private final QueueFactory queueFactory; private final StorageManager storageManager; private final PagingManager pagingManager; private volatile boolean started; private final ManagementService managementService; private Reaper reaperRunnable; private final long reaperPeriod; private final int reaperPriority; private final 
ConcurrentMap<SimpleString, DuplicateIDCache> duplicateIDCaches = new ConcurrentHashMap<>(); private final int idCacheSize; private final boolean persistIDCache; private final Map<SimpleString, QueueInfo> queueInfos = new HashMap<>(); private final Object notificationLock = new Object(); private final HierarchicalRepository<AddressSettings> addressSettingsRepository; private final ActiveMQServer server; private final Object addressLock = new Object(); public PostOfficeImpl(final ActiveMQServer server, final StorageManager storageManager, final PagingManager pagingManager, final QueueFactory bindableFactory, final ManagementService managementService, final long reaperPeriod, final int reaperPriority, final WildcardConfiguration wildcardConfiguration, final int idCacheSize, final boolean persistIDCache, final HierarchicalRepository<AddressSettings> addressSettingsRepository) { this.storageManager = storageManager; queueFactory = bindableFactory; this.managementService = managementService; this.pagingManager = pagingManager; this.reaperPeriod = reaperPeriod; this.reaperPriority = reaperPriority; if (wildcardConfiguration.isRoutingEnabled()) { addressManager = new WildcardAddressManager(this, wildcardConfiguration, storageManager); } else { addressManager = new SimpleAddressManager(this, wildcardConfiguration, storageManager); } this.idCacheSize = idCacheSize; this.persistIDCache = persistIDCache; this.addressSettingsRepository = addressSettingsRepository; this.server = server; } // ActiveMQComponent implementation --------------------------------------- @Override public synchronized void start() throws Exception { if (started) return; managementService.addNotificationListener(this); // Injecting the postoffice (itself) on queueFactory for paging-control queueFactory.setPostOffice(this); // The flag started needs to be set before starting the Reaper Thread // This is to avoid thread leakages where the Reaper would run beyond the life cycle of the // PostOffice started 
= true; } @Override public synchronized void stop() throws Exception { started = false; managementService.removeNotificationListener(this); if (reaperRunnable != null) reaperRunnable.stop(); addressManager.clear(); queueInfos.clear(); } @Override public boolean isStarted() { return started; } // NotificationListener implementation ------------------------------------- @Override public void onNotification(final Notification notification) { if (!(notification.getType() instanceof CoreNotificationType)) return; if (logger.isTraceEnabled()) { logger.trace("Receiving notification : " + notification + " on server " + this.server); } synchronized (notificationLock) { CoreNotificationType type = (CoreNotificationType) notification.getType(); switch (type) { case BINDING_ADDED: { TypedProperties props = notification.getProperties(); if (!props.containsProperty(ManagementHelper.HDR_BINDING_TYPE)) { throw ActiveMQMessageBundle.BUNDLE.bindingTypeNotSpecified(); } Integer bindingType = props.getIntProperty(ManagementHelper.HDR_BINDING_TYPE); if (bindingType == BindingType.DIVERT_INDEX) { // We don't propagate diverts return; } SimpleString routingName = props.getSimpleStringProperty(ManagementHelper.HDR_ROUTING_NAME); SimpleString clusterName = props.getSimpleStringProperty(ManagementHelper.HDR_CLUSTER_NAME); SimpleString address = props.getSimpleStringProperty(ManagementHelper.HDR_ADDRESS); if (!props.containsProperty(ManagementHelper.HDR_BINDING_ID)) { throw ActiveMQMessageBundle.BUNDLE.bindingIdNotSpecified(); } long id = props.getLongProperty(ManagementHelper.HDR_BINDING_ID); SimpleString filterString = props.getSimpleStringProperty(ManagementHelper.HDR_FILTERSTRING); if (!props.containsProperty(ManagementHelper.HDR_DISTANCE)) { logger.debug("PostOffice notification / BINDING_ADDED: HDR_DISANCE not specified, giving up propagation on notifications"); return; } int distance = props.getIntProperty(ManagementHelper.HDR_DISTANCE); QueueInfo info = new QueueInfo(routingName, 
clusterName, address, filterString, id, distance); queueInfos.put(clusterName, info); break; } case BINDING_REMOVED: { TypedProperties props = notification.getProperties(); if (!props.containsProperty(ManagementHelper.HDR_CLUSTER_NAME)) { logger.debug("PostOffice notification / BINDING_REMOVED: HDR_CLUSTER_NAME not specified, giving up propagation on notifications"); return; } SimpleString clusterName = props.getSimpleStringProperty(ManagementHelper.HDR_CLUSTER_NAME); QueueInfo info = queueInfos.remove(clusterName); if (info == null) { logger.debug("PostOffice notification / BINDING_REMOVED: Cannot find queue info for queue \" + clusterName"); return; } break; } case CONSUMER_CREATED: { TypedProperties props = notification.getProperties(); if (!props.containsProperty(ManagementHelper.HDR_CLUSTER_NAME)) { logger.debug("PostOffice notification / CONSUMER_CREATED: No clusterName defined"); return; } SimpleString clusterName = props.getSimpleStringProperty(ManagementHelper.HDR_CLUSTER_NAME); SimpleString filterString = props.getSimpleStringProperty(ManagementHelper.HDR_FILTERSTRING); QueueInfo info = queueInfos.get(clusterName); if (info == null) { logger.debug("PostOffice notification / CONSUMER_CREATED: Could not find queue created on clusterName = " + clusterName); return; } info.incrementConsumers(); if (filterString != null) { List<SimpleString> filterStrings = info.getFilterStrings(); if (filterStrings == null) { filterStrings = new ArrayList<>(); info.setFilterStrings(filterStrings); } filterStrings.add(filterString); } if (!props.containsProperty(ManagementHelper.HDR_DISTANCE)) { logger.debug("PostOffice notification / CONSUMER_CREATED: No distance specified"); return; } int distance = props.getIntProperty(ManagementHelper.HDR_DISTANCE); if (distance > 0) { SimpleString queueName = props.getSimpleStringProperty(ManagementHelper.HDR_ROUTING_NAME); if (queueName == null) { logger.debug("PostOffice notification / CONSUMER_CREATED: No queue defined"); return; } 
Binding binding = getBinding(queueName); if (binding != null) { // We have a local queue Queue queue = (Queue) binding.getBindable(); AddressSettings addressSettings = addressSettingsRepository.getMatch(binding.getAddress().toString()); long redistributionDelay = addressSettings.getRedistributionDelay(); if (redistributionDelay != -1) { queue.addRedistributor(redistributionDelay); } } } break; } case CONSUMER_CLOSED: { TypedProperties props = notification.getProperties(); SimpleString clusterName = props.getSimpleStringProperty(ManagementHelper.HDR_CLUSTER_NAME); if (clusterName == null) { logger.debug("PostOffice notification / CONSUMER_CLOSED: No cluster name"); return; } SimpleString filterString = props.getSimpleStringProperty(ManagementHelper.HDR_FILTERSTRING); QueueInfo info = queueInfos.get(clusterName); if (info == null) { return; } info.decrementConsumers(); if (filterString != null) { List<SimpleString> filterStrings = info.getFilterStrings(); filterStrings.remove(filterString); } if (info.getNumberOfConsumers() == 0) { if (!props.containsProperty(ManagementHelper.HDR_DISTANCE)) { logger.debug("PostOffice notification / CONSUMER_CLOSED: HDR_DISTANCE not defined"); return; } int distance = props.getIntProperty(ManagementHelper.HDR_DISTANCE); if (distance == 0) { SimpleString queueName = props.getSimpleStringProperty(ManagementHelper.HDR_ROUTING_NAME); if (queueName == null) { logger.debug("PostOffice notification / CONSUMER_CLOSED: No queue name"); return; } Binding binding = getBinding(queueName); if (binding == null) { logger.debug("PostOffice notification / CONSUMER_CLOSED: Could not find queue " + queueName); return; } Queue queue = (Queue) binding.getBindable(); AddressSettings addressSettings = addressSettingsRepository.getMatch(binding.getAddress().toString()); long redistributionDelay = addressSettings.getRedistributionDelay(); if (redistributionDelay != -1) { queue.addRedistributor(redistributionDelay); } } } break; } default: { break; } } } } // 
// PostOffice implementation -----------------------------------------------

   @Override
   public void reloadAddressInfo(AddressInfo addressInfo) throws Exception {
      internalAddressInfo(addressInfo, true);
   }

   @Override
   public boolean addAddressInfo(AddressInfo addressInfo) throws Exception {
      return internalAddressInfo(addressInfo, false);
   }

   /**
    * Adds (or reloads) address meta-data under the address lock, firing the
    * before/after broker-plugin callbacks and registering the address with the
    * management service only when it is genuinely new.
    *
    * @param addressInfo the address to add or reload
    * @param reload      {@code true} when re-loading a previously known address
    * @return {@code true} if the address was new to the address manager
    */
   private boolean internalAddressInfo(AddressInfo addressInfo, boolean reload) throws Exception {
      synchronized (addressLock) {
         server.callBrokerPlugins(server.hasBrokerPlugins() ? plugin -> plugin.beforeAddAddress(addressInfo, reload) : null);
         boolean result;
         if (reload) {
            result = addressManager.reloadAddressInfo(addressInfo);
         } else {
            result = addressManager.addAddressInfo(addressInfo);
         }
         // only register address if it is new
         if (result) {
            try {
               if (!addressInfo.isInternal()) {
                  managementService.registerAddress(addressInfo);
               }
               server.callBrokerPlugins(server.hasBrokerPlugins() ? plugin -> plugin.afterAddAddress(addressInfo, reload) : null);
            } catch (Exception e) {
               // was e.printStackTrace(): route the failure through the server logger
               // like every other error path in this class instead of raw stderr
               logger.warn(e.getMessage(), e);
            }
         }
         return result;
      }
   }

   /**
    * Atomically updates a queue's max-consumers / routing-type / purge flag.
    * Validation happens first so either all requested changes apply or none do;
    * persistent state is flushed through a single binding-journal transaction.
    *
    * @return the updated binding, or {@code null} if no such queue exists
    */
   @Override
   public QueueBinding updateQueue(SimpleString name,
                                   RoutingType routingType,
                                   Integer maxConsumers,
                                   Boolean purgeOnNoConsumers) throws Exception {
      synchronized (addressLock) {
         final QueueBinding queueBinding = (QueueBinding) addressManager.getBinding(name);
         if (queueBinding == null) {
            return null;
         }
         final Queue queue = queueBinding.getQueue();
         boolean changed = false;
         //validate update
         if (maxConsumers != null && maxConsumers.intValue() != Queue.MAX_CONSUMERS_UNLIMITED) {
            final int consumerCount = queue.getConsumerCount();
            if (consumerCount > maxConsumers) {
               throw ActiveMQMessageBundle.BUNDLE.invalidMaxConsumersUpdate(name.toString(), maxConsumers, consumerCount);
            }
         }
         if (routingType != null) {
            final SimpleString address = queue.getAddress();
            final AddressInfo addressInfo = addressManager.getAddressInfo(address);
            final EnumSet<RoutingType> addressRoutingTypes = addressInfo.getRoutingTypes();
            if (!addressRoutingTypes.contains(routingType)) {
               throw ActiveMQMessageBundle.BUNDLE.invalidRoutingTypeUpdate(name.toString(), routingType, address.toString(), addressRoutingTypes);
            }
         }
         //atomic update
         if (maxConsumers != null && queue.getMaxConsumers() != maxConsumers.intValue()) {
            changed = true;
            queue.setMaxConsumer(maxConsumers);
         }
         if (routingType != null && queue.getRoutingType() != routingType) {
            changed = true;
            queue.setRoutingType(routingType);
         }
         if (purgeOnNoConsumers != null && queue.isPurgeOnNoConsumers() != purgeOnNoConsumers.booleanValue()) {
            changed = true;
            queue.setPurgeOnNoConsumers(purgeOnNoConsumers);
         }
         if (changed) {
            final long txID = storageManager.generateID();
            try {
               storageManager.updateQueueBinding(txID, queueBinding);
               storageManager.commitBindings(txID);
            } catch (Throwable throwable) {
               storageManager.rollback(txID);
               logger.warn(throwable.getMessage(), throwable);
               throw throwable;
            }
         }
         return queueBinding;
      }
   }

   @Override
   public AddressInfo updateAddressInfo(SimpleString addressName,
                                        EnumSet<RoutingType> routingTypes) throws Exception {
      synchronized (addressLock) {
         server.callBrokerPlugins(server.hasBrokerPlugins() ? plugin -> plugin.beforeUpdateAddress(addressName, routingTypes) : null);
         final AddressInfo address = addressManager.updateAddressInfo(addressName, routingTypes);
         server.callBrokerPlugins(server.hasBrokerPlugins() ? plugin -> plugin.afterUpdateAddress(address) : null);
         return address;
      }
   }

   /**
    * Removes an address, refusing when any binding still exists on it.
    *
    * @return the removed {@link AddressInfo}
    */
   @Override
   public AddressInfo removeAddressInfo(SimpleString address) throws Exception {
      synchronized (addressLock) {
         server.callBrokerPlugins(server.hasBrokerPlugins() ? plugin -> plugin.beforeRemoveAddress(address) : null);
         final Bindings bindingsForAddress = getDirectBindings(address);
         if (bindingsForAddress.getBindings().size() > 0) {
            throw ActiveMQMessageBundle.BUNDLE.addressHasBindings(address);
         }
         managementService.unregisterAddress(address);
         final AddressInfo addressInfo = addressManager.removeAddressInfo(address);
         server.callBrokerPlugins(server.hasBrokerPlugins() ? plugin -> plugin.afterRemoveAddress(address, addressInfo) : null);
         return addressInfo;
      }
   }

   @Override
   public AddressInfo getAddressInfo(SimpleString addressName) {
      synchronized (addressLock) {
         return addressManager.getAddressInfo(addressName);
      }
   }

   /** Collects the local queues bound to the given address. */
   @Override
   public List<Queue> listQueuesForAddress(SimpleString address) throws Exception {
      Bindings bindingsForAddress = getBindingsForAddress(address);
      List<Queue> queues = new ArrayList<>();
      for (Binding b : bindingsForAddress.getBindings()) {
         if (b instanceof QueueBinding) {
            Queue q = ((QueueBinding) b).getQueue();
            queues.add(q);
         }
      }
      return queues;
   }

   // TODO - needs to be synchronized to prevent happening concurrently with activate()
   // (and possible removeBinding and other methods)
   // Otherwise can have situation where createQueue comes in before failover, then failover occurs
   // and post office is activated but queue remains unactivated after failover so delivery never occurs
   // even though failover is complete
   @Override
   public synchronized void addBinding(final Binding binding) throws Exception {
      addressManager.addBinding(binding);
      // Broadcast a BINDING_ADDED management notification carrying the binding's identity
      TypedProperties props = new TypedProperties();
      props.putIntProperty(ManagementHelper.HDR_BINDING_TYPE, binding.getType().toInt());
      props.putSimpleStringProperty(ManagementHelper.HDR_ADDRESS, binding.getAddress());
      props.putSimpleStringProperty(ManagementHelper.HDR_CLUSTER_NAME, binding.getClusterName());
      props.putSimpleStringProperty(ManagementHelper.HDR_ROUTING_NAME, binding.getRoutingName());
      props.putLongProperty(ManagementHelper.HDR_BINDING_ID, binding.getID());
      props.putIntProperty(ManagementHelper.HDR_DISTANCE, binding.getDistance());
      Filter filter = binding.getFilter();
      if (filter != null) {
         props.putSimpleStringProperty(ManagementHelper.HDR_FILTERSTRING, filter.getFilterString());
      }
      String uid = UUIDGenerator.getInstance().generateStringUUID();
      if (logger.isDebugEnabled()) {
         logger.debug("ClusterCommunication::Sending notification for addBinding " + binding + " from server " + server);
      }
      managementService.sendNotification(new Notification(uid, CoreNotificationType.BINDING_ADDED, props));
   }

   /**
    * Removes a binding by unique name, optionally deleting its page store and
    * duplicate-ID caches when it was the last binding on its address, then
    * broadcasts BINDING_REMOVED (diverts excepted).
    *
    * @throws ActiveMQNonExistentQueueException when no binding has that name
    */
   @Override
   public synchronized Binding removeBinding(final SimpleString uniqueName,
                                             Transaction tx,
                                             boolean deleteData) throws Exception {
      addressSettingsRepository.clearCache();
      Binding binding = addressManager.removeBinding(uniqueName, tx);
      if (binding == null) {
         throw new ActiveMQNonExistentQueueException();
      }
      if (deleteData && addressManager.getBindingsForRoutingAddress(binding.getAddress()) == null) {
         pagingManager.deletePageStore(binding.getAddress());
         deleteDuplicateCache(binding.getAddress());
      }
      if (binding.getType() == BindingType.LOCAL_QUEUE) {
         Queue queue = (Queue) binding.getBindable();
         managementService.unregisterQueue(uniqueName, binding.getAddress(), queue.getRoutingType());
      } else if (binding.getType() == BindingType.DIVERT) {
         managementService.unregisterDivert(uniqueName, binding.getAddress());
      }
      if (binding.getType() != BindingType.DIVERT) {
         TypedProperties props = new TypedProperties();
         props.putSimpleStringProperty(ManagementHelper.HDR_ADDRESS, binding.getAddress());
         props.putSimpleStringProperty(ManagementHelper.HDR_CLUSTER_NAME, binding.getClusterName());
         props.putSimpleStringProperty(ManagementHelper.HDR_ROUTING_NAME, binding.getRoutingName());
         props.putIntProperty(ManagementHelper.HDR_DISTANCE, binding.getDistance());
         props.putLongProperty(ManagementHelper.HDR_BINDING_ID, binding.getID());
         if (binding.getFilter() == null) {
            props.putSimpleStringProperty(ManagementHelper.HDR_FILTERSTRING, null);
         } else {
            props.putSimpleStringProperty(ManagementHelper.HDR_FILTERSTRING, binding.getFilter().getFilterString());
         }
         managementService.sendNotification(new Notification(null, CoreNotificationType.BINDING_REMOVED, props));
      }
      binding.close();
      return binding;
   }

   /** Drops both the regular and the bridge duplicate-ID caches for an address. */
   private void deleteDuplicateCache(SimpleString address) throws Exception {
      DuplicateIDCache cache = duplicateIDCaches.remove(address);
      if (cache != null) {
         cache.clear();
      }
      cache = duplicateIDCaches.remove(BRIDGE_CACHE_STR.concat(address));
      if (cache != null) {
         cache.clear();
      }
   }

   @Override
   public boolean isAddressBound(final SimpleString address) throws Exception {
      Bindings bindings = getBindingsForAddress(address);
      return bindings != null && !bindings.getBindings().isEmpty();
   }

   @Override
   public Bindings getBindingsForAddress(final SimpleString address) throws Exception {
      Bindings bindings = addressManager.getBindingsForRoutingAddress(address);
      if (bindings == null) {
         // no bindings yet: hand back a fresh (empty) Bindings holder
         bindings = createBindings(address);
      }
      return bindings;
   }

   @Override
   public Bindings lookupBindingsForAddress(final SimpleString address) throws Exception {
      return addressManager.getBindingsForRoutingAddress(address);
   }

   @Override
   public Binding getBinding(final SimpleString name) {
      return addressManager.getBinding(name);
   }

   @Override
   public Bindings getMatchingBindings(final SimpleString address) throws Exception {
      return addressManager.getMatchingBindings(address);
   }

   @Override
   public Bindings getDirectBindings(final SimpleString address) throws Exception {
      return addressManager.getDirectBindings(address);
   }

   @Override
   public Map<SimpleString, Binding> getAllBindings() {
      return addressManager.getBindings();
   }

   @Override
   public RoutingStatus route(final Message message, final boolean direct) throws Exception {
      return route(message, (Transaction) null, direct);
   }

   @Override
   public RoutingStatus route(final Message message,
                              final Transaction tx,
                              final boolean direct) throws Exception {
      return route(message, new RoutingContextImpl(tx), direct);
   }

   @Override
   public RoutingStatus route(Message message,
                              Transaction tx,
                              boolean direct,
                              boolean rejectDuplicates) throws Exception {
      return route(message, new RoutingContextImpl(tx), direct, rejectDuplicates, null);
   }

   @Override
   public RoutingStatus route(final Message message,
                              final Transaction tx,
                              final boolean direct,
                              final boolean rejectDuplicates,
                              final Binding binding) throws Exception {
      return route(message, new RoutingContextImpl(tx), direct, rejectDuplicates, binding);
   }

   @Override
   public RoutingStatus route(final Message message,
                              final RoutingContext context,
                              final boolean direct) throws Exception {
      return route(message, context, direct, true, null);
   }

   /**
    * Full routing entry point: applies expiry delay, duplicate detection,
    * binding resolution (or the explicit {@code bindingMove} target), DLA
    * fall-back when nothing matched, and finally {@link #processRoute}.
    *
    * @param bindingMove when non-null, route only through this binding
    * @return the routing outcome (OK / NO_BINDINGS / NO_BINDINGS_DLA / DUPLICATED_ID)
    */
   @Override
   public RoutingStatus route(final Message message,
                              final RoutingContext context,
                              final boolean direct,
                              boolean rejectDuplicates,
                              final Binding bindingMove) throws Exception {
      RoutingStatus result = RoutingStatus.OK;
      // Sanity check
      if (message.getRefCount() > 0) {
         throw new IllegalStateException("Message cannot be routed more than once");
      }
      setPagingStore(context.getAddress(message), message);
      AtomicBoolean startedTX = new AtomicBoolean(false);
      final SimpleString address = context.getAddress(message);
      applyExpiryDelay(message, address);
      if (!checkDuplicateID(message, context, rejectDuplicates, startedTX)) {
         return RoutingStatus.DUPLICATED_ID;
      }
      message.cleanupInternalProperties();
      Bindings bindings = addressManager.getBindingsForRoutingAddress(context.getAddress(message));
      // TODO auto-create queues here?
      // first check for the auto-queue creation thing
      if (bindings == null) {
         // There is no queue with this address, we will check if it needs to be created
         // if (queueCreator.create(address)) {
         // TODO: this is not working!!!!
         // reassign bindings if it was created
         // bindings = addressManager.getBindingsForRoutingAddress(address);
         // }
      }
      if (bindingMove != null) {
         bindingMove.route(message, context);
      } else if (bindings != null) {
         bindings.route(message, context);
      } else {
         // this is a debug and not warn because this could be a regular scenario on publish-subscribe queues (or topic subscriptions on JMS)
         if (logger.isDebugEnabled()) {
            logger.debug("Couldn't find any bindings for address=" + address + " on message=" + message);
         }
      }
      if (logger.isTraceEnabled()) {
         logger.trace("Message after routed=" + message);
      }
      if (context.getQueueCount() == 0) {
         // Send to DLA if appropriate
         AddressSettings addressSettings = addressSettingsRepository.getMatch(address.toString());
         boolean sendToDLA = addressSettings.isSendToDLAOnNoRoute();
         if (sendToDLA) {
            // Send to the DLA for the address
            SimpleString dlaAddress = addressSettings.getDeadLetterAddress();
            if (logger.isDebugEnabled()) {
               logger.debug("sending message to dla address = " + dlaAddress + ", message=" + message);
            }
            if (dlaAddress == null) {
               result = RoutingStatus.NO_BINDINGS;
               ActiveMQServerLogger.LOGGER.noDLA(address);
            } else {
               message.referenceOriginalMessage(message, null);
               message.setAddress(dlaAddress);
               message.reencode();
               route(message, context.getTransaction(), false);
               result = RoutingStatus.NO_BINDINGS_DLA;
            }
         } else {
            result = RoutingStatus.NO_BINDINGS;
            if (logger.isDebugEnabled()) {
               logger.debug("Message " + message + " is not going anywhere as it didn't have a binding on address:" + address);
            }
            if (message.isLargeMessage()) {
               ((LargeServerMessage) message).deleteFile();
            }
         }
      } else {
         try {
            server.callBrokerPlugins(server.hasBrokerPlugins() ? plugin -> plugin.beforeMessageRoute(message, context, direct, rejectDuplicates) : null);
            processRoute(message, context, direct);
            final RoutingStatus finalResult = result;
            server.callBrokerPlugins(server.hasBrokerPlugins() ? plugin -> plugin.afterMessageRoute(message, context, direct, rejectDuplicates, finalResult) : null);
         } catch (ActiveMQAddressFullException e) {
            // roll back a transaction we started ourselves; otherwise only mark the caller's
            if (startedTX.get()) {
               context.getTransaction().rollback();
            } else if (context.getTransaction() != null) {
               context.getTransaction().markAsRollbackOnly(e);
            }
            throw e;
         }
      }
      if (startedTX.get()) {
         context.getTransaction().commit();
      }
      return result;
   }

   // HORNETQ-1029
   private void applyExpiryDelay(Message message, SimpleString address) {
      long expirationOverride = addressSettingsRepository.getMatch(address.toString()).getExpiryDelay();
      // A -1 <expiry-delay> means don't do anything
      if (expirationOverride >= 0) {
         // only override the expiration on messages where the expiration hasn't been set by the user
         if (message.getExpiration() == 0) {
            message.setExpiration(System.currentTimeMillis() + expirationOverride);
         }
      }
   }

   /**
    * Re-routes an already-stored message into a single queue (used on reload),
    * bumping its ref counts and either reloading directly or deferring through
    * the supplied transaction.
    */
   @Override
   public MessageReference reroute(final Message message,
                                   final Queue queue,
                                   final Transaction tx) throws Exception {
      setPagingStore(queue.getAddress(), message);
      MessageReference reference = MessageReference.Factory.createReference(message, queue);
      Long scheduledDeliveryTime = message.getScheduledDeliveryTime();
      if (scheduledDeliveryTime != null) {
         reference.setScheduledDeliveryTime(scheduledDeliveryTime);
      }
      message.incrementDurableRefCount();
      message.incrementRefCount();
      if (tx == null) {
         queue.reload(reference);
      } else {
         List<MessageReference> refs = new ArrayList<>(1);
         refs.add(reference);
         tx.addOperation(new AddOperation(refs));
      }
      return reference;
   }

   /**
    * The redistribution can't process the route right away as we may be dealing with a large message which will need to be processed on a different thread
    */
   @Override
   public Pair<RoutingContext, Message> redistribute(final Message message,
                                                     final Queue originatingQueue,
                                                     final Transaction tx) throws Exception {
      // We have to copy the message and store it separately, otherwise we may lose remote bindings in case of restart before the message
      // arrived the target node
      // as described on https://issues.jboss.org/browse/JBPAPP-6130
      Message copyRedistribute = message.copy(storageManager.generateID());
      Bindings bindings = addressManager.getBindingsForRoutingAddress(originatingQueue.getAddress());
      if (bindings != null) {
         RoutingContext context = new RoutingContextImpl(tx);
         boolean routed = bindings.redistribute(copyRedistribute, originatingQueue, context);
         if (routed) {
            return new Pair<>(context, copyRedistribute);
         }
      }
      return null;
   }

   /** Lazily creates the duplicate-ID cache for an address (racy put resolved via putIfAbsent). */
   @Override
   public DuplicateIDCache getDuplicateIDCache(final SimpleString address) {
      DuplicateIDCache cache = duplicateIDCaches.get(address);
      if (cache == null) {
         cache = new DuplicateIDCacheImpl(address, idCacheSize, storageManager, persistIDCache);
         DuplicateIDCache oldCache = duplicateIDCaches.putIfAbsent(address, cache);
         if (oldCache != null) {
            cache = oldCache;
         }
      }
      return cache;
   }

   public ConcurrentMap<SimpleString, DuplicateIDCache> getDuplicateIDCaches() {
      return duplicateIDCaches;
   }

   @Override
   public Object getNotificationLock() {
      return notificationLock;
   }

   @Override
   public Set<SimpleString> getAddresses() {
      return addressManager.getAddresses();
   }

   @Override
   public SimpleString getMatchingQueue(SimpleString address, RoutingType routingType) throws Exception {
      return addressManager.getMatchingQueue(address, routingType);
   }

   @Override
   public SimpleString getMatchingQueue(SimpleString address,
                                        SimpleString queueName,
                                        RoutingType routingType) throws Exception {
      return addressManager.getMatchingQueue(address, queueName, routingType);
   }

   /**
    * Replays the current queue-info state (binding + consumer notifications)
    * directly into the given queue, bracketed by reset/complete markers so the
    * receiver sees one contiguous, gap-free snapshot.
    */
   @Override
   public void sendQueueInfoToQueue(final SimpleString queueName, final SimpleString address) throws Exception {
      // We send direct to the queue so we can send it to the same queue that is bound to the notifications address -
      // this is crucial for ensuring
      // that queue infos and notifications are received in a contiguous consistent stream
      Binding binding = addressManager.getBinding(queueName);
      if (binding == null) {
         throw new IllegalStateException("Cannot find queue " + queueName);
      }
      if (logger.isDebugEnabled()) {
         logger.debug("PostOffice.sendQueueInfoToQueue on server=" + this.server + ", queueName=" + queueName + " and address=" + address);
      }
      Queue queue = (Queue) binding.getBindable();
      // Need to lock to make sure all queue info and notifications are in the correct order with no gaps
      synchronized (notificationLock) {
         // First send a reset message
         Message message = new CoreMessage(storageManager.generateID(), 50);
         message.setAddress(queueName);
         message.putBooleanProperty(PostOfficeImpl.HDR_RESET_QUEUE_DATA, true);
         routeQueueInfo(message, queue, false);
         for (QueueInfo info : queueInfos.values()) {
            if (logger.isTraceEnabled()) {
               logger.trace("QueueInfo on sendQueueInfoToQueue = " + info);
            }
            if (info.matchesAddress(address)) {
               message = createQueueInfoMessage(CoreNotificationType.BINDING_ADDED, queueName);
               message.putStringProperty(ManagementHelper.HDR_ADDRESS, info.getAddress());
               message.putStringProperty(ManagementHelper.HDR_CLUSTER_NAME, info.getClusterName());
               message.putStringProperty(ManagementHelper.HDR_ROUTING_NAME, info.getRoutingName());
               message.putLongProperty(ManagementHelper.HDR_BINDING_ID, info.getID());
               message.putStringProperty(ManagementHelper.HDR_FILTERSTRING, info.getFilterString());
               message.putIntProperty(ManagementHelper.HDR_DISTANCE, info.getDistance());
               routeQueueInfo(message, queue, true);
               // one CONSUMER_CREATED per unfiltered consumer…
               int consumersWithFilters = info.getFilterStrings() != null ? info.getFilterStrings().size() : 0;
               for (int i = 0; i < info.getNumberOfConsumers() - consumersWithFilters; i++) {
                  message = createQueueInfoMessage(CoreNotificationType.CONSUMER_CREATED, queueName);
                  message.putStringProperty(ManagementHelper.HDR_ADDRESS, info.getAddress());
                  message.putStringProperty(ManagementHelper.HDR_CLUSTER_NAME, info.getClusterName());
                  message.putStringProperty(ManagementHelper.HDR_ROUTING_NAME, info.getRoutingName());
                  message.putIntProperty(ManagementHelper.HDR_DISTANCE, info.getDistance());
                  routeQueueInfo(message, queue, true);
               }
               // …and one per filtered consumer, carrying its filter string
               if (info.getFilterStrings() != null) {
                  for (SimpleString filterString : info.getFilterStrings()) {
                     message = createQueueInfoMessage(CoreNotificationType.CONSUMER_CREATED, queueName);
                     message.putStringProperty(ManagementHelper.HDR_ADDRESS, info.getAddress());
                     message.putStringProperty(ManagementHelper.HDR_CLUSTER_NAME, info.getClusterName());
                     message.putStringProperty(ManagementHelper.HDR_ROUTING_NAME, info.getRoutingName());
                     message.putStringProperty(ManagementHelper.HDR_FILTERSTRING, filterString);
                     message.putIntProperty(ManagementHelper.HDR_DISTANCE, info.getDistance());
                     routeQueueInfo(message, queue, true);
                  }
               }
            }
         }
         Message completeMessage = new CoreMessage(storageManager.generateID(), 50);
         completeMessage.setAddress(queueName);
         completeMessage.putBooleanProperty(PostOfficeImpl.HDR_RESET_QUEUE_DATA_COMPLETE, true);
         routeQueueInfo(completeMessage, queue, false);
      }
   }

   /* (non-Javadoc)
    * @see java.lang.Object#toString()
    */
   @Override
   public String toString() {
      return "PostOfficeImpl [server=" + server + "]";
   }

   // Private -----------------------------------------------------------------

   private void setPagingStore(SimpleString address, Message message) throws Exception {
      PagingStore store = pagingManager.getPageStore(address);
      message.setContext(store);
   }

   /** Routes a queue-info message into a single queue, honouring its filter when asked. */
   private void routeQueueInfo(final Message message, final Queue queue, final boolean applyFilters) throws Exception {
      if (!applyFilters || queue.getFilter() == null || queue.getFilter().match(message)) {
         RoutingContext context = new RoutingContextImpl(null);
         queue.route(message, context);
         processRoute(message, context, false);
      }
   }

   /** Transaction operation that kicks async delivery on paged queues after commit. */
   private static class PageDelivery extends TransactionOperationAbstract {

      private final Set<Queue> queues = new HashSet<>();

      public void addQueues(List<Queue> queueList) {
         queues.addAll(queueList);
      }

      @Override
      public void afterCommit(Transaction tx) {
         // We need to try delivering async after paging, or nothing may start a delivery after paging since nothing is
         // going towards the queues
         // The queue will try to depage case it's empty
         for (Queue queue : queues) {
            queue.deliverAsync();
         }
      }

      @Override
      public List<MessageReference> getRelatedMessageReferences() {
         return Collections.emptyList();
      }
   }

   /**
    * Materialises a routed message into per-queue references: pages when the
    * address is paging, otherwise creates references for every non-durable and
    * durable queue, persisting durable references (transactionally when a tx is
    * present) and finally scheduling delivery.
    */
   @Override
   public void processRoute(final Message message,
                            final RoutingContext context,
                            final boolean direct) throws Exception {
      final List<MessageReference> refs = new ArrayList<>();
      Transaction tx = context.getTransaction();
      Long deliveryTime = message.getScheduledDeliveryTime();
      for (Map.Entry<SimpleString, RouteContextList> entry : context.getContexListing().entrySet()) {
         PagingStore store = pagingManager.getPageStore(entry.getKey());
         if (storageManager.addToPage(store, message, context.getTransaction(), entry.getValue())) {
            if (message.isLargeMessage()) {
               confirmLargeMessageSend(tx, message);
            }
            // We need to kick delivery so the Queues may check for the cursors case they are empty
            schedulePageDelivery(tx, entry);
            continue;
         }
         for (Queue queue : entry.getValue().getNonDurableQueues()) {
            MessageReference reference = MessageReference.Factory.createReference(message, queue);
            if (deliveryTime != null) {
               reference.setScheduledDeliveryTime(deliveryTime);
            }
            refs.add(reference);
            message.incrementRefCount();
         }
         Iterator<Queue> iter = entry.getValue().getDurableQueues().iterator();
         while (iter.hasNext()) {
            Queue queue = iter.next();
            MessageReference reference = MessageReference.Factory.createReference(message, queue);
            if (context.isAlreadyAcked(context.getAddress(message), queue)) {
               reference.setAlreadyAcked();
               if (tx != null) {
                  queue.acknowledge(tx, reference);
               }
            }
            if (deliveryTime != null) {
               reference.setScheduledDeliveryTime(deliveryTime);
            }
            refs.add(reference);
            if (message.isDurable()) {
               int durableRefCount = message.incrementDurableRefCount();
               if (durableRefCount == 1) {
                  if (tx != null) {
                     storageManager.storeMessageTransactional(tx.getID(), message);
                  } else {
                     storageManager.storeMessage(message);
                  }
                  if (message.isLargeMessage()) {
                     confirmLargeMessageSend(tx, message);
                  }
               }
               if (tx != null) {
                  storageManager.storeReferenceTransactional(tx.getID(), queue.getID(), message.getMessageID());
                  tx.setContainsPersistent();
               } else {
                  storageManager.storeReference(queue.getID(), message.getMessageID(), !iter.hasNext());
               }
               // was "deliveryTime > 0": deliveryTime is a Long and may be null here
               // (only the setScheduledDeliveryTime calls above were null-guarded),
               // so the bare comparison could unbox null and throw NPE
               if (deliveryTime != null && deliveryTime > 0) {
                  if (tx != null) {
                     storageManager.updateScheduledDeliveryTimeTransactional(tx.getID(), reference);
                  } else {
                     storageManager.updateScheduledDeliveryTime(reference);
                  }
               }
            }
            message.incrementRefCount();
         }
      }
      if (tx != null) {
         tx.addOperation(new AddOperation(refs));
      } else {
         // This will use the same thread if there are no pending operations
         // avoiding a context switch on this case
         storageManager.afterCompleteOperations(new IOCallback() {
            @Override
            public void onError(final int errorCode, final String errorMessage) {
               ActiveMQServerLogger.LOGGER.ioErrorAddingReferences(errorCode, errorMessage);
            }

            @Override
            public void done() {
               addReferences(refs, direct);
            }
         });
      }
   }

   /**
    * Confirms the pending-record entry of a large message once it has been
    * safely routed, clearing the pending ID afterwards.
    *
    * @param tx      optional transaction to confirm within (may be null)
    * @param message must be a {@link LargeServerMessage}
    * @throws Exception on storage failure
    */
   private void confirmLargeMessageSend(Transaction tx, final Message message) throws Exception {
      LargeServerMessage largeServerMessage = (LargeServerMessage) message;
      if (largeServerMessage.getPendingRecordID() >= 0) {
         if (tx == null) {
            storageManager.confirmPendingLargeMessage(largeServerMessage.getPendingRecordID());
         } else {
            storageManager.confirmPendingLargeMessageTX(tx, largeServerMessage.getMessageID(), largeServerMessage.getPendingRecordID());
         }
         largeServerMessage.setPendingRecordID(-1);
      }
   }

   /**
    * This will kick a delivery async on the queue, so the queue may have a chance to depage messages
    *
    * @param tx
    * @param entry
    */
   private void schedulePageDelivery(Transaction tx, Map.Entry<SimpleString, RouteContextList> entry) {
      if (tx != null) {
         // piggy-back on (or create) the per-tx PageDelivery operation
         PageDelivery delivery = (PageDelivery) tx.getProperty(TransactionPropertyIndexes.PAGE_DELIVERY);
         if (delivery == null) {
            delivery = new PageDelivery();
            tx.putProperty(TransactionPropertyIndexes.PAGE_DELIVERY, delivery);
            tx.addOperation(delivery);
         }
         delivery.addQueues(entry.getValue().getDurableQueues());
         delivery.addQueues(entry.getValue().getNonDurableQueues());
      } else {
         List<Queue> durableQueues = entry.getValue().getDurableQueues();
         List<Queue> nonDurableQueues = entry.getValue().getNonDurableQueues();
         final List<Queue> queues = new ArrayList<>(durableQueues.size() + nonDurableQueues.size());
         queues.addAll(durableQueues);
         queues.addAll(nonDurableQueues);
         storageManager.afterCompleteOperations(new IOCallback() {
            @Override
            public void onError(int errorCode, String errorMessage) {
            }

            @Override
            public void done() {
               for (Queue queue : queues) {
                  // in case of paging, we need to kick asynchronous delivery to try delivering
                  queue.deliverAsync();
               }
            }
         });
      }
   }

   /**
    * Duplicate detection for both the bridge cache and the regular per-address
    * cache. May start a transaction on the context (reported via
    * {@code startedTX}) so the duplicate-id record commits atomically with the
    * message.
    *
    * @return {@code false} when the message is a duplicate and must not be routed
    */
   private boolean checkDuplicateID(final Message message,
                                    final RoutingContext context,
                                    boolean rejectDuplicates,
                                    AtomicBoolean startedTX) throws Exception {
      // Check the DuplicateCache for the Bridge first
      Object bridgeDup = message.removeExtraBytesProperty(Message.HDR_BRIDGE_DUPLICATE_ID);
      if (bridgeDup != null) {
         // if the message is being sent from the bridge, we just ignore the duplicate id, and use the internal one
         byte[] bridgeDupBytes = (byte[]) bridgeDup;
         DuplicateIDCache cacheBridge = getDuplicateIDCache(BRIDGE_CACHE_STR.concat(context.getAddress(message).toString()));
         if (context.getTransaction() == null) {
            context.setTransaction(new TransactionImpl(storageManager));
            startedTX.set(true);
         }
         if (!cacheBridge.atomicVerify(bridgeDupBytes, context.getTransaction())) {
            context.getTransaction().rollback();
            startedTX.set(false);
            message.decrementRefCount();
            return false;
         }
      } else {
         // if used BridgeDuplicate, it's not going to use the regular duplicate
         // since this will would break redistribution (re-setting the duplicateId)
         byte[] duplicateIDBytes = message.getDuplicateIDBytes();
         DuplicateIDCache cache = null;
         boolean isDuplicate = false;
         if (duplicateIDBytes != null) {
            cache = getDuplicateIDCache(context.getAddress(message));
            isDuplicate = cache.contains(duplicateIDBytes);
            if (rejectDuplicates && isDuplicate) {
               ActiveMQServerLogger.LOGGER.duplicateMessageDetected(message);
               String warnMessage = "Duplicate message detected - message will not be routed. Message information:" + message.toString();
               if (context.getTransaction() != null) {
                  context.getTransaction().markAsRollbackOnly(new ActiveMQDuplicateIdException(warnMessage));
               }
               message.decrementRefCount();
               return false;
            }
         }
         if (cache != null && !isDuplicate) {
            if (context.getTransaction() == null) {
               // We need to store the duplicate id atomically with the message storage, so we need to create a tx for this
               context.setTransaction(new TransactionImpl(storageManager));
               startedTX.set(true);
            }
            cache.addToCache(duplicateIDBytes, context.getTransaction(), false);
         }
      }
      return true;
   }

   /**
    * @param refs references to append to their queues' tails
    */
   private void addReferences(final List<MessageReference> refs, final boolean direct) {
      for (MessageReference ref : refs) {
         ref.getQueue().addTail(ref, direct);
      }
   }

   /**
    * The expiry scanner can't be started until the whole server has been started other wise you may get races
    */
   @Override
   public synchronized void startExpiryScanner() {
      if (reaperPeriod > 0) {
         if (reaperRunnable != null)
            reaperRunnable.stop();
         reaperRunnable = new Reaper(server.getScheduledPool(), server.getExecutorFactory().getExecutor(), reaperPeriod, TimeUnit.MILLISECONDS, false);
         reaperRunnable.start();
      }
   }

   /** Builds a queue-info notification message addressed to the given queue. */
   private Message createQueueInfoMessage(final NotificationType type, final SimpleString queueName) {
      Message message = new CoreMessage().initBuffer(50).setMessageID(storageManager.generateID());
      message.setAddress(queueName);
      String uid = UUIDGenerator.getInstance().generateStringUUID();
      message.putStringProperty(ManagementHelper.HDR_NOTIFICATION_TYPE, new SimpleString(type.toString()));
      message.putLongProperty(ManagementHelper.HDR_NOTIFICATION_TIMESTAMP, System.currentTimeMillis());
      message.putStringProperty(new SimpleString("foobar"), new SimpleString(uid));
      return message;
   }

   /** Periodically expires references on every local queue. */
   private final class Reaper extends ActiveMQScheduledComponent {

      Reaper(ScheduledExecutorService scheduledExecutorService,
             Executor executor,
             long checkPeriod,
             TimeUnit timeUnit,
             boolean onDemand) {
         super(scheduledExecutorService, executor, checkPeriod, timeUnit, onDemand);
      }

      @Override
      public void run() {
         // The reaper thread should be finished case the PostOffice is gone
         // This is to avoid leaks on PostOffice between stops and starts
         Map<SimpleString, Binding> nameMap = addressManager.getBindings();
         List<Queue> queues = new ArrayList<>();
         for (Binding binding : nameMap.values()) {
            if (binding.getType() == BindingType.LOCAL_QUEUE) {
               Queue queue = (Queue) binding.getBindable();
               queues.add(queue);
            }
         }
         for (Queue queue : queues) {
            try {
               queue.expireReferences();
            } catch (Exception e) {
               ActiveMQServerLogger.LOGGER.errorExpiringMessages(e);
            }
         }
      }
   }

   /** Transaction operation delivering routed references on commit and undoing counts on rollback. */
   public static final class AddOperation implements TransactionOperation {

      private final List<MessageReference> refs;

      AddOperation(final List<MessageReference> refs) {
         this.refs = refs;
      }

      @Override
      public void afterCommit(final Transaction tx) {
         for (MessageReference ref : refs) {
            if (!ref.isAlreadyAcked()) {
               ref.getQueue().addTail(ref, false);
            }
         }
      }

      @Override
      public void afterPrepare(final Transaction tx) {
         for (MessageReference ref : refs) {
            if (ref.isAlreadyAcked()) {
               ref.getQueue().referenceHandled();
               ref.getQueue().incrementMesssagesAdded();
            }
         }
      }

      @Override
      public void afterRollback(final Transaction tx) {
      }

      @Override
      public void beforeCommit(final Transaction tx) throws Exception {
      }

      @Override
      public void beforePrepare(final Transaction tx) throws Exception {
      }

      @Override
      public void beforeRollback(final Transaction tx) throws Exception {
         // Reverse the ref counts, and paging sizes
         for (MessageReference ref : refs) {
            Message message = ref.getMessage();
            if (message.isDurable() && ref.getQueue().isDurable()) {
               message.decrementDurableRefCount();
            }
            message.decrementRefCount();
         }
      }

      @Override
      public List<MessageReference> getRelatedMessageReferences() {
         return refs;
      }

      @Override
      public List<MessageReference> getListOnConsumer(long consumerID) {
         return Collections.emptyList();
      }
   }

   @Override
   public Bindings createBindings(final SimpleString address) throws Exception {
      GroupingHandler groupingHandler = server.getGroupingHandler();
      BindingsImpl bindings = new BindingsImpl(address, groupingHandler, pagingManager.getPageStore(address));
      if (groupingHandler != null) {
         groupingHandler.addListener(bindings);
      }
      return bindings;
   }

   // For tests only
   public AddressManager getAddressManager() {
      return addressManager;
   }

   public ActiveMQServer getServer() {
      return server;
   }
}
package com.afollestad.cabinet.utils;

import android.content.Context;
import android.text.format.DateFormat;

import java.util.Calendar;
import java.util.Date;
import java.util.GregorianCalendar;

/**
 * Utilities for getting human readable time strings.
 *
 * @author Aidan Follestad (afollestad)
 */
public class TimeUtils {

    private static final int SECONDS_IN_MINUTE = 60;
    private static final int MINUTES_IN_HOUR = 60;
    private static final int HOURS_IN_DAY = 24;
    private static final int DAYS_IN_YEAR = 365;
    private static final int MILLIS_IN_SECOND = 1000;

    private static final long MILLISECONDS_IN_MINUTE =
            (long) MILLIS_IN_SECOND * SECONDS_IN_MINUTE;
    private static final long MILLISECONDS_IN_HOUR =
            (long) MILLIS_IN_SECOND * SECONDS_IN_MINUTE * MINUTES_IN_HOUR;
    private static final long MILLISECONDS_IN_DAY =
            (long) MILLIS_IN_SECOND * SECONDS_IN_MINUTE * MINUTES_IN_HOUR * HOURS_IN_DAY;
    private static final long MILLISECONDS_IN_YEAR =
            (long) MILLIS_IN_SECOND * SECONDS_IN_MINUTE * MINUTES_IN_HOUR * HOURS_IN_DAY * DAYS_IN_YEAR;

    /** See {@link #toStringLong(Context, Calendar)}; accepts a {@link Date}. */
    public static String toStringLong(Context context, Date date) {
        GregorianCalendar cal = new GregorianCalendar();
        cal.setTime(date);
        return toStringLong(context, cal);
    }

    /**
     * Gets a human-readable long time string (includes both the time and date, always).
     *
     * @param context used to detect the device's 12/24-hour clock preference
     * @param date    the moment to format
     * @return e.g. {@code "3:05PM January 2nd"}, with {@code ", <year>"} appended
     *         when the year differs from the current year
     */
    public static String toStringLong(Context context, Calendar date) {
        Calendar now = Calendar.getInstance();
        // The time portion was previously duplicated inline; it is exactly what
        // toStringTime() produces.
        String timeStr = toStringTime(context, date);
        String dayStr = getNumberWithSuffix(date.get(Calendar.DAY_OF_MONTH));
        String base = timeStr + " " + convertMonth(date.get(Calendar.MONTH), false) + " " + dayStr;
        if (now.get(Calendar.YEAR) == date.get(Calendar.YEAR)) {
            // Same year: omit the year component.
            return base;
        }
        return base + ", " + date.get(Calendar.YEAR);
    }

    /** See {@link #toString(Context, Calendar, boolean, boolean)}; accepts a {@link Date}. */
    public static String toString(Context context, Date date, boolean includeTime, boolean shortMonth) {
        GregorianCalendar cal = new GregorianCalendar();
        cal.setTime(date);
        return toString(context, cal, includeTime, shortMonth);
    }

    /**
     * Gets a human-readable time string (includes both the time and date, excluding
     * certain parts if possible).
     *
     * @param context     used to detect the device's 12/24-hour clock preference
     * @param date        the moment to format
     * @param includeTime whether to prefix the date with the time of day
     * @param shortMonth  whether to display a long or short month string
     *                    (e.g. 'January' or 'Jan')
     * @return the time alone for today, month+day within the current year,
     *         month+day+year otherwise
     */
    public static String toString(Context context, Calendar date, boolean includeTime, boolean shortMonth) {
        Calendar now = Calendar.getInstance();
        String timeStr = toStringTime(context, date);
        String dayStr = getNumberWithSuffix(date.get(Calendar.DAY_OF_MONTH));

        boolean sameYear = now.get(Calendar.YEAR) == date.get(Calendar.YEAR);
        if (sameYear
                && now.get(Calendar.MONTH) == date.get(Calendar.MONTH)
                && now.get(Calendar.DAY_OF_YEAR) == date.get(Calendar.DAY_OF_YEAR)) {
            // Same day: the time alone is enough. (Returned even when includeTime is
            // false, matching the original behavior -- otherwise there would be
            // nothing left to show.)
            return timeStr;
        }

        // The original code had two byte-identical branches for "same month" and
        // "different month" within the same year; they are collapsed here.
        String result = includeTime ? timeStr + " " : "";
        result += convertMonth(date.get(Calendar.MONTH), shortMonth) + " " + dayStr;
        if (!sameYear) {
            result += ", " + date.get(Calendar.YEAR);
        }
        return result;
    }

    /** See {@link #toStringDate(Calendar, boolean, boolean)}; accepts a {@link Date}. */
    public static String toStringDate(Date date, boolean shortMonth, boolean alwaysIncludeYear) {
        GregorianCalendar cal = new GregorianCalendar();
        cal.setTime(date);
        return toStringDate(cal, shortMonth, alwaysIncludeYear);
    }

    /**
     * Gets a human-readable date string (month, day, and year).
     *
     * @param time              the moment to format
     * @param shortMonth        whether to display a long or short month string
     *                          (e.g. 'January' or 'Jan')
     * @param alwaysIncludeYear include the year even if it's the current year
     * @return e.g. {@code "January 2nd"} or {@code "January 2nd, 2015"}
     */
    public static String toStringDate(Calendar time, boolean shortMonth, boolean alwaysIncludeYear) {
        Calendar now = Calendar.getInstance();
        String day = getNumberWithSuffix(time.get(Calendar.DAY_OF_MONTH));
        String base = convertMonth(time.get(Calendar.MONTH), shortMonth) + " " + day;
        // The original same-month / different-month branches were identical; only
        // the year decision matters here.
        if (now.get(Calendar.YEAR) == time.get(Calendar.YEAR) && !alwaysIncludeYear) {
            return base;
        }
        return base + ", " + time.get(Calendar.YEAR);
    }

    /**
     * Gets a human-readable time-of-day string, honoring the device's 12/24-hour
     * clock preference.
     *
     * @param context used to detect the device's 12/24-hour clock preference
     * @param time    the moment to format
     * @return e.g. {@code "3:05PM"} (12-hour mode) or {@code "15:05"} (24-hour mode)
     */
    public static String toStringTime(Context context, Calendar time) {
        final boolean is24 = DateFormat.is24HourFormat(context);
        int hourInt = time.get(is24 ? Calendar.HOUR_OF_DAY : Calendar.HOUR);
        int minuteInt = time.get(Calendar.MINUTE);

        StringBuilder timeStr = new StringBuilder();
        if (hourInt == 0) {
            // In 12-hour mode Calendar.HOUR reports 0 at 12AM/12PM; display it as 12.
            // Bug fix: in 24-hour mode midnight was previously also rendered "12",
            // which reads as noon on a 24-hour clock; render it as "0" instead.
            timeStr.append(is24 ? "0" : "12");
        } else {
            timeStr.append(hourInt);
        }
        if (minuteInt < 10) {
            timeStr.append(":0").append(minuteInt);
        } else {
            timeStr.append(':').append(minuteInt);
        }
        if (!is24) {
            timeStr.append(time.get(Calendar.AM_PM) == Calendar.AM ? "AM" : "PM");
        }
        return timeStr.toString();
    }

    /** See {@link #toStringTime(Context, Calendar)}; accepts a {@link Date}. */
    public static String toStringTime(Context context, Date time) {
        GregorianCalendar cal = new GregorianCalendar();
        cal.setTime(time);
        return toStringTime(context, cal);
    }

    /** See {@link #toStringShort(Calendar)}; accepts a {@link Date}. */
    public static String toStringShort(Date date) {
        GregorianCalendar cal = new GregorianCalendar();
        cal.setTime(date);
        return toStringShort(cal);
    }

    /**
     * Gets a compact "age" string for a past moment relative to now.
     *
     * @param time the (past) moment to describe
     * @return e.g. {@code "42s"}, {@code "5m"}, {@code "3h"}, {@code "6d"},
     *         {@code "1w2d"} or {@code "3y"}
     */
    public static String toStringShort(Calendar time) {
        Calendar now = Calendar.getInstance();
        long diff = now.getTimeInMillis() - time.getTimeInMillis();
        long years = diff / MILLISECONDS_IN_YEAR;
        if (years > 0) {
            return years + "y";
        }
        long days = diff / MILLISECONDS_IN_DAY;
        if (days == 0) {
            long hours = diff / MILLISECONDS_IN_HOUR;
            if (hours == 0) {
                long minutes = diff / MILLISECONDS_IN_MINUTE;
                if (minutes == 0) {
                    long seconds = diff / MILLIS_IN_SECOND;
                    return seconds + "s";
                }
                return minutes + "m";
            }
            return hours + "h";
        }
        if (days == 7) {
            return "1w";
        }
        if (days > 7) {
            // More than a week: express as weeks plus remaining days ("2w3d").
            long weeks = days / 7;
            days = days % 7;
            String str = weeks + "w";
            if (days > 0) {
                str += days + "d";
            }
            return str;
        }
        return days + "d";
    }

    /**
     * Converts a {@link Calendar} month constant to its English name.
     *
     * @param month    one of the Calendar.JANUARY..DECEMBER constants
     * @param useShort whether to truncate to the three-letter form
     * @return the month name; unknown values fall back to "January"
     *         (preserving the original behavior)
     */
    private static String convertMonth(int month, boolean useShort) {
        String monthStr;
        switch (month) {
            case Calendar.FEBRUARY:
                monthStr = "February";
                break;
            case Calendar.MARCH:
                monthStr = "March";
                break;
            case Calendar.APRIL:
                monthStr = "April";
                break;
            case Calendar.MAY:
                monthStr = "May";
                break;
            case Calendar.JUNE:
                monthStr = "June";
                break;
            case Calendar.JULY:
                monthStr = "July";
                break;
            case Calendar.AUGUST:
                monthStr = "August";
                break;
            case Calendar.SEPTEMBER:
                monthStr = "September";
                break;
            case Calendar.OCTOBER:
                monthStr = "October";
                break;
            case Calendar.NOVEMBER:
                monthStr = "November";
                break;
            case Calendar.DECEMBER:
                monthStr = "December";
                break;
            case Calendar.JANUARY:
            default:
                // January was previously only reachable through the default label;
                // made explicit, keeping the defensive fallback for bad input.
                monthStr = "January";
                break;
        }
        if (useShort) monthStr = monthStr.substring(0, 3);
        return monthStr;
    }

    /**
     * Appends the English ordinal suffix to a day-of-month style number
     * (1st, 2nd, 3rd, 4th, ... 11th, 12th, 13th, ... 21st, ...).
     */
    private static String getNumberWithSuffix(int number) {
        int j = number % 10;
        if (j == 1 && number != 11) {
            return number + "st";
        }
        if (j == 2 && number != 12) {
            return number + "nd";
        }
        if (j == 3 && number != 13) {
            return number + "rd";
        }
        return number + "th";
    }
}
/*
 * EVE Swagger Interface
 * An OpenAPI for EVE Online
 *
 *
 *
 * NOTE: This class is auto generated by the swagger code generator program.
 * https://github.com/swagger-api/swagger-codegen.git
 * Do not edit the class manually.
 */

package net.troja.eve.esi.api;

import java.util.ArrayList;
import java.util.List;

import net.troja.eve.esi.ApiException;
import net.troja.eve.esi.model.BloodlinesResponse;
import net.troja.eve.esi.model.CategoryResponse;
import net.troja.eve.esi.model.ConstellationResponse;
import net.troja.eve.esi.model.FactionsResponse;
import net.troja.eve.esi.model.GraphicResponse;
import net.troja.eve.esi.model.GroupResponse;
import net.troja.eve.esi.model.MoonResponse;
import net.troja.eve.esi.model.PlanetResponse;
import net.troja.eve.esi.model.RacesResponse;
import net.troja.eve.esi.model.RegionResponse;
import net.troja.eve.esi.model.StarResponse;
import net.troja.eve.esi.model.StargateResponse;
import net.troja.eve.esi.model.StationResponse;
import net.troja.eve.esi.model.SystemJumpsResponse;
import net.troja.eve.esi.model.SystemKillsResponse;
import net.troja.eve.esi.model.SystemResponse;
import net.troja.eve.esi.model.TypeResponse;
import net.troja.eve.esi.model.UniverseAncestriesResponse;
import net.troja.eve.esi.model.UniverseAsteroidBeltsResponse;
import net.troja.eve.esi.model.UniverseIdsResponse;
import net.troja.eve.esi.model.UniverseNamesResponse;

import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.notNullValue;
import static org.junit.Assert.assertThat;

import org.junit.Ignore;
import org.junit.Test;

/**
 * API tests for UniverseApi.
 *
 * NOTE(review): these tests run against live ESI data; exact-count assertions
 * (e.g. {@code equalTo(43)} ancestries) and exact-name assertions are brittle
 * and will break whenever CCP changes game data. That appears to be accepted
 * for this generated suite.
 */
public class UniverseApiTest extends GeneralApiTest {

    private final UniverseApi api = new UniverseApi();

    /**
     * Get ancestries
     *
     * Get all character ancestries --- This route expires daily at 11:05
     *
     * @throws ApiException
     *             if the Api call fails
     */
    @Test
    public void getUniverseAncestriesTest() throws ApiException {
        // NOTE(review): LANGUAGE is passed both as the accept-language header and
        // as the language query parameter -- presumably intentional in the
        // generated client; confirm against the generated UniverseApi signature.
        List<UniverseAncestriesResponse> response = api.getUniverseAncestries(LANGUAGE, DATASOURCE, null, LANGUAGE);
        assertThat(response.size(), equalTo(43));
        final UniverseAncestriesResponse ancestriesResponse = response.get(0);
        assertThat(ancestriesResponse.getBloodlineId(), greaterThan(0));
    }

    /**
     * Get asteroid belt information
     *
     * Get information on an asteroid belt --- This route expires daily at 11:05
     *
     * @throws ApiException
     *             if the Api call fails
     */
    @Test
    public void getUniverseAsteroidBeltsAsteroidBeltIdTest() throws ApiException {
        Integer asteroidBeltId = 40000018;
        UniverseAsteroidBeltsResponse response = api.getUniverseAsteroidBeltsAsteroidBeltId(asteroidBeltId,
                DATASOURCE, null);
        assertThat(response, notNullValue());
        assertThat(response.getSystemId(), equalTo(30000001));
    }

    /**
     * Get bloodlines
     *
     * Get a list of bloodlines --- This route expires daily at 11:05
     *
     * @throws ApiException
     *             if the Api call fails
     */
    @Test
    public void getUniverseBloodlinesTest() throws ApiException {
        final List<BloodlinesResponse> response = api.getUniverseBloodlines(LANGUAGE, DATASOURCE, null, LANGUAGE);
        assertThat(response.size(), equalTo(18));
        final BloodlinesResponse bloodlinesResponse = response.get(0);
        assertThat(bloodlinesResponse.getBloodlineId(), greaterThan(0));
    }

    /**
     * Get item categories
     *
     * Get a list of item categories --- This route expires daily at 11:05
     *
     * @throws ApiException
     *             if the Api call fails
     */
    @Test
    public void getUniverseCategoriesTest() throws ApiException {
        final List<Integer> response = api.getUniverseCategories(DATASOURCE, null);
        assertThat(response.size(), equalTo(45));
    }

    /**
     * Get item category information
     *
     * Get information of an item category --- This route expires daily at 11:05
     *
     * @throws ApiException
     *             if the Api call fails
     */
    @Test
    public void getUniverseCategoriesCategoryIdTest() throws ApiException {
        final Integer categoryId = 8;
        final CategoryResponse response = api.getUniverseCategoriesCategoryId(categoryId, LANGUAGE, DATASOURCE, null,
                LANGUAGE);
        assertThat(response.getName(), equalTo("Charge"));
    }

    /**
     * Get constellations
     *
     * Get a list of constellations --- This route expires daily at 11:05
     *
     * @throws ApiException
     *             if the Api call fails
     */
    @Test
    public void getUniverseConstellationsTest() throws ApiException {
        final List<Integer> response = api.getUniverseConstellations(DATASOURCE, null);
        assertThat(response.size(), equalTo(1174));
    }

    /**
     * Get constellation information
     *
     * Get information on a constellation --- This route expires daily at 11:05
     *
     * @throws ApiException
     *             if the Api call fails
     */
    @Test
    public void getUniverseConstellationsConstellationIdTest() throws ApiException {
        final Integer constellationId = 20000006;
        final ConstellationResponse response = api.getUniverseConstellationsConstellationId(constellationId, LANGUAGE,
                DATASOURCE, null, LANGUAGE);
        assertThat(response.getName(), equalTo("Sazdih"));
    }

    /**
     * Get factions
     *
     * Get a list of factions --- This route expires daily at 11:05
     *
     * @throws ApiException
     *             if the Api call fails
     */
    @Test
    public void getUniverseFactionsTest() throws ApiException {
        final List<FactionsResponse> response = api.getUniverseFactions(LANGUAGE, DATASOURCE, null, LANGUAGE);
        assertThat(response.size(), equalTo(26));
        final FactionsResponse factionsResponse = response.get(0);
        assertThat(factionsResponse.getName(), equalTo("Amarr Empire"));
    }

    /**
     * Get graphics
     *
     * Get a list of graphics --- This route expires daily at 11:05
     *
     * @throws ApiException
     *             if the Api call fails
     */
    @Test
    public void getUniverseGraphicsTest() throws ApiException {
        final List<Integer> response = api.getUniverseGraphics(DATASOURCE, null);
        assertThat(response.size(), greaterThan(0));
    }

    /**
     * Get graphic information
     *
     * Get information on a graphic --- This route expires daily at 11:05
     *
     * @throws ApiException
     *             if the Api call fails
     */
    @Test
    public void getUniverseGraphicsGraphicIdTest() throws ApiException {
        final Integer graphicId = 1843;
        final GraphicResponse response = api.getUniverseGraphicsGraphicId(graphicId, DATASOURCE, null);
        assertThat(response.getGraphicId(), equalTo(graphicId));
        // NOTE(review): "SofFationName" is the (misspelled) generated accessor name
        // from the ESI model; it mirrors the model class and must match it exactly.
        assertThat(response.getSofFationName(), equalTo("creodron"));
    }

    /**
     * Get item groups
     *
     * Get a list of item groups --- This route expires daily at 11:05
     *
     * @throws ApiException
     *             if the Api call fails
     */
    @Test
    public void getUniverseGroupsTest() throws ApiException {
        final Integer page = null;
        final List<Integer> response = api.getUniverseGroups(DATASOURCE, null, page);
        // A page holds at most 1000 entries; this asserts the first page is full.
        assertThat(response.size(), equalTo(1000));
    }

    /**
     * Get item group information
     *
     * Get information on an item group --- This route expires daily at 11:05
     *
     * @throws ApiException
     *             if the Api call fails
     */
    @Test
    public void getUniverseGroupsGroupIdTest() throws ApiException {
        final Integer groupId = 5;
        final GroupResponse response = api.getUniverseGroupsGroupId(groupId, LANGUAGE, DATASOURCE, null, LANGUAGE);
        assertThat(response.getName(), equalTo("Solar System"));
    }

    /**
     * Get moon information
     *
     * Get information on a moon --- This route expires daily at 11:05
     *
     * @throws ApiException
     *             if the Api call fails
     */
    @Test
    public void getUniverseMoonsMoonIdTest() throws ApiException {
        final Integer moonId = 40001588;
        final MoonResponse response = api.getUniverseMoonsMoonId(moonId, DATASOURCE, null);
        assertThat(response.getName(), equalTo("Fovihi III - Moon 2"));
    }

    /**
     * Get planet information
     *
     * Get information on a planet --- This route expires daily at 11:05
     *
     * @throws ApiException
     *             if the Api call fails
     */
    @Test
    public void getUniversePlanetsPlanetIdTest() throws ApiException {
        final Integer planetId = 40001593;
        final PlanetResponse response = api.getUniversePlanetsPlanetId(planetId, DATASOURCE, null);
        assertThat(response.getName(), equalTo("Fovihi V"));
    }

    /**
     * Get character races
     *
     * Get a list of character races --- This route expires daily at 11:05
     *
     * @throws ApiException
     *             if the Api call fails
     */
    @Test
    public void getUniverseRacesTest() throws ApiException {
        final List<RacesResponse> response = api.getUniverseRaces(LANGUAGE, DATASOURCE, null, LANGUAGE);
        assertThat(response.size(), equalTo(4));
        final RacesResponse racesResponse = response.get(0);
        assertThat(racesResponse.getName(), equalTo("Caldari"));
    }

    /**
     * Get regions
     *
     * Get a list of regions --- This route expires daily at 11:05
     *
     * @throws ApiException
     *             if the Api call fails
     */
    @Test
    public void getUniverseRegionsTest() throws ApiException {
        final List<Integer> response = api.getUniverseRegions(DATASOURCE, null);
        assertThat(response.size(), equalTo(112));
    }

    /**
     * Get region information
     *
     * Get information on a region --- This route expires daily at 11:05
     *
     * @throws ApiException
     *             if the Api call fails
     */
    @Test
    public void getUniverseRegionsRegionIdTest() throws ApiException {
        final Integer regionId = REGION_ID_THE_FORGE;
        final RegionResponse response = api.getUniverseRegionsRegionId(regionId, LANGUAGE, DATASOURCE, null, LANGUAGE);
        assertThat(response.getName(), equalTo("The Forge"));
    }

    /**
     * Get stargate information
     *
     * Get information on a stargate --- This route expires daily at 11:05
     *
     * @throws ApiException
     *             if the Api call fails
     */
    @Test
    public void getUniverseStargatesStargateIdTest() throws ApiException {
        final Integer stargateId = 50000277;
        final StargateResponse response = api.getUniverseStargatesStargateId(stargateId, DATASOURCE, null);
        assertThat(response.getName(), equalTo("Stargate (Kiereend)"));
    }

    /**
     * Get star information
     *
     * Get information on a star --- This route expires daily at 11:05
     *
     * @throws ApiException
     *             if the Api call fails
     */
    @Test
    public void getUniverseStarsStarIdTest() throws ApiException {
        final int starId = 40001581;
        final StarResponse response = api.getUniverseStarsStarId(starId, DATASOURCE, null);
        assertThat(response.getName(), equalTo("Fovihi - Star"));
    }

    /**
     * Get station information
     *
     * Get information on a station --- This route expires daily at 11:05
     *
     * @throws ApiException
     *             if the Api call fails
     */
    @Test
    public void getUniverseStationsStationIdTest() throws ApiException {
        final Integer stationId = 60012541;
        final StationResponse response = api.getUniverseStationsStationId(stationId, DATASOURCE, null);
        assertThat(response.getName(), equalTo("Fovihi V - Ammatar Consulate Bureau"));
    }

    /**
     * List all public structures
     *
     * List all public structures --- This route is cached for up to 3600 seconds
     *
     * @throws ApiException
     *             if the Api call fails
     */
    @Test
    public void getUniverseStructuresTest() throws ApiException {
        final List<Long> response = api.getUniverseStructures(DATASOURCE, null, null);
        assertThat(response.size(), greaterThan(0));
    }

    /**
     * Get structure information
     *
     * Returns information on requested structure if you are on the ACL. Otherwise, returns \&quot;Forbidden\&quot; for all inputs. --- This route is cached for up to 3600 seconds
     *
     * @throws ApiException
     *             if the Api call fails
     */
    @Test
    @Ignore("Can't be tested")
    public void getUniverseStructuresStructureIdTest() throws ApiException {
    }

    /**
     * Get system jumps
     *
     * Get the number of jumps in solar systems within the last hour ending at the timestamp of the Last-Modified header, excluding wormhole space. Only systems with jumps will be listed --- This route is cached for up to 3600 seconds
     *
     * @throws ApiException
     *             if the Api call fails
     */
    @Test
    public void getUniverseSystemJumpsTest() throws ApiException {
        final List<SystemJumpsResponse> response = api.getUniverseSystemJumps(DATASOURCE, null);
        assertThat(response.size(), greaterThan(0));
        assertThat(response.get(0).getSystemId(), greaterThan(30000));
    }

    /**
     * Get system kills
     *
     * Get the number of ship, pod and NPC kills per solar system within the last hour ending at the timestamp of the Last-Modified header, excluding wormhole space. Only systems with kills will be listed --- This route is cached for up to 3600 seconds
     *
     * @throws ApiException
     *             if the Api call fails
     */
    @Test
    public void getUniverseSystemKillsTest() throws ApiException {
        final List<SystemKillsResponse> response = api.getUniverseSystemKills(DATASOURCE, null);
        assertThat(response.size(), greaterThan(0));
        assertThat(response.get(0).getSystemId(), greaterThan(30000));
    }

    /**
     * Get solar systems
     *
     * Get a list of solar systems --- This route expires daily at 11:05
     *
     * @throws ApiException
     *             if the Api call fails
     */
    @Test
    public void getUniverseSystemsTest() throws ApiException {
        final List<Integer> response = api.getUniverseSystems(DATASOURCE, null);
        assertThat(response.size(), equalTo(8485));
    }

    /**
     * Get solar system information
     *
     * Get information on a solar system. --- This route expires daily at 11:05
     *
     * @throws ApiException
     *             if the Api call fails
     */
    @Test
    public void getUniverseSystemsSystemIdTest() throws ApiException {
        final Integer systemId = 30000023;
        final SystemResponse response = api.getUniverseSystemsSystemId(systemId, LANGUAGE, DATASOURCE, null, LANGUAGE);
        assertThat(response.getName(), equalTo("Fovihi"));
    }

    /**
     * Get types
     *
     * Get a list of type ids --- This route expires daily at 11:05
     * (The original javadoc here said "Get information on a solar system",
     * which was a copy of the wrong endpoint description.)
     *
     * @throws ApiException
     *             if the Api call fails
     */
    @Test
    public void getUniverseTypesTest() throws ApiException {
        final Integer page = null;
        final List<Integer> response = api.getUniverseTypes(DATASOURCE, null, page);
        // A page holds at most 1000 entries; this asserts the first page is full.
        assertThat(response.size(), equalTo(1000));
    }

    /**
     * Get type information
     *
     * Get information on a type --- This route expires daily at 11:05
     *
     * @throws ApiException
     *             if the Api call fails
     */
    @Test
    public void getUniverseTypesTypeIdTest() throws ApiException {
        final Integer typeId = TYPE_ID_VELDSPAR;
        final TypeResponse response = api.getUniverseTypesTypeId(typeId, LANGUAGE, DATASOURCE, null, LANGUAGE);
        assertThat(response.getName(), equalTo(NAME_VELDSPAR));
    }

    /**
     * Bulk names to IDs
     *
     * Resolve a set of names to IDs in the following categories: agents, alliances, characters, constellations, corporations factions, inventory_types, regions, stations, and systems. Only exact matches will be returned. All names searched for are cached for 12 hours ---
     *
     * @throws ApiException
     *             if the Api call fails
     */
    @Test
    public void postUniverseIdsTest() throws ApiException {
        List<String> names = new ArrayList<>();
        names.add("Aarnaras Wasken"); //Agents
        names.add(ALLIANCE_NAME_TRI); //Alliances
        names.add("GoldenGnu"); //Character
        names.add("Kimotoro"); //Constellations
        names.add(CORPORATION_NAME_TBD); //Corporations
        names.add("Caldari State"); //Factions
        names.add(NAME_VELDSPAR); //Inventory Types
        names.add("The Forge"); //Regions
        // The systems/stations lookups below are intentionally disabled, together
        // with their assertions further down.
        //names.add("Jita"); //Systems
        //names.add("Station - Jita IV - Moon 4 - Caldari Navy Assembly Plant"); //Stations
        UniverseIdsResponse response = api.postUniverseIds(names, LANGUAGE, DATASOURCE, LANGUAGE);
        assertThat(response.getAgents().size(), greaterThan(0));
        assertThat(response.getAlliances().size(), greaterThan(0));
        assertThat(response.getCharacters().size(), greaterThan(0));
        assertThat(response.getConstellations().size(), greaterThan(0));
        assertThat(response.getCorporations().size(), greaterThan(0));
        assertThat(response.getFactions().size(), greaterThan(0));
        assertThat(response.getInventoryTypes().size(), greaterThan(0));
        assertThat(response.getRegions().size(), greaterThan(0));
        //assertThat(response.getSystems().size(), greaterThan(0));
        //assertThat(response.getStations().size(), greaterThan(0));
    }

    /**
     * Get names and categories for a set of ID&#39;s
     *
     * Resolve a set of IDs to names and categories. Supported ID&#39;s for resolving are: Characters, Corporations, Alliances, Stations, Solar Systems, Constellations, Regions, Types ---
     *
     * @throws ApiException
     *             if the Api call fails
     */
    @Test
    public void postUniverseNamesTest() throws ApiException {
        final List<Integer> ids = new ArrayList<>();
        ids.add(CHARACTER_ID_CHRIBBA);
        final List<UniverseNamesResponse> response = api.postUniverseNames(ids, DATASOURCE);
        assertThat(response.size(), equalTo(1));
        final UniverseNamesResponse result = response.get(0);
        assertThat(result.getName(), equalTo(CHARACTER_NAME_CHRIBBA));
    }
}
/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.android.ide.common.repository;

import com.android.annotations.NonNull;
import com.android.annotations.Nullable;
import com.android.builder.model.AndroidArtifact;
import com.android.builder.model.AndroidLibrary;
import com.android.builder.model.AndroidProject;
import com.android.builder.model.Variant;
import com.android.ide.common.resources.ResourceUrl;
import com.android.resources.ResourceType;
import com.google.common.base.Charsets;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Multimap;
import com.google.common.io.Files;

import java.io.File;
import java.io.IOException;
import java.util.Collection;
import java.util.List;
import java.util.Map;

import static com.android.SdkConstants.FN_RESOURCE_TEXT;

/**
 * Class which provides information about whether Android resources for a given library are
 * public or private.
 */
public abstract class ResourceVisibilityLookup {

    /**
     * Returns true if the given resource is private
     *
     * @param type the type of the resource
     * @param name the resource field name of the resource (in other words, for
     *             style Theme:Variant.Cls the name would be Theme_Variant_Cls; you can use
     *             {@link LintUtils#g}
     *             (NOTE(review): this link is truncated in the original source --
     *             presumably a LintUtils helper that computes the field name; confirm)
     * @return true if the given resource is private
     */
    public abstract boolean isPrivate(
            @NonNull ResourceType type,
            @NonNull String name);

    /**
     * Returns true if the given resource is private in the library
     *
     * @param url the resource URL
     * @return true if the given resource is private
     */
    public boolean isPrivate(@NonNull ResourceUrl url) {
        assert !url.framework; // Framework resources are not part of the library
        return isPrivate(url.type, url.name);
    }

    /**
     * For a private resource, return the {@link AndroidLibrary} that the resource was defined as
     * private in
     *
     * @param type the type of the resource
     * @param name the name of the resource
     * @return the library which defines the resource as private
     */
    @Nullable
    public abstract AndroidLibrary getPrivateIn(@NonNull ResourceType type,
            @NonNull String name);

    /** Returns true if this repository does not declare any resources to be private */
    public abstract boolean isEmpty();

    /**
     * Creates a {@link ResourceVisibilityLookup} for a given library.
     * <p>
     * NOTE: The {@link Provider} class can be used to share/cache {@link ResourceVisibilityLookup}
     * instances, e.g. when you have library1 and library2 each referencing libraryBase, the {@link
     * Provider} will ensure that the libraryBase data is shared.
     *
     * @param library the library
     * @return a corresponding {@link ResourceVisibilityLookup}
     */
    @NonNull
    public static ResourceVisibilityLookup create(@NonNull AndroidLibrary library) {
        return new LibraryResourceVisibility(library);
    }

    /**
     * Creates a {@link ResourceVisibilityLookup} for the set of libraries.
     * <p>
     * NOTE: The {@link Provider} class can be used to share/cache {@link ResourceVisibilityLookup}
     * instances, e.g. when you have library1 and library2 each referencing libraryBase, the {@link
     * Provider} will ensure that the libraryBase data is shared.
     *
     * @param libraries the list of libraries
     * @param provider an optional manager instance for caching of individual libraries, if any
     * @return a corresponding {@link ResourceVisibilityLookup}
     */
    @NonNull
    public static ResourceVisibilityLookup create(@NonNull List<AndroidLibrary> libraries,
            @Nullable Provider provider) {
        List<ResourceVisibilityLookup> list = Lists.newArrayListWithExpectedSize(libraries.size());
        for (AndroidLibrary library : libraries) {
            // Go through the provider when available so shared dependencies are cached.
            ResourceVisibilityLookup v = provider != null ? provider.get(library)
                    : create(library);
            if (!v.isEmpty()) {
                list.add(v);
            }
        }
        return new MultipleLibraryResourceVisibility(list);
    }

    /** Null-object lookup: declares nothing private. */
    public static final ResourceVisibilityLookup NONE = new ResourceVisibilityLookup() {
        @Override
        public boolean isPrivate(@NonNull ResourceType type, @NonNull String name) {
            return false;
        }

        @Nullable
        @Override
        public AndroidLibrary getPrivateIn(@NonNull ResourceType type, @NonNull String name) {
            return null;
        }

        @Override
        public boolean isEmpty() {
            return true;
        }
    };

    /** Searches multiple libraries */
    private static class MultipleLibraryResourceVisibility extends ResourceVisibilityLookup {
        private final List<ResourceVisibilityLookup> mRepositories;

        public MultipleLibraryResourceVisibility(List<ResourceVisibilityLookup> repositories) {
            mRepositories = repositories;
        }

        // It's anticipated that these methods will be called a lot (e.g. in inner loops
        // iterating over all resources matching code completion etc) so since we know
        // that our list has random access, avoid creating iterators here
        @SuppressWarnings("ForLoopReplaceableByForEach")
        @Override
        public boolean isPrivate(@NonNull ResourceType type, @NonNull String name) {
            for (int i = 0, n = mRepositories.size(); i < n; i++) {
                if (mRepositories.get(i).isPrivate(type, name)) {
                    return true;
                }
            }
            return false;
        }

        @SuppressWarnings("ForLoopReplaceableByForEach")
        @Override
        public boolean isEmpty() {
            for (int i = 0, n = mRepositories.size(); i < n; i++) {
                if (!mRepositories.get(i).isEmpty()) {
                    return false;
                }
            }
            return true;
        }

        @SuppressWarnings("ForLoopReplaceableByForEach")
        @Nullable
        @Override
        public AndroidLibrary getPrivateIn(@NonNull ResourceType type, @NonNull String name) {
            for (int i = 0, n = mRepositories.size(); i < n; i++) {
                ResourceVisibilityLookup r = mRepositories.get(i);
                if (r.isPrivate(type, name)) {
                    return r.getPrivateIn(type, name);
                }
            }
            return null;
        }
    }

    /**
     * Provider which keeps a set of {@link ResourceVisibilityLookup} instances around for
     * repeated queries, including from different libraries that may share dependencies
     */
    public static class Provider {
        /**
         * We store lookup instances for multiple separate types of keys here:
         * {@link AndroidLibrary}, {@link AndroidArtifact}, and {@link Variant}
         */
        private Map<Object, ResourceVisibilityLookup> mInstances = Maps.newHashMap();

        /**
         * Looks up a (possibly cached) {@link ResourceVisibilityLookup} for the given {@link
         * AndroidLibrary}
         *
         * @param library the library
         * @return the corresponding {@link ResourceVisibilityLookup}
         */
        @NonNull
        public ResourceVisibilityLookup get(@NonNull AndroidLibrary library) {
            ResourceVisibilityLookup visibility = mInstances.get(library);
            if (visibility == null) {
                visibility = new LibraryResourceVisibility(library);
                if (visibility.isEmpty()) {
                    visibility = NONE;
                }
                // Recursively fold in the library's own dependencies (also cached).
                List<? extends AndroidLibrary> dependsOn = library.getLibraryDependencies();
                if (!dependsOn.isEmpty()) {
                    List<ResourceVisibilityLookup> list =
                            Lists.newArrayListWithExpectedSize(dependsOn.size() + 1);
                    list.add(visibility);
                    for (AndroidLibrary d : dependsOn) {
                        ResourceVisibilityLookup v = get(d);
                        if (!v.isEmpty()) {
                            list.add(v);
                        }
                    }
                    if (list.size() > 1) {
                        visibility = new MultipleLibraryResourceVisibility(list);
                    }
                }
                mInstances.put(library, visibility);
            }
            return visibility;
        }

        /**
         * Looks up a (possibly cached) {@link ResourceVisibilityLookup} for the given {@link
         * AndroidArtifact}
         *
         * @param artifact the artifact
         * @return the corresponding {@link ResourceVisibilityLookup}
         */
        @NonNull
        public ResourceVisibilityLookup get(@NonNull AndroidArtifact artifact) {
            ResourceVisibilityLookup visibility = mInstances.get(artifact);
            if (visibility == null) {
                Collection<AndroidLibrary> dependsOn = artifact.getDependencies().getLibraries();
                List<ResourceVisibilityLookup> list =
                        Lists.newArrayListWithExpectedSize(dependsOn.size() + 1);
                for (AndroidLibrary d : dependsOn) {
                    ResourceVisibilityLookup v = get(d);
                    if (!v.isEmpty()) {
                        list.add(v);
                    }
                }
                int size = list.size();
                // Avoid the wrapper for the trivial 0/1-library cases.
                visibility = size == 0 ? NONE : size == 1 ? list.get(0)
                        : new MultipleLibraryResourceVisibility(list);
                mInstances.put(artifact, visibility);
            }
            return visibility;
        }

        /**
         * Returns true if the given Gradle model is compatible with public resources.
         * (Older models than 1.3 will throw exceptions if we attempt to for example
         * query the public resource file location.)
         *
         * @param project the project to check
         * @return true if the model is recent enough to support resource visibility queries
         */
        public static boolean isVisibilityAwareModel(@NonNull AndroidProject project) {
            String modelVersion = project.getModelVersion();
            // getApiVersion doesn't work prior to 1.2, and API level must be at least 3
            return !(modelVersion.startsWith("1.0")
                    || modelVersion.startsWith("1.1"))
                    && project.getApiVersion() >= 3;
        }

        /**
         * Looks up a (possibly cached) {@link ResourceVisibilityLookup} for the given {@link
         * AndroidProject} variant
         *
         * @param project the project
         * @return the corresponding {@link ResourceVisibilityLookup}
         */
        @NonNull
        public ResourceVisibilityLookup get(
                @NonNull AndroidProject project,
                @NonNull Variant variant) {
            ResourceVisibilityLookup visibility = mInstances.get(variant);
            if (visibility == null) {
                if (isVisibilityAwareModel(project)) {
                    AndroidArtifact artifact = variant.getMainArtifact();
                    visibility = get(artifact);
                } else {
                    // Old model: cannot query visibility -- treat everything as public.
                    visibility = NONE;
                }
                mInstances.put(variant, visibility);
            }
            return visibility;
        }
    }

    /** Visibility data for a single library */
    private static class LibraryResourceVisibility extends ResourceVisibilityLookup {
        private final AndroidLibrary mLibrary;
        private final Multimap<String, ResourceType> mAll;
        private final Multimap<String, ResourceType> mPublic;

        private LibraryResourceVisibility(@NonNull AndroidLibrary library) {
            mLibrary = library;
            mPublic = computeVisibilityMap();
            // mAll is only needed when a public.txt exists (mPublic != null);
            // without one, every resource is considered public.
            //noinspection VariableNotUsedInsideIf
            if (mPublic != null) {
                mAll = computeAllMap();
            } else {
                mAll = null;
            }
        }

        @Override
        public boolean isEmpty() {
            return mPublic == null;
        }

        @Nullable
        @Override
        public AndroidLibrary getPrivateIn(@NonNull ResourceType type, @NonNull String name) {
            if (isPrivate(type, name)) {
                return mLibrary;
            }
            return null;
        }

        /**
         * Returns a map from name to applicable resource types where the presence of the type+name
         * combination means that the corresponding resource is explicitly public.
         *
         * If the result is null, there is no {@code public.txt} definition for this library, so all
         * resources should be taken to be public.
         *
         * @return a map from name to resource type for public resources in this library
         */
        @Nullable
        private Multimap<String, ResourceType> computeVisibilityMap() {
            File publicResources = mLibrary.getPublicResources();
            if (!publicResources.exists()) {
                return null;
            }
            try {
                List<String> lines = Files.readLines(publicResources, Charsets.UTF_8);
                Multimap<String, ResourceType> result = ArrayListMultimap.create(lines.size(), 2);
                for (String line : lines) {
                    // These files are written by code in MergedResourceWriter#postWriteAction
                    // Format for each line: <type><space><name>\n
                    // Therefore, we don't expect/allow variations in the format (we don't
                    // worry about extra spaces needing to be trimmed etc)
                    int index = line.indexOf(' ');
                    if (index == -1 || line.isEmpty()) {
                        continue;
                    }
                    String typeString = line.substring(0, index);
                    ResourceType type = ResourceType.getEnum(typeString);
                    if (type == null) {
                        // This could in theory happen if in the future a new ResourceType is
                        // introduced, and a newer version of the Gradle build system writes the
                        // name of this type into the public.txt file, and an older version of
                        // the IDE then attempts to read it. Just skip these symbols.
                        continue;
                    }
                    String name = line.substring(index + 1);
                    result.put(name, type);
                }
                return result;
            } catch (IOException ignore) {
                // An unreadable public.txt is deliberately treated the same as a missing
                // one: fall through and return null ("no declaration" => all public).
            }
            return null;
        }

        /**
         * Returns a map from name to resource types for all resources known to this library. This
         * is used to make sure that when the {@link #isPrivate(ResourceType, String)} query method
         * is called, it can tell the difference between a resource implicitly private by not being
         * declared as public and a resource unknown to this library (e.g. defined by a different
         * library or the user's own project resources.)
* * @return a map from name to resource type for all resources in this library */ @Nullable private Multimap<String, ResourceType> computeAllMap() { // getSymbolFile() is not defined in AndroidLibrary, only in the subclass LibraryBundle File symbolFile = new File(mLibrary.getPublicResources().getParentFile(), FN_RESOURCE_TEXT); if (!symbolFile.exists()) { return null; } try { List<String> lines = Files.readLines(symbolFile, Charsets.UTF_8); Multimap<String, ResourceType> result = ArrayListMultimap.create(lines.size(), 2); ResourceType previousType = null; String previousTypeString = ""; int lineIndex = 1; final int count = lines.size(); for (; lineIndex <= count; lineIndex++) { String line = lines.get(lineIndex - 1); if (line.startsWith("int ")) { // not int[] definitions for styleables // format is "int <type> <class> <name> <value>" int typeStart = 4; int typeEnd = line.indexOf(' ', typeStart); // Items are sorted by type, so we can avoid looping over types in // ResourceType.getEnum() for each line by sharing type in each section String typeString = line.substring(typeStart, typeEnd); ResourceType type; if (typeString.equals(previousTypeString)) { type = previousType; } else { type = ResourceType.getEnum(typeString); previousTypeString = typeString; previousType = type; } if (type == null) { // some newly introduced type continue; } int nameStart = typeEnd + 1; int nameEnd = line.indexOf(' ', nameStart); String name = line.substring(nameStart, nameEnd); result.put(name, type); } } return result; } catch (IOException ignore) { } return null; } /** * Returns true if the given resource is private in the library * * @param type the type of the resource * @param name the name of the resource * @return true if the given resource is private */ @Override public boolean isPrivate(@NonNull ResourceType type, @NonNull String name) { //noinspection SimplifiableIfStatement if (mPublic == null) { // No public definitions: Everything assumed to be public return false; } 
//noinspection SimplifiableIfStatement if (!mAll.containsEntry(name, type)) { // Don't respond to resource URLs that are not part of this project // since we won't have private information on them return false; } return !mPublic.containsEntry(name, type); } } }
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.

package com.azure.resourcemanager.sql.fluent;

import com.azure.core.annotation.ReturnType;
import com.azure.core.annotation.ServiceMethod;
import com.azure.core.http.rest.PagedFlux;
import com.azure.core.http.rest.PagedIterable;
import com.azure.core.http.rest.Response;
import com.azure.core.management.polling.PollResult;
import com.azure.core.util.Context;
import com.azure.core.util.polling.PollerFlux;
import com.azure.core.util.polling.SyncPoller;
import com.azure.resourcemanager.sql.fluent.models.ExtendedServerBlobAuditingPolicyInner;
import java.nio.ByteBuffer;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;

/**
 * An instance of this class provides access to all the operations defined in ExtendedServerBlobAuditingPoliciesClient.
 */
public interface ExtendedServerBlobAuditingPoliciesClient {
    /**
     * Gets an extended server's blob auditing policy.
     *
     * @param resourceGroupName The name of the resource group that contains the resource. You can obtain this value
     *     from the Azure Resource Manager API or the portal.
     * @param serverName The name of the server.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return an extended server's blob auditing policy along with {@link Response} on successful completion of
     *     {@link Mono}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    Mono<Response<ExtendedServerBlobAuditingPolicyInner>> getWithResponseAsync(
        String resourceGroupName, String serverName);

    /**
     * Gets an extended server's blob auditing policy.
     *
     * @param resourceGroupName The name of the resource group that contains the resource. You can obtain this value
     *     from the Azure Resource Manager API or the portal.
     * @param serverName The name of the server.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return an extended server's blob auditing policy on successful completion of {@link Mono}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    Mono<ExtendedServerBlobAuditingPolicyInner> getAsync(String resourceGroupName, String serverName);

    /**
     * Gets an extended server's blob auditing policy.
     *
     * @param resourceGroupName The name of the resource group that contains the resource. You can obtain this value
     *     from the Azure Resource Manager API or the portal.
     * @param serverName The name of the server.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return an extended server's blob auditing policy.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    ExtendedServerBlobAuditingPolicyInner get(String resourceGroupName, String serverName);

    /**
     * Gets an extended server's blob auditing policy.
     *
     * @param resourceGroupName The name of the resource group that contains the resource. You can obtain this value
     *     from the Azure Resource Manager API or the portal.
     * @param serverName The name of the server.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return an extended server's blob auditing policy along with {@link Response}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    Response<ExtendedServerBlobAuditingPolicyInner> getWithResponse(
        String resourceGroupName, String serverName, Context context);

    /**
     * Creates or updates an extended server's blob auditing policy.
     *
     * @param resourceGroupName The name of the resource group that contains the resource. You can obtain this value
     *     from the Azure Resource Manager API or the portal.
     * @param serverName The name of the server.
     * @param parameters An extended server blob auditing policy.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return an extended server blob auditing policy along with {@link Response} on successful completion of
     *     {@link Mono}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    Mono<Response<Flux<ByteBuffer>>> createOrUpdateWithResponseAsync(
        String resourceGroupName, String serverName, ExtendedServerBlobAuditingPolicyInner parameters);

    /**
     * Creates or updates an extended server's blob auditing policy.
     *
     * @param resourceGroupName The name of the resource group that contains the resource. You can obtain this value
     *     from the Azure Resource Manager API or the portal.
     * @param serverName The name of the server.
     * @param parameters An extended server blob auditing policy.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the {@link PollerFlux} for polling of an extended server blob auditing policy.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    PollerFlux<PollResult<ExtendedServerBlobAuditingPolicyInner>, ExtendedServerBlobAuditingPolicyInner>
        beginCreateOrUpdateAsync(
            String resourceGroupName, String serverName, ExtendedServerBlobAuditingPolicyInner parameters);

    /**
     * Creates or updates an extended server's blob auditing policy.
     *
     * @param resourceGroupName The name of the resource group that contains the resource. You can obtain this value
     *     from the Azure Resource Manager API or the portal.
     * @param serverName The name of the server.
     * @param parameters An extended server blob auditing policy.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the {@link SyncPoller} for polling of an extended server blob auditing policy.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    SyncPoller<PollResult<ExtendedServerBlobAuditingPolicyInner>, ExtendedServerBlobAuditingPolicyInner>
        beginCreateOrUpdate(
            String resourceGroupName, String serverName, ExtendedServerBlobAuditingPolicyInner parameters);

    /**
     * Creates or updates an extended server's blob auditing policy.
     *
     * @param resourceGroupName The name of the resource group that contains the resource. You can obtain this value
     *     from the Azure Resource Manager API or the portal.
     * @param serverName The name of the server.
     * @param parameters An extended server blob auditing policy.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the {@link SyncPoller} for polling of an extended server blob auditing policy.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    SyncPoller<PollResult<ExtendedServerBlobAuditingPolicyInner>, ExtendedServerBlobAuditingPolicyInner>
        beginCreateOrUpdate(
            String resourceGroupName,
            String serverName,
            ExtendedServerBlobAuditingPolicyInner parameters,
            Context context);

    /**
     * Creates or updates an extended server's blob auditing policy.
     *
     * @param resourceGroupName The name of the resource group that contains the resource. You can obtain this value
     *     from the Azure Resource Manager API or the portal.
     * @param serverName The name of the server.
     * @param parameters An extended server blob auditing policy.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return an extended server blob auditing policy on successful completion of {@link Mono}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    Mono<ExtendedServerBlobAuditingPolicyInner> createOrUpdateAsync(
        String resourceGroupName, String serverName, ExtendedServerBlobAuditingPolicyInner parameters);

    /**
     * Creates or updates an extended server's blob auditing policy.
     *
     * @param resourceGroupName The name of the resource group that contains the resource. You can obtain this value
     *     from the Azure Resource Manager API or the portal.
     * @param serverName The name of the server.
     * @param parameters An extended server blob auditing policy.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return an extended server blob auditing policy.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    ExtendedServerBlobAuditingPolicyInner createOrUpdate(
        String resourceGroupName, String serverName, ExtendedServerBlobAuditingPolicyInner parameters);

    /**
     * Creates or updates an extended server's blob auditing policy.
     *
     * @param resourceGroupName The name of the resource group that contains the resource. You can obtain this value
     *     from the Azure Resource Manager API or the portal.
     * @param serverName The name of the server.
     * @param parameters An extended server blob auditing policy.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return an extended server blob auditing policy.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    ExtendedServerBlobAuditingPolicyInner createOrUpdate(
        String resourceGroupName,
        String serverName,
        ExtendedServerBlobAuditingPolicyInner parameters,
        Context context);

    /**
     * Lists extended auditing settings of a server.
     *
     * @param resourceGroupName The name of the resource group that contains the resource. You can obtain this value
     *     from the Azure Resource Manager API or the portal.
     * @param serverName The name of the server.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return a list of server extended auditing settings as paginated response with {@link PagedFlux}.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    PagedFlux<ExtendedServerBlobAuditingPolicyInner> listByServerAsync(String resourceGroupName, String serverName);

    /**
     * Lists extended auditing settings of a server.
     *
     * @param resourceGroupName The name of the resource group that contains the resource. You can obtain this value
     *     from the Azure Resource Manager API or the portal.
     * @param serverName The name of the server.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return a list of server extended auditing settings as paginated response with {@link PagedIterable}.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    PagedIterable<ExtendedServerBlobAuditingPolicyInner> listByServer(String resourceGroupName, String serverName);

    /**
     * Lists extended auditing settings of a server.
     *
     * @param resourceGroupName The name of the resource group that contains the resource. You can obtain this value
     *     from the Azure Resource Manager API or the portal.
     * @param serverName The name of the server.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return a list of server extended auditing settings as paginated response with {@link PagedIterable}.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    PagedIterable<ExtendedServerBlobAuditingPolicyInner> listByServer(
        String resourceGroupName, String serverName, Context context);
}