gt
stringclasses 1
value | context
stringlengths 2.05k
161k
|
---|---|
package com.nomagic.magicdraw.plugins.ifml;
import com.nomagic.magicdraw.plugins.ifml.javacomponents.IfmlFrame;
import com.nomagic.magicdraw.uml.symbols.PresentationElement;
import com.nomagic.magicdraw.uml.symbols.shapes.ShapeElement;
import com.nomagic.uml2.ext.magicdraw.classes.mdkernel.NamedElement;
import javax.swing.*;
import javax.swing.border.Border;
import javax.swing.border.TitledBorder;
import javax.swing.plaf.InsetsUIResource;
import java.awt.*;
import java.util.ArrayList;
import java.util.Hashtable;
import java.util.List;
/**
*
*
* @author Mindaugas Genutis
*/
/**
 * Refreshes existing Swing components so that they reflect the current state of
 * the IFML model element they visualize. Each {@code handleXxx()} callback
 * updates one widget kind (text, icon, enabled state, selection, orientation,
 * scrollbar policy, ...) and returns the component that should be rendered.
 *
 * @author Mindaugas Genutis
 */
public class JComponentUpdater implements JComponentHandler
{
    /** The Swing component being refreshed. */
    private JComponent mComponent;

    /** Diagram symbol hosting the component; used to resize it on orientation changes. */
    private ShapeElement presentationElement;

    /** Model element whose properties drive the component state. */
    private NamedElement mElement;

    /**
     * Creates an updater for the given component and its diagram symbol.
     *
     * @param component the Swing component to refresh
     * @param pElement  the presentation element; expected to be a {@link ShapeElement}
     *                  backed by a {@link NamedElement}
     */
    public JComponentUpdater(JComponent component, PresentationElement pElement)
    {
        mComponent = component;
        presentationElement = (ShapeElement) pElement;
        mElement = (NamedElement)pElement.getElement();
    }

    /** Updates button text, icon, enabled state and selected state from the model. */
    @Override
    public Object handleButton()
    {
        JButton button = (JButton) mComponent;
        button.setText(IfmlModelingHelper.getText(mElement));
        IfmlModelingHelper.setComponentIcon(mElement, button);
        IfmlModelingHelper.setComponentEnabled(mElement, button);
        IfmlModelingHelper.setButtonSelected(mElement, button);
        return button;
    }

    /** Updates check box text, enabled state and selected state from the model. */
    @Override
    public Object handleCheckBox()
    {
        JCheckBox checkBox = (JCheckBox) mComponent;
        checkBox.setText(IfmlModelingHelper.getText(mElement));
        // Transparent so the diagram background shows through.
        checkBox.setOpaque(false);
        IfmlModelingHelper.setComponentEnabled(mElement, checkBox);
        IfmlModelingHelper.setButtonSelected(mElement, checkBox);
        return checkBox;
    }

    /** Replaces the combo box model with a single entry holding the element's text. */
    @Override
    public Object handleComboBox()
    {
        JComboBox comboBox = (JComboBox) mComponent;
        DefaultComboBoxModel model = new DefaultComboBoxModel();
        model.addElement(IfmlModelingHelper.getText(mElement));
        comboBox.setModel(model);
        IfmlModelingHelper.setComponentEnabled(mElement, comboBox);
        comboBox.setEditable(false);
        return comboBox;
    }

    /** Renders a label as an underlined, hyperlink-styled text. */
    @Override
    public Object handleHyperlink()
    {
        JLabel hyperlink = (JLabel) mComponent;
        // NOTE(review): the <U> tag is never closed; Swing's HTML renderer
        // tolerates this, so the literal is kept byte-for-byte.
        hyperlink.setText("<html><U>" + IfmlModelingHelper.getText(mElement)+ "</html>");
        IfmlModelingHelper.setComponentIcon(mElement, hyperlink);
        IfmlModelingHelper.setComponentEnabled(mElement, hyperlink);
        // Inactive links use the look-and-feel's disabled color, active ones blue.
        hyperlink.setForeground(IfmlModelingHelper.isInactive(mElement)
                ? (Color) UIManager.get("Label.disabledForeground") : Color.BLUE);
        return hyperlink;
    }

    /** Updates label text, icon and enabled state from the model. */
    @Override
    public Object handleLabel()
    {
        JLabel label = (JLabel) mComponent;
        label.setText(IfmlModelingHelper.getText(mElement));
        IfmlModelingHelper.setComponentIcon(mElement, label);
        IfmlModelingHelper.setComponentEnabled(mElement, label);
        return label;
    }

    /** Rebuilds the menu bar with one (empty) menu per model-defined title. */
    @Override
    public Object handleMenuBar()
    {
        JMenuBar menuBar = (JMenuBar) mComponent;
        menuBar.removeAll();
        List<String> menus = IfmlModelingHelper.getMenuBarMenus(mElement);
        for (String menuTitle : menus)
        {
            menuBar.add(new JMenu(menuTitle));
        }
        return menuBar;
    }

    /** Updates password field text, echo character and enabled state. */
    @Override
    public Object handlePasswordField()
    {
        JPasswordField passwordField = (JPasswordField) mComponent;
        passwordField.setText(IfmlModelingHelper.getText(mElement));
        passwordField.setColumns(10);
        if (!IfmlModelingHelper.isPasswordFieldHidden(mElement))
        {
            // Echo char 0 shows the text in plain form.
            passwordField.setEchoChar((char) 0);
        }
        else
        {
            passwordField.setEchoChar('*');
        }
        IfmlModelingHelper.setComponentEnabled(mElement, passwordField);
        return passwordField;
    }

    /** Updates progress bar orientation, range, value and value string. */
    @Override
    public Object handleProgressBar()
    {
        JProgressBar progressBar = (JProgressBar) mComponent;
        int oldOrientation = progressBar.getOrientation();
        int orientation = IfmlModelingHelper.getOrientation(IfmlModelingHelper.isVertical(mElement));
        progressBar.setOrientation(orientation);
        changeSizeByOrientation(presentationElement, oldOrientation, orientation);
        progressBar.setMinimum(IfmlModelingHelper.getMinValue(mElement));
        progressBar.setMaximum(IfmlModelingHelper.getMaxValue(mElement));
        progressBar.setValue(IfmlModelingHelper.getProgressBarValue(mElement));
        progressBar.setString(Integer.toString(progressBar.getValue()));
        progressBar.setStringPainted(true);
        return progressBar;
    }

    /** Updates radio button text, enabled state and selected state. */
    @Override
    public Object handleRadioButton()
    {
        JRadioButton radioButton = (JRadioButton) mComponent;
        radioButton.setText(IfmlModelingHelper.getText(mElement));
        radioButton.setOpaque(false);
        IfmlModelingHelper.setComponentEnabled(mElement, radioButton);
        IfmlModelingHelper.setButtonSelected(mElement, radioButton);
        return radioButton;
    }

    /** Updates scroll bar orientation and enabled state. */
    @Override
    public Object handleScrollBar()
    {
        JScrollBar scrollBar = (JScrollBar) mComponent;
        int oldOrientation = scrollBar.getOrientation();
        int orientation = IfmlModelingHelper.getOrientation(IfmlModelingHelper.isVertical(mElement));
        scrollBar.setOrientation(orientation);
        changeSizeByOrientation(presentationElement, oldOrientation, orientation);
        IfmlModelingHelper.setComponentEnabled(mElement, scrollBar);
        return scrollBar;
    }

    /** Updates separator orientation. */
    @Override
    public Object handleSeparator()
    {
        JSeparator separator = (JSeparator) mComponent;
        int oldOrientation = separator.getOrientation();
        int orientation = IfmlModelingHelper.getOrientation(IfmlModelingHelper.isVertical(mElement));
        separator.setOrientation(orientation);
        changeSizeByOrientation(presentationElement, oldOrientation, orientation);
        return separator;
    }

    /**
     * Updates slider orientation, range, labels, tick spacing and value.
     * Model-provided label entries are "key\nlabel" pairs; the label part is
     * optional and defaults to the key itself.
     */
    @Override
    public Object handleSlider()
    {
        JSlider slider = (JSlider) mComponent;
        int oldOrientation = slider.getOrientation();
        int orientation = IfmlModelingHelper.getOrientation(IfmlModelingHelper.isVertical(mElement));
        slider.setOrientation(orientation);
        changeSizeByOrientation(presentationElement, oldOrientation, orientation);
        slider.setMinimum(IfmlModelingHelper.getMinValue(mElement));
        slider.setMaximum(IfmlModelingHelper.getMaxValue(mElement));
        int minimum = slider.getMinimum();
        int maximum = slider.getMaximum();
        Hashtable<Integer, JLabel> labelTable = new Hashtable<Integer, JLabel>();
        labelTable.put(minimum, new JLabel(Integer.toString(minimum)));
        labelTable.put(maximum, new JLabel(Integer.toString(maximum)));
        List<String> values = IfmlModelingHelper.getSliderValues(mElement);
        //remove empty strings
        while (values.remove("")) ;
        for (String value : values)
        {
            String[] splitted = value.split("\n");
            if (splitted.length >= 1)
            {
                // NOTE(review): assumes the first token is a parseable integer;
                // a malformed model value would throw NumberFormatException here.
                Integer key = Integer.valueOf(splitted[0]);
                String label = splitted.length >= 2 ? splitted[1] : key.toString();
                labelTable.put(key, new JLabel(label));
            }
        }
        //set spacing in ranges
        int spacing = IfmlModelingHelper.getSliderMajorTickSpacing(mElement);
        if (spacing <= maximum)
        {
            slider.setMajorTickSpacing(spacing);
        }
        else
        {
            slider.setMajorTickSpacing(maximum);
        }
        slider.setInverted(IfmlModelingHelper.isSliderInverted(mElement));
        //set position in ranges - clamp the model value into [minimum, maximum]
        int position = IfmlModelingHelper.getSliderValue(mElement);
        if (position > maximum)
        {
            slider.setValue(maximum);
        }
        else if (position < minimum)
        {
            slider.setValue(minimum);
        }
        else
        {
            slider.setValue(position);
        }
        slider.setLabelTable(labelTable);
        IfmlModelingHelper.setComponentEnabled(mElement, slider);
        slider.setPaintTicks(true);
        slider.setPaintLabels(true);
        return slider;
    }

    /**
     * Updates the spinner with a single-value list model showing the element's text.
     * <p>
     * Fix: {@link JSpinner#getEditor()} normally returns a
     * {@link JSpinner.DefaultEditor} (a panel wrapping a text field), not a
     * {@code JTextField}; the previous unconditional {@code (JTextField)} cast
     * could throw {@link ClassCastException}. Both editor shapes are handled now.
     */
    @Override
    public Object handleSpinner()
    {
        JSpinner spinner = (JSpinner) mComponent;
        List<String> values = new ArrayList<String>();
        String text = IfmlModelingHelper.getText(mElement);
        values.add(text);
        SpinnerListModel spinnerModel = new SpinnerListModel();
        spinnerModel.setList(values);
        spinner.setModel(spinnerModel);
        JComponent editor = spinner.getEditor();
        if (editor instanceof JSpinner.DefaultEditor)
        {
            ((JSpinner.DefaultEditor) editor).getTextField().setText(text);
        }
        else if (editor instanceof JTextField)
        {
            // Custom editor installed as a bare text field.
            ((JTextField) editor).setText(text);
        }
        IfmlModelingHelper.setComponentEnabled(mElement, spinner);
        return spinner;
    }

    /** Updates the text area inside its scroll pane and the scrollbar policies. */
    @Override
    public Object handleTextArea()
    {
        JScrollPane textFieldScrollPane = (JScrollPane)mComponent;
        JTextArea textArea = (JTextArea) textFieldScrollPane.getViewport().getView();
        textArea.setText(IfmlModelingHelper.getText(mElement));
        textFieldScrollPane.setHorizontalScrollBarPolicy(IfmlModelingHelper.HORIZONTAL_SCROLLBAR_POLICY.get(IfmlModelingHelper.getHorizontalScrollbarPolicy(mElement)));
        textFieldScrollPane.setVerticalScrollBarPolicy(IfmlModelingHelper.VERTICAL_SCROLLBAR_POLICY.get(IfmlModelingHelper.getVerticalScrollbarPolicy(mElement)));
        IfmlModelingHelper.setComponentEnabled(mElement, textArea);
        return textFieldScrollPane;
    }

    /** Updates text field text and enabled state. */
    @Override
    public Object handleTextField()
    {
        JTextField textField = (JTextField) mComponent;
        textField.setText(IfmlModelingHelper.getText(mElement));
        IfmlModelingHelper.setComponentEnabled(mElement, textField);
        return textField;
    }

    /** Updates frame title, icon, maximize/minimize buttons and enabled state. */
    @Override
    public Object handleFrame()
    {
        IfmlFrame frame = (IfmlFrame) mComponent;
        frame.setTitle(IfmlModelingHelper.getText(mElement));
        IfmlModelingHelper.setComponentIcon(mElement, frame);
        frame.setMaximizable(IfmlModelingHelper.isFrameMaximize(mElement));
        frame.setIconifiable(IfmlModelingHelper.isFrameMinimize(mElement));
        IfmlModelingHelper.setComponentEnabled(mElement, frame);
        return frame;
    }

    /** Updates the titled border of a group box panel (title hidden when unnamed). */
    @Override
    public Object handleGroupBox()
    {
        JPanel panel = (JPanel) mComponent;
        TitledBorder titledBorder = BorderFactory.createTitledBorder(IfmlModelingHelper.getBorder(presentationElement), IfmlModelingHelper.getText(mElement));
        if (!IfmlModelingHelper.isGroupBoxNamed(mElement))
        {
            titledBorder.setTitle("");
        }
        panel.setBorder(titledBorder);
        return panel;
    }

    /** Rebuilds the list model from model values and restores the selection. */
    @Override
    public Object handleList()
    {
        JScrollPane listScrollPane = (JScrollPane) mComponent;
        JList list = (JList) listScrollPane.getViewport().getView();
        DefaultListModel model = new DefaultListModel();
        List<String> values = IfmlModelingHelper.getListValues(mElement);
        for (String value : values)
        {
            model.addElement(value);
        }
        list.setModel(model);
        list.setSelectedValue(IfmlModelingHelper.getListSelectedValue(mElement), true);
        listScrollPane.setHorizontalScrollBarPolicy(IfmlModelingHelper.HORIZONTAL_SCROLLBAR_POLICY.get(IfmlModelingHelper.getHorizontalScrollbarPolicy(mElement)));
        listScrollPane.setVerticalScrollBarPolicy(IfmlModelingHelper.VERTICAL_SCROLLBAR_POLICY.get(IfmlModelingHelper.getVerticalScrollbarPolicy(mElement)));
        IfmlModelingHelper.setComponentEnabled(mElement, list);
        return listScrollPane;
    }

    /** Panels need no refresh; returns the component unchanged. */
    @Override
    public Object handlePanel()
    {
        return mComponent;
    }

    /** Updates scroll pane scrollbar policies from the model. */
    @Override
    public Object handleScrollPane()
    {
        JScrollPane scrollPane = (JScrollPane) mComponent;
        scrollPane.setHorizontalScrollBarPolicy(IfmlModelingHelper.HORIZONTAL_SCROLLBAR_POLICY.get(IfmlModelingHelper.getScrollPaneHorizontalScrollbarPolicy(mElement)));
        scrollPane.setVerticalScrollBarPolicy(IfmlModelingHelper.VERTICAL_SCROLLBAR_POLICY.get(IfmlModelingHelper.getScrollPanelVerticalScrollbarPolicy(mElement)));
        return scrollPane;
    }

    /** Rebuilds the tabs, sets tab placement and restores the active tab. */
    @Override
    public Object handleTabbedPane()
    {
        JTabbedPane tabbedPane = (JTabbedPane) mComponent;
        tabbedPane.removeAll();
        // NOTE(review): this mutates a global UIManager default, affecting every
        // tabbed pane in the application, then forces a UI refresh to pick it up.
        UIManager.put("TabbedPane.contentBorderInsets", new InsetsUIResource(-2, 0, 0, 0));
        tabbedPane.updateUI();
        Border border = BorderFactory.createLineBorder(Color.GRAY);
        List<String> tabs = IfmlModelingHelper.getTabbedPaneTabs(mElement);
        for (String tab : tabs)
        {
            JPanel panel = new JPanel();
            panel.setBorder(border);
            tabbedPane.addTab(tab, panel);
        }
        tabbedPane.setTabPlacement(IfmlModelingHelper.TAB_POSITIONS.get(IfmlModelingHelper.getTabbedPanePlacement(mElement)));
        // Fall back to the first tab when the model's active tab is not present.
        int active = tabbedPane.indexOfTab(IfmlModelingHelper.getTabbedPaneActiveTab(mElement));
        if (active == -1 && tabbedPane.getTabCount() > 0)
        {
            active = 0;
        }
        tabbedPane.setSelectedIndex(active);
        return tabbedPane;
    }

    /** Updates table header visibility and delegates cell refresh to TableHelper. */
    @Override
    public Object handleTable()
    {
        JScrollPane tableScrollPane = (JScrollPane) mComponent;
        JViewport header = tableScrollPane.getColumnHeader();
        if (header != null)
        {
            header.setVisible(IfmlModelingHelper.isTableColumnHeaderVisible(mElement));
        }
        tableScrollPane.setVerticalScrollBarPolicy(ScrollPaneConstants.VERTICAL_SCROLLBAR_NEVER);
        TableHelper.updateTableViews(IfmlModelingHelper.getTableComponent(mElement));
        return tableScrollPane;
    }

    /** Tool bars need no refresh; returns the component unchanged. */
    @Override
    public Object handleToolBar()
    {
        return mComponent;
    }

    /** Delegates tree refresh to TreeHelper. */
    @Override
    public Object handleTree()
    {
        JScrollPane treeScrollPane = (JScrollPane) mComponent;
        JTree tree = (JTree) treeScrollPane.getViewport().getView();
        TreeHelper.updateTree(treeScrollPane, tree, mElement);
        return treeScrollPane;
    }

    /**
     * Swaps the symbol's width and height when the component's orientation has
     * changed and its current bounds do not match the new orientation.
     */
    private void changeSizeByOrientation(PresentationElement presentationElement, int previousOrientation, int newOrientation)
    {
        if(newOrientation != previousOrientation)
        {
            Rectangle bounds = presentationElement.getBounds();
            if(newOrientation == Adjustable.HORIZONTAL && bounds.width < bounds.height ||
                    newOrientation == Adjustable.VERTICAL && bounds.width > bounds.height)
            {
                presentationElement.setSize(bounds.height, bounds.width);
            }
        }
    }
}
|
|
/*
* Copyright 2012 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.netty.channel.udt.nio;
import com.barchart.udt.TypeUDT;
import com.barchart.udt.nio.SocketChannelUDT;
import io.netty.buffer.ByteBuf;
import io.netty.channel.Channel;
import io.netty.channel.ChannelException;
import io.netty.channel.ChannelMetadata;
import io.netty.channel.ChannelOutboundBuffer;
import io.netty.util.internal.SocketUtils;
import io.netty.channel.nio.AbstractNioMessageChannel;
import io.netty.channel.udt.DefaultUdtChannelConfig;
import io.netty.channel.udt.UdtChannel;
import io.netty.channel.udt.UdtChannelConfig;
import io.netty.channel.udt.UdtMessage;
import io.netty.util.internal.logging.InternalLogger;
import io.netty.util.internal.logging.InternalLoggerFactory;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.SocketAddress;
import java.security.AccessController;
import java.security.PrivilegedActionException;
import java.security.PrivilegedExceptionAction;
import java.util.List;
import static java.nio.channels.SelectionKey.*;
/**
* Message Connector for UDT Datagrams.
* <p>
* Note: send/receive must use {@link UdtMessage} in the pipeline
*
* @deprecated The UDT transport is no longer maintained and will be removed.
*/
@Deprecated
public class NioUdtMessageConnectorChannel extends AbstractNioMessageChannel implements UdtChannel {

    private static final InternalLogger logger =
            InternalLoggerFactory.getInstance(NioUdtMessageConnectorChannel.class);

    // Channel metadata; the boolean ctor argument is 'false' here, matching a
    // transport without a standalone disconnect (doDisconnect() simply closes).
    private static final ChannelMetadata METADATA = new ChannelMetadata(false);

    private final UdtChannelConfig config;

    /** Creates a connector channel for the DATAGRAM UDT type. */
    public NioUdtMessageConnectorChannel() {
        this(TypeUDT.DATAGRAM);
    }

    /**
     * Wraps an existing UDT socket channel. The channel is switched to
     * non-blocking mode and a config is built; whether receive/send buffer
     * options are applied depends on the socket status (pre- vs post-connect).
     * On any failure the underlying channel is closed and a
     * {@link ChannelException} is thrown.
     */
    public NioUdtMessageConnectorChannel(final Channel parent, final SocketChannelUDT channelUDT) {
        super(parent, channelUDT, OP_READ);
        try {
            channelUDT.configureBlocking(false);
            switch (channelUDT.socketUDT().status()) {
                case INIT:
                case OPENED:
                    // Fresh socket: config may still apply socket options.
                    config = new DefaultUdtChannelConfig(this, channelUDT, true);
                    break;
                default:
                    // Already connected/accepted: skip option application.
                    config = new DefaultUdtChannelConfig(this, channelUDT, false);
                    break;
            }
        } catch (final Exception e) {
            try {
                channelUDT.close();
            } catch (final Exception e2) {
                logger.warn("Failed to close channel.", e2);
            }
            throw new ChannelException("Failed to configure channel.", e);
        }
    }

    /** Wraps an existing UDT socket channel with no parent channel. */
    public NioUdtMessageConnectorChannel(final SocketChannelUDT channelUDT) {
        this(null, channelUDT);
    }

    /** Creates a new connector channel of the given UDT type via the provider. */
    public NioUdtMessageConnectorChannel(final TypeUDT type) {
        this(NioUdtProvider.newConnectorChannelUDT(type));
    }

    @Override
    public UdtChannelConfig config() {
        return config;
    }

    @Override
    protected void doBind(final SocketAddress localAddress) throws Exception {
        // Bind runs in a privileged block so a SecurityManager does not reject it.
        privilegedBind(javaChannel(), localAddress);
    }

    @Override
    protected void doClose() throws Exception {
        javaChannel().close();
    }

    @Override
    protected boolean doConnect(final SocketAddress remoteAddress,
            final SocketAddress localAddress) throws Exception {
        // UDT requires an explicit bind before connect; use an ephemeral
        // wildcard address when the caller did not supply one.
        doBind(localAddress != null? localAddress : new InetSocketAddress(0));
        boolean success = false;
        try {
            final boolean connected = SocketUtils.connect(javaChannel(), remoteAddress);
            if (!connected) {
                // Connect is in progress; wait for OP_CONNECT readiness.
                selectionKey().interestOps(
                        selectionKey().interestOps() | OP_CONNECT);
            }
            success = true;
            return connected;
        } finally {
            // Any failure path must release the channel.
            if (!success) {
                doClose();
            }
        }
    }

    @Override
    protected void doDisconnect() throws Exception {
        // No transport-level disconnect for UDT datagrams; closing is the only option.
        doClose();
    }

    @Override
    protected void doFinishConnect() throws Exception {
        if (javaChannel().finishConnect()) {
            // Connected: stop watching for OP_CONNECT.
            selectionKey().interestOps(
                    selectionKey().interestOps() & ~OP_CONNECT);
        } else {
            throw new Error(
                    "Provider error: failed to finish connect. Provider library should be upgraded.");
        }
    }

    @Override
    protected int doReadMessages(List<Object> buf) throws Exception {
        // One UDT datagram per read; the buffer must be able to hold a whole message.
        final int maximumMessageSize = config.getReceiveBufferSize();
        final ByteBuf byteBuf = config.getAllocator().directBuffer(
                maximumMessageSize);
        final int receivedMessageSize = byteBuf.writeBytes(javaChannel(),
                maximumMessageSize);
        if (receivedMessageSize <= 0) {
            // Nothing read; release the speculatively allocated buffer.
            byteBuf.release();
            return 0;
        }
        if (receivedMessageSize >= maximumMessageSize) {
            // A message that fills the buffer was likely truncated - fatal config error.
            javaChannel().close();
            throw new ChannelException(
                    "Invalid config : increase receive buffer size to avoid message truncation");
        }
        // delivers a message
        buf.add(new UdtMessage(byteBuf));
        return 1;
    }

    @Override
    protected boolean doWriteMessage(Object msg, ChannelOutboundBuffer in) throws Exception {
        // expects a message
        final UdtMessage message = (UdtMessage) msg;
        final ByteBuf byteBuf = message.content();
        final int messageSize = byteBuf.readableBytes();
        if (messageSize == 0) {
            // Empty message: treat as written.
            return true;
        }
        final long writtenBytes;
        if (byteBuf.nioBufferCount() == 1) {
            writtenBytes = javaChannel().write(byteBuf.nioBuffer());
        } else {
            writtenBytes = javaChannel().write(byteBuf.nioBuffers());
        }
        // wrote message completely
        // UDT datagram semantics: a message is written either fully or not at all;
        // a partial write indicates a provider bug.
        if (writtenBytes > 0 && writtenBytes != messageSize) {
            throw new Error(
                    "Provider error: failed to write message. Provider library should be upgraded.");
        }
        return writtenBytes > 0;
    }

    @Override
    public boolean isActive() {
        final SocketChannelUDT channelUDT = javaChannel();
        return channelUDT.isOpen() && channelUDT.isConnectFinished();
    }

    @Override
    protected SocketChannelUDT javaChannel() {
        return (SocketChannelUDT) super.javaChannel();
    }

    @Override
    protected SocketAddress localAddress0() {
        return javaChannel().socket().getLocalSocketAddress();
    }

    @Override
    public ChannelMetadata metadata() {
        return METADATA;
    }

    @Override
    protected SocketAddress remoteAddress0() {
        return javaChannel().socket().getRemoteSocketAddress();
    }

    @Override
    public InetSocketAddress localAddress() {
        return (InetSocketAddress) super.localAddress();
    }

    @Override
    public InetSocketAddress remoteAddress() {
        return (InetSocketAddress) super.remoteAddress();
    }

    /**
     * Binds the socket inside a privileged action; the original IOException is
     * unwrapped from the PrivilegedActionException and rethrown.
     */
    private static void privilegedBind(final SocketChannelUDT socketChannel, final SocketAddress localAddress)
            throws IOException {
        try {
            AccessController.doPrivileged(new PrivilegedExceptionAction<Void>() {
                @Override
                public Void run() throws IOException {
                    socketChannel.bind(localAddress);
                    return null;
                }
            });
        } catch (PrivilegedActionException e) {
            throw (IOException) e.getCause();
        }
    }
}
|
|
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* K2.java
* Copyright (C) 2001-2012 University of Waikato, Hamilton, New Zealand
*
*/
package weka.classifiers.bayes.net.search.global;
import java.util.Enumeration;
import java.util.Random;
import java.util.Vector;
import weka.classifiers.bayes.BayesNet;
import weka.core.Instances;
import weka.core.Option;
import weka.core.RevisionUtils;
import weka.core.TechnicalInformation;
import weka.core.TechnicalInformation.Field;
import weka.core.TechnicalInformation.Type;
import weka.core.TechnicalInformationHandler;
import weka.core.Utils;
/**
<!-- globalinfo-start -->
* This Bayes Network learning algorithm uses a hill climbing algorithm restricted by an order on the variables.<br/>
* <br/>
* For more information see:<br/>
* <br/>
* G.F. Cooper, E. Herskovits (1990). A Bayesian method for constructing Bayesian belief networks from databases.<br/>
* <br/>
* G. Cooper, E. Herskovits (1992). A Bayesian method for the induction of probabilistic networks from data. Machine Learning. 9(4):309-347.<br/>
* <br/>
* Works with nominal variables and no missing values only.
* <p/>
<!-- globalinfo-end -->
*
<!-- technical-bibtex-start -->
* BibTeX:
* <pre>
* @proceedings{Cooper1990,
* author = {G.F. Cooper and E. Herskovits},
* booktitle = {Proceedings of the Conference on Uncertainty in AI},
* pages = {86-94},
* title = {A Bayesian method for constructing Bayesian belief networks from databases},
* year = {1990}
* }
*
* @article{Cooper1992,
* author = {G. Cooper and E. Herskovits},
* journal = {Machine Learning},
* number = {4},
* pages = {309-347},
* title = {A Bayesian method for the induction of probabilistic networks from data},
* volume = {9},
* year = {1992}
* }
* </pre>
* <p/>
<!-- technical-bibtex-end -->
*
<!-- options-start -->
* Valid options are: <p/>
*
* <pre> -N
* Initial structure is empty (instead of Naive Bayes)</pre>
*
* <pre> -P <nr of parents>
* Maximum number of parents</pre>
*
* <pre> -R
* Random order.
* (default false)</pre>
*
* <pre> -mbc
* Applies a Markov Blanket correction to the network structure,
* after a network structure is learned. This ensures that all
* nodes in the network are part of the Markov blanket of the
* classifier node.</pre>
*
* <pre> -S [LOO-CV|k-Fold-CV|Cumulative-CV]
* Score type (LOO-CV,k-Fold-CV,Cumulative-CV)</pre>
*
* <pre> -Q
* Use probabilistic or 0/1 scoring.
* (default probabilistic scoring)</pre>
*
<!-- options-end -->
*
* @author Remco Bouckaert (rrb@xm.co.nz)
* @version $Revision: 8034 $
*/
public class K2
    extends GlobalScoreSearchAlgorithm
    implements TechnicalInformationHandler {

    /** for serialization */
    static final long serialVersionUID = -6626871067466338256L;

    /** Holds flag to indicate ordering should be random **/
    boolean m_bRandomOrder = false;

    /**
     * Returns an instance of a TechnicalInformation object, containing
     * detailed information about the technical background of this class,
     * e.g., paper reference or book this class is based on.
     *
     * @return the technical information about this class
     */
    public TechnicalInformation getTechnicalInformation() {
        TechnicalInformation result;
        TechnicalInformation additional;

        result = new TechnicalInformation(Type.PROCEEDINGS);
        result.setValue(Field.AUTHOR, "G.F. Cooper and E. Herskovits");
        result.setValue(Field.YEAR, "1990");
        result.setValue(Field.TITLE, "A Bayesian method for constructing Bayesian belief networks from databases");
        result.setValue(Field.BOOKTITLE, "Proceedings of the Conference on Uncertainty in AI");
        result.setValue(Field.PAGES, "86-94");

        additional = result.add(Type.ARTICLE);
        additional.setValue(Field.AUTHOR, "G. Cooper and E. Herskovits");
        additional.setValue(Field.YEAR, "1992");
        additional.setValue(Field.TITLE, "A Bayesian method for the induction of probabilistic networks from data");
        additional.setValue(Field.JOURNAL, "Machine Learning");
        additional.setValue(Field.VOLUME, "9");
        additional.setValue(Field.NUMBER, "4");
        additional.setValue(Field.PAGES, "309-347");

        return result;
    }

    /**
     * search determines the network structure/graph of the network
     * with the K2 algorithm, restricted by its initial structure (which can
     * be an empty graph, or a Naive Bayes graph.
     *
     * @param bayesNet the network
     * @param instances the data to work with
     * @throws Exception if something goes wrong
     */
    public void search (BayesNet bayesNet, Instances instances) throws Exception {
        // Build the attribute ordering: class attribute first, then the
        // remaining attributes in dataset order.
        int nOrder[] = new int [instances.numAttributes()];
        nOrder[0] = instances.classIndex();
        int nAttribute = 0;
        for (int iOrder = 1; iOrder < instances.numAttributes(); iOrder++) {
            if (nAttribute == instances.classIndex()) {
                nAttribute++;
            }
            nOrder[iOrder] = nAttribute++;
        }

        if (m_bRandomOrder) {
            // generate random ordering (if required)
            Random random = new Random();
            // When initialized as Naive Bayes, the class stays pinned at
            // position 0; otherwise no position is pinned (iClass == -1).
            int iClass;
            if (getInitAsNaiveBayes()) {
                iClass = 0;
            } else {
                iClass = -1;
            }
            for (int iOrder = 0; iOrder < instances.numAttributes(); iOrder++) {
                // FIX: use nextInt(bound) instead of Math.abs(nextInt()) % n.
                // Math.abs(Integer.MIN_VALUE) is negative, which could produce a
                // negative index, and the modulo form is slightly biased.
                int iOrder2 = random.nextInt(instances.numAttributes());
                if (iOrder != iClass && iOrder2 != iClass) {
                    int nTmp = nOrder[iOrder];
                    nOrder[iOrder] = nOrder[iOrder2];
                    nOrder[iOrder2] = nTmp;
                }
            }
        }

        // determine base scores
        double fBaseScore = calcScore(bayesNet);

        // K2 algorithm: greedy search restricted by ordering.
        // For each attribute (in order), repeatedly add the single best-scoring
        // parent from among earlier attributes, until no parent improves the
        // score or the maximum number of parents is reached.
        for (int iOrder = 1; iOrder < instances.numAttributes(); iOrder++) {
            int iAttribute = nOrder[iOrder];
            double fBestScore = fBaseScore;

            boolean bProgress = (bayesNet.getParentSet(iAttribute).getNrOfParents() < getMaxNrOfParents());
            while (bProgress && (bayesNet.getParentSet(iAttribute).getNrOfParents() < getMaxNrOfParents())) {
                int nBestAttribute = -1;
                for (int iOrder2 = 0; iOrder2 < iOrder; iOrder2++) {
                    int iAttribute2 = nOrder[iOrder2];
                    double fScore = calcScoreWithExtraParent(iAttribute, iAttribute2);
                    if (fScore > fBestScore) {
                        fBestScore = fScore;
                        nBestAttribute = iAttribute2;
                    }
                }
                if (nBestAttribute != -1) {
                    bayesNet.getParentSet(iAttribute).addParent(nBestAttribute, instances);
                    fBaseScore = fBestScore;
                    bProgress = true;
                } else {
                    bProgress = false;
                }
            }
        }
    } // search

    /**
     * Sets the max number of parents
     *
     * @param nMaxNrOfParents the max number of parents
     */
    public void setMaxNrOfParents(int nMaxNrOfParents) {
        m_nMaxNrOfParents = nMaxNrOfParents;
    }

    /**
     * Gets the max number of parents.
     *
     * @return the max number of parents
     */
    public int getMaxNrOfParents() {
        return m_nMaxNrOfParents;
    }

    /**
     * Sets whether to init as naive bayes
     *
     * @param bInitAsNaiveBayes whether to init as naive bayes
     */
    public void setInitAsNaiveBayes(boolean bInitAsNaiveBayes) {
        m_bInitAsNaiveBayes = bInitAsNaiveBayes;
    }

    /**
     * Gets whether to init as naive bayes
     *
     * @return whether to init as naive bayes
     */
    public boolean getInitAsNaiveBayes() {
        return m_bInitAsNaiveBayes;
    }

    /**
     * Set random order flag
     *
     * @param bRandomOrder the random order flag
     */
    public void setRandomOrder(boolean bRandomOrder) {
        m_bRandomOrder = bRandomOrder;
    } // SetRandomOrder

    /**
     * Get random order flag
     *
     * @return the random order flag
     */
    public boolean getRandomOrder() {
        return m_bRandomOrder;
    } // getRandomOrder

    /**
     * Returns an enumeration describing the available options.
     *
     * @return an enumeration of all the available options.
     */
    public Enumeration listOptions() {
        // Raw Vector/Enumeration kept to stay signature-compatible with the
        // legacy weka OptionHandler convention used by the superclass.
        Vector newVector = new Vector(0);

        newVector.addElement(new Option("\tInitial structure is empty (instead of Naive Bayes)",
                "N", 0, "-N"));

        newVector.addElement(new Option("\tMaximum number of parents", "P", 1,
                "-P <nr of parents>"));

        newVector.addElement(new Option(
                "\tRandom order.\n"
                + "\t(default false)",
                "R", 0, "-R"));

        Enumeration enu = super.listOptions();
        while (enu.hasMoreElements()) {
            newVector.addElement(enu.nextElement());
        }
        return newVector.elements();
    }

    /**
     * Parses a given list of options. <p/>
     *
     <!-- options-start -->
     * Valid options are: <p/>
     *
     * <pre> -N
     * Initial structure is empty (instead of Naive Bayes)</pre>
     *
     * <pre> -P &lt;nr of parents&gt;
     * Maximum number of parents</pre>
     *
     * <pre> -R
     * Random order.
     * (default false)</pre>
     *
     * <pre> -mbc
     * Applies a Markov Blanket correction to the network structure,
     * after a network structure is learned. This ensures that all
     * nodes in the network are part of the Markov blanket of the
     * classifier node.</pre>
     *
     * <pre> -S [LOO-CV|k-Fold-CV|Cumulative-CV]
     * Score type (LOO-CV,k-Fold-CV,Cumulative-CV)</pre>
     *
     * <pre> -Q
     * Use probabilistic or 0/1 scoring.
     * (default probabilistic scoring)</pre>
     *
     <!-- options-end -->
     *
     * @param options the list of options as an array of strings
     * @throws Exception if an option is not supported
     */
    public void setOptions(String[] options) throws Exception {
        setRandomOrder(Utils.getFlag('R', options));
        m_bInitAsNaiveBayes = !(Utils.getFlag('N', options));
        String sMaxNrOfParents = Utils.getOption('P', options);
        if (sMaxNrOfParents.length() != 0) {
            setMaxNrOfParents(Integer.parseInt(sMaxNrOfParents));
        } else {
            // Effectively unbounded when -P is not given.
            setMaxNrOfParents(100000);
        }
        super.setOptions(options);
    }

    /**
     * Gets the current settings of the search algorithm.
     *
     * @return an array of strings suitable for passing to setOptions
     */
    public String [] getOptions() {
        String[] superOptions = super.getOptions();
        String[] options = new String[4 + superOptions.length];
        int current = 0;

        options[current++] = "-P";
        options[current++] = "" + m_nMaxNrOfParents;
        if (!m_bInitAsNaiveBayes) {
            options[current++] = "-N";
        }
        if (getRandomOrder()) {
            options[current++] = "-R";
        }

        // insert options from parent class
        for (int iOption = 0; iOption < superOptions.length; iOption++) {
            options[current++] = superOptions[iOption];
        }

        // Fill up rest with empty strings, not nulls!
        while (current < options.length) {
            options[current++] = "";
        }
        return options;
    }

    /**
     * @return a string to describe the RandomOrder option.
     */
    public String randomOrderTipText() {
        return "When set to true, the order of the nodes in the network is random." +
               " Default random order is false and the order" +
               " of the nodes in the dataset is used." +
               " In any case, when the network was initialized as Naive Bayes Network, the" +
               " class variable is first in the ordering though.";
    } // randomOrderTipText

    /**
     * This will return a string describing the search algorithm.
     * @return The string.
     */
    public String globalInfo() {
        return
            "This Bayes Network learning algorithm uses a hill climbing algorithm "
            + "restricted by an order on the variables.\n\n"
            + "For more information see:\n\n"
            + getTechnicalInformation().toString() + "\n\n"
            + "Works with nominal variables and no missing values only.";
    }

    /**
     * Returns the revision string.
     *
     * @return the revision
     */
    public String getRevision() {
        return RevisionUtils.extract("$Revision: 8034 $");
    }
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.glaf.core.base;
import java.io.Serializable;
import java.util.Collection;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import org.apache.commons.lang3.builder.ToStringBuilder;
import com.alibaba.fastjson.JSONObject;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.glaf.core.domain.ColumnDefinition;
import com.glaf.core.util.DateUtils;
import com.glaf.core.util.StringTools;
/**
 * Default {@link DataModel} implementation backed by a free-form
 * {@code dataMap} of field values plus per-field {@link ColumnModel}
 * metadata. Provides JSON serialization via fastjson
 * ({@link #toJsonObject()}) and Jackson ({@link #toObjectNode()}).
 */
public class DataModelEntity implements DataModel, Serializable {
    private static final long serialVersionUID = 1L;
    protected String actorName;
    protected String businessKey;
    protected String createBy;
    protected Date createDate;
    // Logical field name -> raw value; lazily re-created if set to null.
    protected Map<String, Object> dataMap = new java.util.HashMap<String, Object>();
    protected int deleteFlag;
    protected String fallbackFlag;
    // Logical field name -> column metadata/value; kept in sync by addField().
    protected Map<String, ColumnModel> fields = new java.util.HashMap<String, ColumnModel>();
    protected String formName;
    protected Long id;
    protected int level;
    protected int listNo;
    protected int locked;
    protected String name;
    protected String objectId;
    protected String objectValue;
    protected Long parentId;
    protected String processInstanceId;
    protected String processName;
    protected String serviceKey;
    protected String signForFlag;
    protected int status;
    protected String subject;
    protected String tableName;
    protected String taskName;
    protected String treeId;
    protected String typeId;
    protected String updateBy;
    protected Date updateDate;
    protected int wfStatus;
    // Column definitions used by toJsonObject() to emit date variants and
    // logical-name aliases for each column.
    protected List<ColumnDefinition> columns = new java.util.ArrayList<ColumnDefinition>();

    public DataModelEntity() {
    }

    public void setColumns(List<ColumnDefinition> columns) {
        this.columns = columns;
    }

    /**
     * Registers a field: stores its metadata under the field name and its
     * value in the data map.
     *
     * @param field the column model to register; its name is the lookup key
     */
    public void addField(ColumnModel field) {
        if (fields == null) {
            fields = new java.util.HashMap<String, ColumnModel>();
        }
        fields.put(field.getName(), field);
        if (dataMap == null) {
            dataMap = new java.util.HashMap<String, Object>();
        }
        dataMap.put(field.getName(), field.getValue());
    }

    /**
     * Registers a field value under {@code key} together with its column
     * metadata, mirroring {@link #addField(ColumnModel)}.
     *
     * @param columnName the physical database column name
     * @param key        the logical field name used as the lookup key
     * @param value      the field value
     */
    public void addField(String columnName, String key, Object value) {
        if (dataMap == null) {
            dataMap = new java.util.HashMap<String, Object>();
        }
        dataMap.put(key, value);
        ColumnModel field = new ColumnModel();
        field.setColumnName(columnName);
        field.setName(key);
        field.setValue(value);
        // Bug fix: the constructed ColumnModel was previously discarded.
        // Register it so getFields()/getColumns() also see this field,
        // consistent with addField(ColumnModel).
        if (fields == null) {
            fields = new java.util.HashMap<String, ColumnModel>();
        }
        fields.put(key, field);
    }

    public String getActorName() {
        return actorName;
    }

    public String getBusinessKey() {
        return businessKey;
    }

    /**
     * Returns the registered field models (note: {@link ColumnModel}
     * values, not the {@link ColumnDefinition} list set via
     * {@link #setColumns(List)}).
     */
    public Collection<ColumnModel> getColumns() {
        return fields.values();
    }

    public String getCreateBy() {
        return createBy;
    }

    public Date getCreateDate() {
        return createDate;
    }

    public Map<String, Object> getDataMap() {
        if (dataMap == null) {
            dataMap = new java.util.HashMap<String, Object>();
        }
        return dataMap;
    }

    public int getDeleteFlag() {
        return deleteFlag;
    }

    public String getFallbackFlag() {
        return fallbackFlag;
    }

    public Map<String, ColumnModel> getFields() {
        return fields;
    }

    public String getFormName() {
        return formName;
    }

    public Long getId() {
        return id;
    }

    public int getLevel() {
        return level;
    }

    public int getListNo() {
        return listNo;
    }

    public int getLocked() {
        return locked;
    }

    public String getName() {
        return name;
    }

    public String getObjectId() {
        return objectId;
    }

    public String getObjectValue() {
        return objectValue;
    }

    public Long getParentId() {
        return parentId;
    }

    public String getProcessInstanceId() {
        return processInstanceId;
    }

    public String getProcessName() {
        return processName;
    }

    public String getServiceKey() {
        return serviceKey;
    }

    public String getSignForFlag() {
        return signForFlag;
    }

    public int getStatus() {
        return status;
    }

    /**
     * Returns the string form of the value stored under {@code key}.
     * Dates are formatted via {@link DateUtils#getDate(Date)}; missing
     * keys and null values yield the empty string.
     *
     * @param key the data-map key; may be null
     * @return the string value, never null
     */
    public String getString(String key) {
        if (key != null && dataMap != null) {
            Object value = dataMap.get(key);
            if (value != null) {
                if (value instanceof Date) {
                    return DateUtils.getDate((Date) value);
                }
                return value.toString();
            }
        }
        return "";
    }

    public String getSubject() {
        return subject;
    }

    public String getTableName() {
        return tableName;
    }

    public String getTaskName() {
        return taskName;
    }

    public String getTreeId() {
        return treeId;
    }

    public String getTypeId() {
        return typeId;
    }

    public String getUpdateBy() {
        return updateBy;
    }

    public Date getUpdateDate() {
        return updateDate;
    }

    public int getWfStatus() {
        return wfStatus;
    }

    public void setActorName(String actorName) {
        this.actorName = actorName;
    }

    public void setBusinessKey(String businessKey) {
        this.businessKey = businessKey;
    }

    public void setCreateBy(String createBy) {
        this.createBy = createBy;
    }

    public void setCreateDate(Date createDate) {
        this.createDate = createDate;
    }

    public void setDataMap(Map<String, Object> dataMap) {
        this.dataMap = dataMap;
    }

    public void setDeleteFlag(int deleteFlag) {
        this.deleteFlag = deleteFlag;
    }

    public void setFallbackFlag(String fallbackFlag) {
        this.fallbackFlag = fallbackFlag;
    }

    public void setFields(Map<String, ColumnModel> fields) {
        this.fields = fields;
    }

    public void setFormName(String formName) {
        this.formName = formName;
    }

    public void setId(Long id) {
        this.id = id;
    }

    public void setLevel(int level) {
        this.level = level;
    }

    public void setListNo(int listNo) {
        this.listNo = listNo;
    }

    public void setLocked(int locked) {
        this.locked = locked;
    }

    public void setName(String name) {
        this.name = name;
    }

    public void setObjectId(String objectId) {
        this.objectId = objectId;
    }

    public void setObjectValue(String objectValue) {
        this.objectValue = objectValue;
    }

    public void setParentId(Long parentId) {
        this.parentId = parentId;
    }

    public void setProcessInstanceId(String processInstanceId) {
        this.processInstanceId = processInstanceId;
    }

    public void setProcessName(String processName) {
        this.processName = processName;
    }

    public void setServiceKey(String serviceKey) {
        this.serviceKey = serviceKey;
    }

    public void setSignForFlag(String signForFlag) {
        this.signForFlag = signForFlag;
    }

    public void setStatus(int status) {
        this.status = status;
    }

    public void setSubject(String subject) {
        this.subject = subject;
    }

    public void setTableName(String tableName) {
        this.tableName = tableName;
    }

    public void setTaskName(String taskName) {
        this.taskName = taskName;
    }

    public void setTreeId(String treeId) {
        this.treeId = treeId;
    }

    public void setTypeId(String typeId) {
        this.typeId = typeId;
    }

    public void setUpdateBy(String updateBy) {
        this.updateBy = updateBy;
    }

    public void setUpdateDate(Date updateDate) {
        this.updateDate = updateDate;
    }

    public void setWfStatus(int wfStatus) {
        this.wfStatus = wfStatus;
    }

    /**
     * Serializes this entity to a fastjson {@link JSONObject}: all non-null
     * data-map entries, column-definition aliases (with "_datetime" variants
     * for dates), and the common header fields.
     *
     * @return the populated JSON object
     */
    public JSONObject toJsonObject() {
        JSONObject jsonObject = new JSONObject();
        if (dataMap != null && dataMap.size() > 0) {
            // Guard against accidental self-nesting of the map.
            dataMap.remove("dataMap");
            Map<String, Object> rowMap = new java.util.HashMap<String, Object>();
            rowMap.putAll(dataMap);
            // dataMap.remove(StringTools.lower(this.getFormName()));
            Set<Entry<String, Object>> entrySet = dataMap.entrySet();
            for (Entry<String, Object> entry : entrySet) {
                String name = entry.getKey();
                Object value = entry.getValue();
                if (value != null) {
                    jsonObject.put(name, value);
                    rowMap.put(name.toLowerCase(), value);
                }
            }
            if (columns != null && !columns.isEmpty()) {
                for (ColumnDefinition col : columns) {
                    // NOTE(review): lookup is upper-case while rowMap also
                    // stores lower-case aliases — this assumes dataMap keys
                    // are upper-case column names; confirm against callers.
                    Object value = rowMap
                            .get(col.getColumnName().toUpperCase());
                    if (value != null) {
                        if (value instanceof Date) {
                            Date date = (Date) value;
                            jsonObject.put(col.getName(),
                                    DateUtils.getDate(date));
                            jsonObject.put(col.getName() + "_datetime",
                                    DateUtils.getDateTime(date));
                            jsonObject.put(col.getColumnName(),
                                    DateUtils.getDate(date));
                            jsonObject.put(col.getColumnName() + "_datetime",
                                    DateUtils.getDateTime(date));
                        } else {
                            jsonObject.put(col.getColumnName(), value);
                            jsonObject.put(col.getName(), value);
                        }
                    }
                }
            }
        }
        jsonObject.put("id", id);
        jsonObject.put("parentId", parentId);
        if (businessKey != null) {
            jsonObject.put("businessKey", businessKey);
        }
        if (subject != null) {
            jsonObject.put("subject", subject);
        }
        if (formName != null) {
            jsonObject.put("formName", formName);
        }
        if (processInstanceId != null) {
            jsonObject.put("processInstanceId", processInstanceId);
        }
        if (processName != null) {
            jsonObject.put("processName", processName);
        }
        if (objectId != null) {
            jsonObject.put("objectId", objectId);
        }
        if (objectValue != null) {
            jsonObject.put("objectValue", objectValue);
        }
        if (createDate != null) {
            jsonObject.put("createDate", createDate);
        }
        if (createBy != null) {
            jsonObject.put("createBy", createBy);
        }
        if (updateDate != null) {
            jsonObject.put("updateDate", updateDate);
        }
        if (updateBy != null) {
            jsonObject.put("updateBy", updateBy);
        }
        if (signForFlag != null) {
            jsonObject.put("signForFlag", signForFlag);
        }
        jsonObject.put("status", status);
        jsonObject.put("wfStatus", wfStatus);
        return jsonObject;
    }

    /**
     * Serializes this entity to a Jackson {@link ObjectNode}. Values are
     * written with their concrete type where possible; dates are emitted
     * both as a date string and a "_datetime" variant.
     *
     * @return the populated object node
     */
    public ObjectNode toObjectNode() {
        ObjectNode jsonObject = new ObjectMapper().createObjectNode();
        if (dataMap != null && dataMap.size() > 0) {
            dataMap.remove("dataMap");
            dataMap.remove(StringTools.lower(this.getFormName()));
            Set<Entry<String, Object>> entrySet = dataMap.entrySet();
            for (Entry<String, Object> entry : entrySet) {
                String name = entry.getKey();
                Object value = entry.getValue();
                if (value != null) {
                    // ObjectNode.put is overloaded per type, so dispatch
                    // explicitly; anything unknown falls back to toString().
                    if (value instanceof Integer) {
                        Integer x = (Integer) value;
                        jsonObject.put(name, x);
                    } else if (value instanceof Long) {
                        Long x = (Long) value;
                        jsonObject.put(name, x);
                    } else if (value instanceof Double) {
                        Double x = (Double) value;
                        jsonObject.put(name, x);
                    } else if (value instanceof Date) {
                        Date x = (Date) value;
                        jsonObject.put(name, DateUtils.getDate(x));
                        jsonObject.put(name + "_datetime",
                                DateUtils.getDateTime(x));
                    } else if (value instanceof java.math.BigDecimal) {
                        java.math.BigDecimal x = (java.math.BigDecimal) value;
                        jsonObject.put(name, x);
                    } else if (value instanceof String) {
                        String x = (String) value;
                        jsonObject.put(name, x);
                    } else {
                        jsonObject.put(name, value.toString());
                    }
                }
            }
        }
        jsonObject.put("id", id);
        jsonObject.put("parentId", parentId);
        if (businessKey != null) {
            jsonObject.put("businessKey", businessKey);
        }
        if (subject != null) {
            jsonObject.put("subject", subject);
        }
        if (formName != null) {
            jsonObject.put("formName", formName);
        }
        if (processInstanceId != null) {
            jsonObject.put("processInstanceId", processInstanceId);
        }
        if (processName != null) {
            jsonObject.put("processName", processName);
        }
        if (objectId != null) {
            jsonObject.put("objectId", objectId);
        }
        if (objectValue != null) {
            jsonObject.put("objectValue", objectValue);
        }
        if (createDate != null) {
            jsonObject.put("createDate", DateUtils.getDate(createDate));
            jsonObject.put("createDate_datetime",
                    DateUtils.getDateTime(createDate));
        }
        if (createBy != null) {
            jsonObject.put("createBy", createBy);
        }
        if (updateDate != null) {
            jsonObject.put("updateDate", DateUtils.getDate(updateDate));
            jsonObject.put("updateDate_datetime",
                    DateUtils.getDateTime(updateDate));
        }
        if (updateBy != null) {
            jsonObject.put("updateBy", updateBy);
        }
        if (signForFlag != null) {
            jsonObject.put("signForFlag", signForFlag);
        }
        jsonObject.put("status", status);
        jsonObject.put("wfStatus", wfStatus);
        return jsonObject;
    }

    public String toString() {
        return ToStringBuilder.reflectionToString(this);
    }
}
|
|
/*
* Copyright (c) 2008-2020, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.config;
import com.hazelcast.collection.IQueue;
import com.hazelcast.internal.cluster.Versions;
import com.hazelcast.internal.config.ConfigDataSerializerHook;
import com.hazelcast.internal.util.StringUtil;
import com.hazelcast.nio.ObjectDataInput;
import com.hazelcast.nio.ObjectDataOutput;
import com.hazelcast.nio.serialization.IdentifiedDataSerializable;
import com.hazelcast.nio.serialization.impl.Versioned;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import static com.hazelcast.internal.serialization.impl.SerializationUtil.readNullableList;
import static com.hazelcast.internal.serialization.impl.SerializationUtil.writeNullableList;
import static com.hazelcast.internal.util.Preconditions.checkAsyncBackupCount;
import static com.hazelcast.internal.util.Preconditions.checkBackupCount;
import static com.hazelcast.internal.util.Preconditions.checkNotNull;
/**
 * Contains the configuration for an {@link IQueue}.
 */
@SuppressWarnings("checkstyle:methodcount")
public class QueueConfig implements IdentifiedDataSerializable, NamedConfig, Versioned {
    /**
     * Default value for the maximum size of the Queue.
     */
    public static final int DEFAULT_MAX_SIZE = 0;
    /**
     * Default value for the synchronous backup count.
     */
    public static final int DEFAULT_SYNC_BACKUP_COUNT = 1;
    /**
     * Default value of the asynchronous backup count.
     */
    public static final int DEFAULT_ASYNC_BACKUP_COUNT = 0;
    /**
     * Default value for the TTL (time to live) for empty Queue.
     */
    public static final int DEFAULT_EMPTY_QUEUE_TTL = -1;
    private String name;
    // Lazily created on first access; see getItemListenerConfigs().
    private List<ItemListenerConfig> listenerConfigs;
    private int backupCount = DEFAULT_SYNC_BACKUP_COUNT;
    private int asyncBackupCount = DEFAULT_ASYNC_BACKUP_COUNT;
    private int maxSize = DEFAULT_MAX_SIZE;
    private int emptyQueueTtl = DEFAULT_EMPTY_QUEUE_TTL;
    private QueueStoreConfig queueStoreConfig;
    private boolean statisticsEnabled = true;
    private String splitBrainProtectionName;
    private MergePolicyConfig mergePolicyConfig = new MergePolicyConfig();
    // Non-empty class name turns the queue into a priority queue; see isPriorityQueue().
    private String priorityComparatorClassName;

    /** Creates a queue configuration with default settings. */
    public QueueConfig() {
    }

    /**
     * Creates a queue configuration with the given name and default settings.
     *
     * @param name the name of the queue
     */
    public QueueConfig(String name) {
        setName(name);
    }

    /**
     * Copy constructor.
     * <p>
     * The queue store config is deep-copied and the listener list is copied
     * into a new list.
     * NOTE(review): mergePolicyConfig is shared by reference (shallow copy),
     * unlike queueStoreConfig — confirm this is intentional.
     *
     * @param config the configuration to copy
     */
    public QueueConfig(QueueConfig config) {
        this();
        this.name = config.name;
        this.backupCount = config.backupCount;
        this.asyncBackupCount = config.asyncBackupCount;
        this.maxSize = config.maxSize;
        this.emptyQueueTtl = config.emptyQueueTtl;
        this.statisticsEnabled = config.statisticsEnabled;
        this.splitBrainProtectionName = config.splitBrainProtectionName;
        this.mergePolicyConfig = config.mergePolicyConfig;
        this.queueStoreConfig = config.queueStoreConfig != null ? new QueueStoreConfig(config.queueStoreConfig) : null;
        this.listenerConfigs = new ArrayList<>(config.getItemListenerConfigs());
        this.priorityComparatorClassName = config.priorityComparatorClassName;
    }

    /**
     * Returns the TTL (time to live) for emptying the Queue.
     *
     * @return the TTL (time to live) for emptying the Queue
     */
    public int getEmptyQueueTtl() {
        return emptyQueueTtl;
    }

    /**
     * Sets the TTL (time to live) for emptying the Queue.
     *
     * @param emptyQueueTtl set the TTL (time to live) for emptying the Queue to this value
     * @return the Queue configuration
     */
    public QueueConfig setEmptyQueueTtl(int emptyQueueTtl) {
        this.emptyQueueTtl = emptyQueueTtl;
        return this;
    }

    /**
     * Returns the maximum size of the Queue.
     *
     * @return the maximum size of the Queue
     */
    public int getMaxSize() {
        // A stored value of 0 (the default) means "unbounded".
        return maxSize == 0 ? Integer.MAX_VALUE : maxSize;
    }

    /**
     * Sets the maximum size of the Queue.
     *
     * @param maxSize set the maximum size of the Queue to this value
     * @return the Queue configuration
     * @throws IllegalArgumentException if the provided max size is negative
     */
    public QueueConfig setMaxSize(int maxSize) {
        if (maxSize < 0) {
            throw new IllegalArgumentException("Size of the queue can not be a negative value!");
        }
        this.maxSize = maxSize;
        return this;
    }

    /**
     * Get the total number of backups: the backup count plus the asynchronous backup count.
     *
     * @return the total number of backups
     */
    public int getTotalBackupCount() {
        return backupCount + asyncBackupCount;
    }

    /**
     * Get the number of synchronous backups for this queue.
     *
     * @return the synchronous backup count
     */
    public int getBackupCount() {
        return backupCount;
    }

    /**
     * Sets the number of synchronous backups for this queue.
     *
     * @param backupCount the number of synchronous backups to set
     * @return the current QueueConfig
     * @throws IllegalArgumentException if backupCount is smaller than 0,
     *                                  or larger than the maximum number of backups,
     *                                  or the sum of the backups and async backups is larger than the maximum
     *                                  number of backups
     * @see #setAsyncBackupCount(int)
     */
    public QueueConfig setBackupCount(int backupCount) {
        this.backupCount = checkBackupCount(backupCount, asyncBackupCount);
        return this;
    }

    /**
     * Get the number of asynchronous backups for this queue.
     *
     * @return the number of asynchronous backups
     */
    public int getAsyncBackupCount() {
        return asyncBackupCount;
    }

    /**
     * Sets the number of asynchronous backups. 0 means no backups.
     *
     * @param asyncBackupCount the number of asynchronous synchronous backups to set
     * @return the updated QueueConfig
     * @throws IllegalArgumentException if asyncBackupCount smaller than 0,
     *                                  or larger than the maximum number of backup
     *                                  or the sum of the backups and async backups is larger than the maximum
     *                                  number of backups
     * @see #setBackupCount(int)
     * @see #getAsyncBackupCount()
     */
    public QueueConfig setAsyncBackupCount(int asyncBackupCount) {
        this.asyncBackupCount = checkAsyncBackupCount(backupCount, asyncBackupCount);
        return this;
    }

    /**
     * Get the QueueStore (load and store queue items from/to a database) configuration.
     *
     * @return the QueueStore configuration
     */
    public @Nullable QueueStoreConfig getQueueStoreConfig() {
        return queueStoreConfig;
    }

    /**
     * Set the QueueStore (load and store queue items from/to a database) configuration.
     *
     * @param queueStoreConfig set the QueueStore configuration to this configuration
     * @return the QueueStore configuration
     */
    public QueueConfig setQueueStoreConfig(@Nullable QueueStoreConfig queueStoreConfig) {
        this.queueStoreConfig = queueStoreConfig;
        return this;
    }

    /**
     * Check if statistics are enabled for this queue.
     *
     * @return {@code true} if statistics are enabled, {@code false} otherwise
     */
    public boolean isStatisticsEnabled() {
        return statisticsEnabled;
    }

    /**
     * Enables or disables statistics for this queue.
     *
     * @param statisticsEnabled {@code true} to enable statistics for this queue, {@code false} to disable
     * @return the updated QueueConfig
     */
    public QueueConfig setStatisticsEnabled(boolean statisticsEnabled) {
        this.statisticsEnabled = statisticsEnabled;
        return this;
    }

    /**
     * @return the name of this queue
     */
    public String getName() {
        return name;
    }

    /**
     * Set the name for this queue.
     *
     * @param name the name to set for this queue
     * @return this queue configuration
     */
    public QueueConfig setName(String name) {
        this.name = name;
        return this;
    }

    /**
     * Add an item listener configuration to this queue.
     *
     * @param listenerConfig the item listener configuration to add to this queue
     * @return the updated queue configuration
     */
    public QueueConfig addItemListenerConfig(ItemListenerConfig listenerConfig) {
        getItemListenerConfigs().add(listenerConfig);
        return this;
    }

    /**
     * Get the list of item listener configurations for this queue.
     *
     * @return the list of item listener configurations for this queue
     */
    public @Nonnull List<ItemListenerConfig> getItemListenerConfigs() {
        if (listenerConfigs == null) {
            listenerConfigs = new ArrayList<>();
        }
        return listenerConfigs;
    }

    /**
     * Set the list of item listener configurations for this queue.
     *
     * @param listenerConfigs the list of item listener configurations to set for this queue
     * @return the updated queue configuration
     */
    public QueueConfig setItemListenerConfigs(@Nullable List<ItemListenerConfig> listenerConfigs) {
        this.listenerConfigs = listenerConfigs;
        return this;
    }

    /**
     * Returns the split brain protection name for queue operations.
     *
     * @return the split brain protection name
     */
    public @Nullable String getSplitBrainProtectionName() {
        return splitBrainProtectionName;
    }

    /**
     * Sets the split brain protection name for queue operations.
     *
     * @param splitBrainProtectionName the split brain protection name
     * @return the updated queue configuration
     */
    public QueueConfig setSplitBrainProtectionName(@Nullable String splitBrainProtectionName) {
        this.splitBrainProtectionName = splitBrainProtectionName;
        return this;
    }

    /**
     * Gets the {@link MergePolicyConfig} for this queue.
     *
     * @return the {@link MergePolicyConfig} for this queue
     */
    public @Nonnull MergePolicyConfig getMergePolicyConfig() {
        return mergePolicyConfig;
    }

    /**
     * Sets the {@link MergePolicyConfig} for this queue.
     *
     * @return the updated queue configuration
     */
    public QueueConfig setMergePolicyConfig(@Nonnull MergePolicyConfig mergePolicyConfig) {
        this.mergePolicyConfig = checkNotNull(mergePolicyConfig, "mergePolicyConfig cannot be null");
        return this;
    }

    /**
     * Check if underlying implementation is a {@code PriorityQueue}. Otherwise
     * it is a FIFO queue.
     *
     * @return {@code true} if priority queue has been configured, {@code false}
     * otherwise
     */
    public boolean isPriorityQueue() {
        return !StringUtil.isNullOrEmptyAfterTrim(priorityComparatorClassName);
    }

    /**
     * Returns the class name that will be used to compare queue items.
     * If the returned class name is non-empty, the queue will behave as a priority
     * queue, otherwise it behaves as a FIFO queue.
     * <p>
     * If this value is non-null, then Hazelcast will ignore the queue store
     * {@link QueueStoreConfig#STORE_MEMORY_LIMIT} configuration value.
     */
    public @Nullable String getPriorityComparatorClassName() {
        return priorityComparatorClassName;
    }

    /**
     * Sets the class name that will be used to compare queue items.
     * If the provided class name is non-empty, the queue will behave as a priority
     * queue, otherwise it behaves as a FIFO queue.
     *
     * Setting the comparator to a non-null value also makes the queue store ignore
     * the {@link QueueStoreConfig#STORE_MEMORY_LIMIT} configuration value.
     *
     * @param priorityComparatorClassName the class name that will be used to compare queue items
     * @return this QueueConfig instance
     */
    public QueueConfig setPriorityComparatorClassName(@Nullable String priorityComparatorClassName) {
        this.priorityComparatorClassName = priorityComparatorClassName;
        return this;
    }

    @Override
    public String toString() {
        return "QueueConfig{"
                + "name='" + name + '\''
                + ", listenerConfigs=" + listenerConfigs
                + ", backupCount=" + backupCount
                + ", asyncBackupCount=" + asyncBackupCount
                + ", maxSize=" + maxSize
                + ", emptyQueueTtl=" + emptyQueueTtl
                + ", queueStoreConfig=" + queueStoreConfig
                + ", statisticsEnabled=" + statisticsEnabled
                + ", mergePolicyConfig=" + mergePolicyConfig
                + ", priorityComparatorClassName=" + priorityComparatorClassName
                + '}';
    }

    @Override
    public int getFactoryId() {
        return ConfigDataSerializerHook.F_ID;
    }

    @Override
    public int getClassId() {
        return ConfigDataSerializerHook.QUEUE_CONFIG;
    }

    // Wire format: the field order here must match readData() exactly.
    @Override
    public void writeData(ObjectDataOutput out) throws IOException {
        out.writeUTF(name);
        writeNullableList(listenerConfigs, out);
        out.writeInt(backupCount);
        out.writeInt(asyncBackupCount);
        out.writeInt(maxSize);
        out.writeInt(emptyQueueTtl);
        out.writeObject(queueStoreConfig);
        out.writeBoolean(statisticsEnabled);
        out.writeUTF(splitBrainProtectionName);
        out.writeObject(mergePolicyConfig);
        // RU_COMPAT_4_0
        if (out.getVersion().isGreaterOrEqual(Versions.V4_1)) {
            out.writeUTF(priorityComparatorClassName);
        }
    }

    // Wire format: the field order here must match writeData() exactly.
    @Override
    public void readData(ObjectDataInput in) throws IOException {
        name = in.readUTF();
        listenerConfigs = readNullableList(in);
        backupCount = in.readInt();
        asyncBackupCount = in.readInt();
        maxSize = in.readInt();
        emptyQueueTtl = in.readInt();
        queueStoreConfig = in.readObject();
        statisticsEnabled = in.readBoolean();
        splitBrainProtectionName = in.readUTF();
        mergePolicyConfig = in.readObject();
        // RU_COMPAT_4_0
        if (in.getVersion().isGreaterOrEqual(Versions.V4_1)) {
            priorityComparatorClassName = in.readUTF();
        }
    }

    @Override
    @SuppressWarnings({"checkstyle:cyclomaticcomplexity"})
    public final boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (!(o instanceof QueueConfig)) {
            return false;
        }
        QueueConfig that = (QueueConfig) o;
        // Uses getMaxSize()/getItemListenerConfigs() accessors so that the
        // 0-means-unbounded and lazy-list normalizations are applied,
        // consistent with hashCode().
        return backupCount == that.backupCount
                && asyncBackupCount == that.asyncBackupCount
                && getMaxSize() == that.getMaxSize()
                && emptyQueueTtl == that.emptyQueueTtl
                && statisticsEnabled == that.statisticsEnabled
                && Objects.equals(name, that.name)
                && getItemListenerConfigs().equals(that.getItemListenerConfigs())
                && Objects.equals(queueStoreConfig, that.queueStoreConfig)
                && Objects.equals(splitBrainProtectionName, that.splitBrainProtectionName)
                && Objects.equals(mergePolicyConfig, that.mergePolicyConfig)
                && Objects.equals(priorityComparatorClassName, that.priorityComparatorClassName);
    }

    @Override
    public final int hashCode() {
        return Objects.hash(name, getItemListenerConfigs(), backupCount, asyncBackupCount, getMaxSize(), emptyQueueTtl,
                queueStoreConfig, statisticsEnabled, splitBrainProtectionName, mergePolicyConfig,
                priorityComparatorClassName);
    }
}
|
|
/*
* Copyright 2000-2012 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.roots.impl;
import com.intellij.openapi.application.ReadAction;
import com.intellij.openapi.fileTypes.FileTypeRegistry;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.roots.*;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.containers.ContainerUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.jps.model.java.JavaModuleSourceRootTypes;
import org.jetbrains.jps.model.module.JpsModuleSourceRootType;
import java.util.Collections;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
/**
 * Module-scoped {@link ModuleFileIndex} implementation backed by the
 * project's {@link DirectoryIndex}.
 */
public class ModuleFileIndexImpl extends FileIndexBase implements ModuleFileIndex {
    private final Module myModule;

    public ModuleFileIndexImpl(Module module, DirectoryIndex directoryIndex) {
        super(directoryIndex, FileTypeRegistry.getInstance());
        myModule = module;
    }

    @Override
    public boolean iterateContent(@NotNull ContentIterator processor) {
        // Collect this module's effective content roots under a read action,
        // skipping roots nested inside another root of the same module.
        final Set<VirtualFile> contentRoots = ReadAction.compute(() -> {
            if (myModule.isDisposed()) return Collections.emptySet();
            Set<VirtualFile> result = new LinkedHashSet<>();
            VirtualFile[][] allRoots = getModuleContentAndSourceRoots(myModule);
            for (VirtualFile[] roots : allRoots) {
                for (VirtualFile root : roots) {
                    DirectoryInfo info = getInfoForFileOrDirectory(root);
                    if (!info.isInProject()) continue;
                    VirtualFile parent = root.getParent();
                    if (parent != null) {
                        DirectoryInfo parentInfo = myDirectoryIndex.getInfoForFile(parent);
                        if (parentInfo.isInProject() && myModule.equals(parentInfo.getModule())) continue; // inner content - skip it
                    }
                    result.add(root);
                }
            }
            return result;
        });
        // Iteration itself happens outside the read action, one root at a time.
        for (VirtualFile contentRoot : contentRoots) {
            if (!iterateContentUnderDirectory(contentRoot, processor)) {
                return false;
            }
        }
        return true;
    }

    @Override
    public boolean isInContent(@NotNull VirtualFile fileOrDir) {
        DirectoryInfo info = getInfoForFileOrDirectory(fileOrDir);
        return info.isInProject() && myModule.equals(info.getModule());
    }

    @Override
    public boolean isInSourceContent(@NotNull VirtualFile fileOrDir) {
        DirectoryInfo info = getInfoForFileOrDirectory(fileOrDir);
        return info.isInModuleSource() && myModule.equals(info.getModule());
    }

    @Override
    @NotNull
    public List<OrderEntry> getOrderEntriesForFile(@NotNull VirtualFile fileOrDir) {
        return findAllOrderEntriesWithOwnerModule(myModule, myDirectoryIndex.getOrderEntries(getInfoForFileOrDirectory(fileOrDir)));
    }

    @Override
    public OrderEntry getOrderEntryForFile(@NotNull VirtualFile fileOrDir) {
        return findOrderEntryWithOwnerModule(myModule, myDirectoryIndex.getOrderEntries(getInfoForFileOrDirectory(fileOrDir)));
    }

    @Override
    public boolean isInTestSourceContent(@NotNull VirtualFile fileOrDir) {
        DirectoryInfo info = getInfoForFileOrDirectory(fileOrDir);
        return info.isInModuleSource() && myModule.equals(info.getModule())
               && JavaModuleSourceRootTypes.isTestSourceOrResource(myDirectoryIndex.getSourceRootType(info));
    }

    @Override
    public boolean isUnderSourceRootOfType(@NotNull VirtualFile fileOrDir, @NotNull Set<? extends JpsModuleSourceRootType<?>> rootTypes) {
        DirectoryInfo info = getInfoForFileOrDirectory(fileOrDir);
        return info.isInModuleSource() && myModule.equals(info.getModule()) && rootTypes.contains(myDirectoryIndex.getSourceRootType(info));
    }

    @Override
    protected boolean isScopeDisposed() {
        return myModule.isDisposed();
    }

    /**
     * Finds any order entry owned by {@code ownerModule}. Small lists are
     * scanned linearly; larger ones use binary search (entries are sorted
     * by owner module per {@code RootIndex.BY_OWNER_MODULE}).
     */
    @Nullable
    static OrderEntry findOrderEntryWithOwnerModule(@NotNull Module ownerModule, @NotNull List<OrderEntry> orderEntries) {
        if (orderEntries.size() < 10) {
            for (OrderEntry orderEntry : orderEntries) {
                if (orderEntry.getOwnerModule() == ownerModule) {
                    return orderEntry;
                }
            }
            return null;
        }
        int index = Collections.binarySearch(orderEntries, new FakeOrderEntry(ownerModule), RootIndex.BY_OWNER_MODULE);
        return index < 0 ? null : orderEntries.get(index);
    }

    /**
     * Returns all order entries owned by {@code ownerModule}: binary-search
     * for one match, then expand to the contiguous run of entries with the
     * same owner.
     */
    @NotNull
    private static List<OrderEntry> findAllOrderEntriesWithOwnerModule(@NotNull Module ownerModule, @NotNull List<OrderEntry> entries) {
        if (entries.isEmpty()) return Collections.emptyList();
        if (entries.size() == 1) {
            OrderEntry entry = entries.get(0);
            return entry.getOwnerModule() == ownerModule ?
                   ContainerUtil.newArrayList(entries) : Collections.emptyList();
        }
        int index = Collections.binarySearch(entries, new FakeOrderEntry(ownerModule), RootIndex.BY_OWNER_MODULE);
        if (index < 0) {
            return Collections.emptyList();
        }
        int firstIndex = index;
        while (firstIndex - 1 >= 0 && entries.get(firstIndex - 1).getOwnerModule() == ownerModule) {
            firstIndex--;
        }
        int lastIndex = index + 1;
        while (lastIndex < entries.size() && entries.get(lastIndex).getOwnerModule() == ownerModule) {
            lastIndex++;
        }
        return ContainerUtil.newArrayList(entries.subList(firstIndex, lastIndex));
    }

    /**
     * Stand-in OrderEntry used only as a binary-search key; every method
     * except {@link #getOwnerModule()} is unsupported.
     */
    private static class FakeOrderEntry implements OrderEntry {
        private final Module myOwnerModule;

        FakeOrderEntry(Module ownerModule) {
            myOwnerModule = ownerModule;
        }

        @NotNull
        @Override
        public VirtualFile[] getFiles(OrderRootType type) {
            throw new IncorrectOperationException();
        }

        @NotNull
        @Override
        public String[] getUrls(OrderRootType rootType) {
            throw new IncorrectOperationException();
        }

        @NotNull
        @Override
        public String getPresentableName() {
            throw new IncorrectOperationException();
        }

        @Override
        public boolean isValid() {
            throw new IncorrectOperationException();
        }

        @NotNull
        @Override
        public Module getOwnerModule() {
            return myOwnerModule;
        }

        @Override
        public <R> R accept(RootPolicy<R> policy, @Nullable R initialValue) {
            throw new IncorrectOperationException();
        }

        @Override
        public int compareTo(@NotNull OrderEntry o) {
            throw new IncorrectOperationException();
        }

        @Override
        public boolean isSynthetic() {
            throw new IncorrectOperationException();
        }
    }
}
|
|
package com.sdklite.promise;
import static com.sdklite.promise.Internal.setTimeout;
import static com.sdklite.promise.Internal.size;
import java.lang.reflect.Array;
import java.util.Arrays;
import java.util.Iterator;
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
/**
* Represents the eventual completion (or failure) of an asynchronous operation,
* and its resulting value.
*
* @author johnsonlee
*
* @param <V>
* The type of value
*/
public class Promise<V> implements Thenable<V> {
/**
 * Returns a single {@link Promise} that resolves when all of the promises
 * in the array argument have resolved or when the array argument contains
 * no promises. It rejects with the reason of the first promise that
 * rejects.
 *
 * @param iterable
 *            An array.
 * @return
 *         <ul>
 *         <li>An already resolved {@link Promise} if the iterable passed is
 *         empty.</li>
 *         <li>An asynchronously resolved {@link Promise} if the iterable
 *         passed contains no promises.</li>
 *         <li>A pending {@link Promise} in all other cases. This returned
 *         promise is then resolved/rejected asynchronously (as soon as the
 *         stack is empty) when all the promises in the given iterable have
 *         resolved, or if any of the promises reject.</li>
 *         </ul>
 */
public static Promise<Object[]> all(final Object... iterable) {
    // Wrap the array in a list and delegate to the typed overload with
    // the broadest possible component type (equivalent to going through
    // the Iterable overload).
    return all(Object.class, Arrays.asList(iterable));
}
/**
 * Returns a single {@link Promise} that resolves when all of the promises
 * in the array argument have resolved or when the array argument contains
 * no promises. It rejects with the reason of the first promise that
 * rejects.
 *
 * @param type
 *            The the value component type of returned {@link Promise}.
 * @param iterable
 *            An array.
 * @return
 *         <ul>
 *         <li>An already resolved {@link Promise} if the iterable passed is
 *         empty.</li>
 *         <li>An asynchronously resolved {@link Promise} if the iterable
 *         passed contains no promises.</li>
 *         <li>A pending {@link Promise} in all other cases. This returned
 *         promise is then resolved/rejected asynchronously (as soon as the
 *         stack is empty) when all the promises in the given iterable have
 *         resolved, or if any of the promises reject.</li>
 *         </ul>
 */
public static <T> Promise<T[]> all(final Class<T> type, final Object... iterable) {
    // View the varargs array as an iterable and reuse the collection-based
    // overload.
    final Iterable<?> items = Arrays.asList(iterable);
    return all(type, items);
}
/**
* Returns a single {@link Promise} that resolves when all of the promises
* in the iterable argument have resolved or when the iterable argument
* contains no promises. It rejects with the reason of the first promise
* that rejects.
*
* @param iterable
* An iterable object such as a Collection.
* @return
* <ul>
* <li>An already resolved {@link Promise} if the iterable passed is
* empty.</li>
* <li>An asynchronously resolved {@link Promise} if the iterable
* passed contains no promises.</li>
* <li>A pending {@link Promise} in all other cases. This returned
* promise is then resolved/rejected asynchronously (as soon as the
* stack is empty) when all the promises in the given iterable have
* resolved, or if any of the promises reject.</li>
* </ul>
*/
public static Promise<Object[]> all(final Iterable<?> iterable) {
return all(Object.class, iterable);
}
/**
* Returns a single {@link Promise} that resolves when all of the promises
* in the iterable argument have resolved or when the iterable argument
* contains no promises. It rejects with the reason of the first promise
* that rejects.
*
* @param type
* The the value component type of returned {@link Promise}.
* @param args
* An iterable object such as an Array or Collection.
* @return
* <ul>
* <li>An already resolved {@link Promise} if the iterable passed is
* empty.</li>
* <li>An asynchronously resolved {@link Promise} if the iterable
* passed contains no promises.</li>
* <li>A pending {@link Promise} in all other cases. This returned
* promise is then resolved/rejected asynchronously (as soon as the
* stack is empty) when all the promises in the given iterable have
* resolved, or if any of the promises reject.</li>
* </ul>
*/
public static <T> Promise<T[]> all(final Class<T> type, final Iterable<?> args) {
if (null == args) {
return Promise.resolve();
}
final Iterator<?> i = args.iterator();
if (!i.hasNext()) {
return Promise.resolve((T[]) Array.newInstance(type, 0));
}
final int n = size(args);
final Promise<T[]> promise = new Promise<T[]>();
final AtomicInteger index = new AtomicInteger(0);
final AtomicInteger counter = new AtomicInteger(n);
final T[] results = (T[]) Array.newInstance(type, n);
args.forEach(arg -> {
final Promise<T> next = cast(arg);
final int idx = index.getAndIncrement();
next.then(v -> {
try {
results[idx] = v;
if (0 == counter.decrementAndGet()) {
promise._resolve(results);
}
} catch (final Throwable t) {
promise._reject(t);
}
}, e -> promise._reject(e));
});
return promise;
}
/**
* Returns a {@link Promise} object that is resolved with {@code null} value
*
* @return a {@link Promise} that is resolved with {@code null} value
*/
public static <T> Promise<T> resolve() {
return new Promise<T>((resolve, reject) -> resolve.accept((T) null));
}
/**
* Returns a {@link Promise} object that is resolved with the specific value
*
* @param value
* The to be resolved
* @return a {@link Promise} that is resolved with the specific value
*/
public static <T> Promise<T> resolve(final T value) {
return new Promise<T>((resolve, reject) -> resolve.accept(value));
}
/**
* Returns a {@link Promise} instance with the specified thenable
*
* @param thenable
* The thenable to resolve
* @return a {@link Promise} will "follow" the thenable, adopting its eventual state.
*/
public static <T> Promise<T> resolve(final Thenable<T> thenable) {
if (thenable instanceof Promise) {
return (Promise<T>) thenable;
}
final Promise<T> p = new Promise<T>();
p._resolve(thenable);
return p;
}
/**
* Returns a Promise object that is rejected with the given reason.
*
* @param reason
* The reason why this Promise rejected.
* @return a {@link Promise} that is rejected with the given reason.
*/
public static <T> Promise<T> reject(final Throwable reason) {
return new Promise<T>((resolve, reject) -> reject.accept(reason));
}
private final Queue<Subscriber<V, ?>> subscribers = new ConcurrentLinkedQueue<Subscriber<V, ?>>();
private volatile V value;
private volatile Throwable reason;
private volatile AtomicReference<State> state = new AtomicReference<State>(State.PENDING);
/**
* Default constructor
*/
public Promise() {
}
/**
* Create an instance with an executor function
*
* @param executor
* The executor function
*/
public Promise(final Executor<Consumer<V>, Consumer<Throwable>> executor) {
try {
executor.accept(v -> setTimeout(() -> _resolve(v)), e -> setTimeout(() -> _reject(e)));
} catch (final Throwable e) {
_reject(e);
}
}
@Override
public synchronized Promise<V> then(final Consumer<V> onFulfilled, final Consumer<Throwable> onRejected) {
final Promise<V> next = new Promise<V>();
switch (this.state.get()) {
case FULFILLED:
setTimeout(() -> _resolve(next, onFulfilled, this.value));
break;
case REJECTED:
setTimeout(() -> _reject(next, onRejected, this.reason));
break;
default:
this.subscribers.offer(new ConsumerSubscriber<>(onFulfilled, onRejected, next));
break;
}
return next;
}
@Override
public synchronized <R> Promise<R> then(final Function<V, R> onFulfilled, final Function<Throwable, R> onRejected) {
final Promise<R> next = new Promise<R>();
switch (this.state.get()) {
case FULFILLED:
setTimeout(() -> _resolve(next, onFulfilled, this.value));
break;
case REJECTED:
setTimeout(() -> _reject(next, onRejected, this.reason));
break;
default:
this.subscribers.offer(new FunctionSubscriber<V, R>(onFulfilled, onRejected, next));
break;
}
return next;
}
private void _reject(final Throwable e) {
if (this.state.compareAndSet(State.PENDING, State.REJECTED)) {
this.reason = e;
while (!this.subscribers.isEmpty()) {
final Subscriber<V, ?> subscriber = this.subscribers.poll();
if (subscriber instanceof ConsumerSubscriber) {
final ConsumerSubscriber<V> consumer = (ConsumerSubscriber<V>) subscriber;
_reject(consumer.next, consumer.onRejected, this.reason);
} else if (subscriber instanceof Function) {
final FunctionSubscriber<V, ?> function = (FunctionSubscriber<V, ?>) subscriber;
_reject(function.next, function.onRejected, this.reason);
}
}
}
}
private void _resolve(final V value) {
if (value == this) {
_reject(new TypeException("Self resolution"));
return;
}
if (this.state.compareAndSet(State.PENDING, State.FULFILLED)) {
this.value = value;
while (!this.subscribers.isEmpty()) {
final Subscriber<V, ?> subscriber = this.subscribers.poll();
if (subscriber instanceof ConsumerSubscriber) {
final ConsumerSubscriber<V> consumer = (ConsumerSubscriber<V>) subscriber;
_resolve(consumer.next, consumer.onFulfilled, this.value);
} else if (subscriber instanceof Function) {
final FunctionSubscriber<V, ?> function = (FunctionSubscriber<V, ?>) subscriber;
_resolve(function.next, function.onFulfilled, this.value);
}
}
}
}
private void _resolve(final Thenable<V> thenable) {
if (thenable == this) {
_reject(new TypeException("Self resolution"));
return;
}
if (null == thenable) {
_resolve((V) null);
return;
}
final AtomicBoolean notrun = new AtomicBoolean(true);
try {
thenable.then(v -> {
if (notrun.compareAndSet(true, false)) {
_resolve(v);
}
} , e -> {
if (notrun.compareAndSet(true, false)) {
_reject(e);
}
});
} catch (final Throwable e) {
if (notrun.compareAndSet(true, false)) {
_reject(e);
}
}
}
private static <V> void _reject(final Promise next, final Consumer<Throwable> onRejected, final Throwable reason) {
try {
if (null != onRejected) {
onRejected.accept(reason);
next._resolve(null);
} else {
next._reject(reason);
}
} catch (final Throwable e) {
next._reject(e);
}
}
private static <V> void _resolve(final Promise next, final Consumer<V> onFulfilled, final V value) {
try {
if (null != onFulfilled) {
onFulfilled.accept(value);
next._resolve(null);
} else {
next._resolve(value);
}
} catch (final Throwable e) {
next._reject(e);
}
}
private static <R> void _reject(final Promise next, final Function<Throwable, R> onRejected, final Throwable reason) {
try {
if (null != onRejected) {
next._resolve(onRejected.apply(reason));
} else {
next._reject(reason);
}
} catch (final Throwable e) {
next._reject(e);
}
}
private static <V, R> void _resolve(final Promise next, final Function<V, R> onFulfilled, final V value) {
try {
if (null != onFulfilled) {
next._resolve(onFulfilled.apply(value));
} else {
next._resolve(value);
}
} catch (final Throwable e) {
next._reject(e);
}
}
private static <T> Promise<T> cast(final Object value) {
return (Promise<T>) (value instanceof Promise ? value : resolve(value));
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.ql.metadata.formatting;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils;
import org.apache.hadoop.hive.ql.metadata.StorageHandlerInfo;
import org.apache.hive.common.util.HiveStringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.commons.lang.StringEscapeUtils;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.common.FileUtils;
import org.apache.hadoop.hive.common.StatsSetupConst;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.PrincipalType;
import org.apache.hadoop.hive.metastore.api.WMFullResourcePlan;
import org.apache.hadoop.hive.metastore.api.WMResourcePlan;
import org.apache.hadoop.hive.metastore.api.WMValidateResourcePlanResponse;
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.metadata.CheckConstraint;
import org.apache.hadoop.hive.ql.metadata.DefaultConstraint;
import org.apache.hadoop.hive.ql.metadata.ForeignKeyInfo;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.NotNullConstraint;
import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.ql.metadata.PrimaryKeyInfo;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.metadata.UniqueConstraint;
import org.apache.hadoop.hive.ql.session.SessionState;
import static org.apache.hadoop.hive.conf.Constants.MATERIALIZED_VIEW_REWRITING_TIME_WINDOW;
/**
* Format table and index information for human readability using
* simple lines of text.
*/
class TextMetaDataFormatter implements MetaDataFormatter {
private static final Logger LOG = LoggerFactory.getLogger(TextMetaDataFormatter.class);
private static final int separator = Utilities.tabCode;
private static final int terminator = Utilities.newLineCode;
private final boolean showPartColsSeparately;
public TextMetaDataFormatter(boolean partColsSeparately) {
this.showPartColsSeparately = partColsSeparately;
}
/**
* Write an error message.
*/
@Override
public void error(OutputStream out, String msg, int errorCode, String sqlState)
throws HiveException
{
error(out, msg, errorCode, sqlState, null);
}
@Override
public void error(OutputStream out, String errorMessage, int errorCode, String sqlState, String errorDetail)
throws HiveException
{
try {
out.write(errorMessage.getBytes("UTF-8"));
if(errorDetail != null) {
out.write(errorDetail.getBytes("UTF-8"));
}
out.write(errorCode);
if(sqlState != null) {
out.write(sqlState.getBytes("UTF-8"));//this breaks all the tests in .q files
}
out.write(terminator);
} catch (Exception e) {
throw new HiveException(e);
}
}
/**
* Show a list of tables.
*/
@Override
public void showTables(DataOutputStream out, Set<String> tables)
throws HiveException {
Iterator<String> iterTbls = tables.iterator();
try {
while (iterTbls.hasNext()) {
// create a row per table name
out.write(iterTbls.next().getBytes("UTF-8"));
out.write(terminator);
}
} catch (IOException e) {
throw new HiveException(e);
}
}
/**
* Show a list of tables including table types.
*/
public void showTablesExtended(DataOutputStream out, List<Table> tables)
throws HiveException {
if (tables.isEmpty()) {
// Nothing to do
return;
}
try {
TextMetaDataTable mdt = new TextMetaDataTable();
mdt.addRow("# Table Name", "Table Type");
for (Table table : tables) {
final String tableName = table.getTableName();
final String tableType = table.getTableType().toString();
mdt.addRow(tableName, tableType);
}
// In case the query is served by HiveServer2, don't pad it with spaces,
// as HiveServer2 output is consumed by JDBC/ODBC clients.
out.write(mdt.renderTable(!SessionState.get().isHiveServerQuery()).getBytes("UTF-8"));
out.write(terminator);
} catch (IOException e) {
throw new HiveException(e);
}
}
/**
* Show a list of materialized views.
*/
@Override
public void showMaterializedViews(DataOutputStream out, List<Table> materializedViews)
throws HiveException {
if (materializedViews.isEmpty()) {
// Nothing to do
return;
}
try {
TextMetaDataTable mdt = new TextMetaDataTable();
mdt.addRow("# MV Name", "Rewriting Enabled", "Mode");
for (Table mv : materializedViews) {
final String mvName = mv.getTableName();
final String rewriteEnabled = mv.isRewriteEnabled() ? "Yes" : "No";
// Currently, we only support manual refresh
// TODO: Update whenever we have other modes
final String refreshMode = "Manual refresh";
final String timeWindowString = mv.getProperty(MATERIALIZED_VIEW_REWRITING_TIME_WINDOW);
final String mode;
if (!org.apache.commons.lang.StringUtils.isEmpty(timeWindowString)) {
long time = HiveConf.toTime(timeWindowString,
HiveConf.getDefaultTimeUnit(HiveConf.ConfVars.HIVE_MATERIALIZED_VIEW_REWRITING_TIME_WINDOW),
TimeUnit.MINUTES);
if (time > 0L) {
mode = refreshMode + " (Valid for " + time + "min)";
} else if (time == 0L) {
mode = refreshMode + " (Valid until source tables modified)";
} else {
mode = refreshMode + " (Valid always)";
}
} else {
mode = refreshMode;
}
mdt.addRow(mvName, rewriteEnabled, mode);
}
// In case the query is served by HiveServer2, don't pad it with spaces,
// as HiveServer2 output is consumed by JDBC/ODBC clients.
out.write(mdt.renderTable(!SessionState.get().isHiveServerQuery()).getBytes("UTF-8"));
out.write(terminator);
} catch (IOException e) {
throw new HiveException(e);
}
}
@Override
public void describeTable(DataOutputStream outStream, String colPath,
String tableName, Table tbl, Partition part, List<FieldSchema> cols,
boolean isFormatted, boolean isExt,
boolean isOutputPadded, List<ColumnStatisticsObj> colStats,
PrimaryKeyInfo pkInfo, ForeignKeyInfo fkInfo,
UniqueConstraint ukInfo, NotNullConstraint nnInfo, DefaultConstraint dInfo, CheckConstraint cInfo,
StorageHandlerInfo storageHandlerInfo)
throws HiveException {
try {
List<FieldSchema> partCols = tbl.isPartitioned() ? tbl.getPartCols() : null;
String output = "";
boolean isColStatsAvailable = colStats != null;
TextMetaDataTable mdt = new TextMetaDataTable();
if (isFormatted && !isColStatsAvailable) {
output = "# ";
}
if (isFormatted) {
mdt.addRow(MetaDataFormatUtils.getColumnsHeader(colStats));
}
for (FieldSchema col : cols) {
mdt.addRow(MetaDataFormatUtils.extractColumnValues(col, isColStatsAvailable,
MetaDataFormatUtils.getColumnStatisticsObject(col.getName(), col.getType(), colStats)));
}
if (isColStatsAvailable) {
mdt.transpose();
}
output += mdt.renderTable(isOutputPadded);
if (colPath.equals(tableName)) {
if ((partCols != null) && !partCols.isEmpty() && showPartColsSeparately) {
mdt = new TextMetaDataTable();
output += MetaDataFormatUtils.LINE_DELIM + "# Partition Information" + MetaDataFormatUtils.LINE_DELIM + "# ";
mdt.addRow(MetaDataFormatUtils.getColumnsHeader(null));
for (FieldSchema col : partCols) {
mdt.addRow(MetaDataFormatUtils.extractColumnValues(col));
}
output += mdt.renderTable(isOutputPadded);
}
} else {
String statsState;
if (tbl.getParameters() != null && (statsState = tbl.getParameters().get(StatsSetupConst.COLUMN_STATS_ACCURATE)) != null) {
StringBuilder str = new StringBuilder();
MetaDataFormatUtils.formatOutput(StatsSetupConst.COLUMN_STATS_ACCURATE,
isFormatted ? StringEscapeUtils.escapeJava(statsState) : HiveStringUtils.escapeJava(statsState),
str, isOutputPadded);
output = output.concat(str.toString());
}
}
outStream.write(output.getBytes("UTF-8"));
if (tableName.equals(colPath)) {
if (isFormatted) {
if (part != null) {
output = MetaDataFormatUtils.getPartitionInformation(part);
} else {
output = MetaDataFormatUtils.getTableInformation(tbl, isOutputPadded);
}
outStream.write(output.getBytes("UTF-8"));
if ((pkInfo != null && !pkInfo.getColNames().isEmpty()) ||
(fkInfo != null && !fkInfo.getForeignKeys().isEmpty()) ||
(ukInfo != null && !ukInfo.getUniqueConstraints().isEmpty()) ||
(nnInfo != null && !nnInfo.getNotNullConstraints().isEmpty()) ||
cInfo != null && !cInfo.getCheckConstraints().isEmpty() ||
dInfo != null && !dInfo.getDefaultConstraints().isEmpty()) {
output = MetaDataFormatUtils.getConstraintsInformation(pkInfo, fkInfo, ukInfo, nnInfo, dInfo, cInfo);
outStream.write(output.getBytes("UTF-8"));
}
}
// if extended desc table then show the complete details of the table
if (isExt) {
// add empty line
outStream.write(terminator);
if (part != null) {
// show partition information
outStream.write(("Detailed Partition Information").getBytes("UTF-8"));
outStream.write(separator);
outStream.write(part.getTPartition().toString().getBytes("UTF-8"));
outStream.write(separator);
// comment column is empty
outStream.write(terminator);
} else {
// show table information
outStream.write(("Detailed Table Information").getBytes("UTF-8"));
outStream.write(separator);
String tableDesc = HiveStringUtils.escapeJava(tbl.getTTable().toString());
outStream.write(tableDesc.getBytes("UTF-8"));
outStream.write(separator);
outStream.write(terminator);
}
if ((pkInfo != null && !pkInfo.getColNames().isEmpty()) ||
(fkInfo != null && !fkInfo.getForeignKeys().isEmpty()) ||
(ukInfo != null && !ukInfo.getUniqueConstraints().isEmpty()) ||
(dInfo!= null && !dInfo.getDefaultConstraints().isEmpty()) ||
(cInfo != null && !cInfo.getCheckConstraints().isEmpty()) ||
(nnInfo != null && !nnInfo.getNotNullConstraints().isEmpty())) {
outStream.write(("Constraints").getBytes("UTF-8"));
outStream.write(separator);
if (pkInfo != null && !pkInfo.getColNames().isEmpty()) {
outStream.write(pkInfo.toString().getBytes("UTF-8"));
outStream.write(terminator);
}
if (fkInfo != null && !fkInfo.getForeignKeys().isEmpty()) {
outStream.write(fkInfo.toString().getBytes("UTF-8"));
outStream.write(terminator);
}
if (ukInfo != null && !ukInfo.getUniqueConstraints().isEmpty()) {
outStream.write(ukInfo.toString().getBytes("UTF-8"));
outStream.write(terminator);
}
if (nnInfo != null && !nnInfo.getNotNullConstraints().isEmpty()) {
outStream.write(nnInfo.toString().getBytes("UTF-8"));
outStream.write(terminator);
}
if (dInfo != null && !dInfo.getDefaultConstraints().isEmpty()) {
outStream.write(dInfo.toString().getBytes("UTF-8"));
outStream.write(terminator);
}
if (cInfo != null && !cInfo.getCheckConstraints().isEmpty()) {
outStream.write(cInfo.toString().getBytes("UTF-8"));
outStream.write(terminator);
}
}
if (storageHandlerInfo!= null) {
outStream.write(("StorageHandlerInfo").getBytes("UTF-8"));
outStream.write(terminator);
outStream.write(storageHandlerInfo.formatAsText().getBytes("UTF-8"));
outStream.write(terminator);
}
}
}
} catch (IOException e) {
throw new HiveException(e);
}
}
@Override
public void showTableStatus(DataOutputStream outStream,
Hive db,
HiveConf conf,
List<Table> tbls,
Map<String, String> part,
Partition par)
throws HiveException
{
try {
Iterator<Table> iterTables = tbls.iterator();
while (iterTables.hasNext()) {
// create a row per table name
Table tbl = iterTables.next();
String tableName = tbl.getTableName();
String tblLoc = null;
String inputFormattCls = null;
String outputFormattCls = null;
if (part != null) {
if (par != null) {
if (par.getLocation() != null) {
tblLoc = par.getDataLocation().toString();
}
inputFormattCls = par.getInputFormatClass() == null ? null : par.getInputFormatClass().getName();
outputFormattCls = par.getOutputFormatClass() == null ? null : par.getOutputFormatClass().getName();
}
} else {
if (tbl.getPath() != null) {
tblLoc = tbl.getDataLocation().toString();
}
inputFormattCls = tbl.getInputFormatClass() == null ? null : tbl.getInputFormatClass().getName();
outputFormattCls = tbl.getOutputFormatClass() == null ? null : tbl.getOutputFormatClass().getName();
}
String owner = tbl.getOwner();
List<FieldSchema> cols = tbl.getCols();
String ddlCols = MetaStoreUtils.getDDLFromFieldSchema("columns", cols);
boolean isPartitioned = tbl.isPartitioned();
String partitionCols = "";
if (isPartitioned) {
partitionCols = MetaStoreUtils.getDDLFromFieldSchema(
"partition_columns", tbl.getPartCols());
}
outStream.write(("tableName:" + tableName).getBytes("UTF-8"));
outStream.write(terminator);
outStream.write(("owner:" + owner).getBytes("UTF-8"));
outStream.write(terminator);
outStream.write(("location:" + tblLoc).getBytes("UTF-8"));
outStream.write(terminator);
outStream.write(("inputformat:" + inputFormattCls).getBytes("UTF-8"));
outStream.write(terminator);
outStream.write(("outputformat:" + outputFormattCls).getBytes("UTF-8"));
outStream.write(terminator);
outStream.write(("columns:" + ddlCols).getBytes("UTF-8"));
outStream.write(terminator);
outStream.write(("partitioned:" + isPartitioned).getBytes("UTF-8"));
outStream.write(terminator);
outStream.write(("partitionColumns:" + partitionCols).getBytes("UTF-8"));
outStream.write(terminator);
// output file system information
Path tblPath = tbl.getPath();
List<Path> locations = new ArrayList<Path>();
if (isPartitioned) {
if (par == null) {
for (Partition curPart : db.getPartitions(tbl)) {
if (curPart.getLocation() != null) {
locations.add(new Path(curPart.getLocation()));
}
}
} else {
if (par.getLocation() != null) {
locations.add(new Path(par.getLocation()));
}
}
} else {
if (tblPath != null) {
locations.add(tblPath);
}
}
if (!locations.isEmpty()) {
writeFileSystemStats(outStream, conf, locations, tblPath, false, 0);
}
outStream.write(terminator);
}
} catch (IOException e) {
throw new HiveException(e);
}
}
private static class FileData {
public long totalFileSize = 0;
public long maxFileSize = 0;
public long minFileSize = Long.MAX_VALUE;
public long lastAccessTime = 0;
public long lastUpdateTime = 0;
public int numOfFiles = 0;
int numOfErasureCodedFiles = 0;
}
// TODO: why is this in text formatter?!!
// This computes stats and should be in stats (de-duplicated too).
private void writeFileSystemStats(DataOutputStream outStream,
HiveConf conf,
List<Path> locations,
Path tblPath, boolean partSpecified, int indent) throws IOException {
FileData fd = new FileData();
boolean unknown = false;
FileSystem fs = tblPath.getFileSystem(conf);
// in case all files in locations do not exist
try {
FileStatus tmpStatus = fs.getFileStatus(tblPath);
fd.lastAccessTime = tmpStatus.getAccessTime();
fd.lastUpdateTime = tmpStatus.getModificationTime();
if (partSpecified) {
// check whether the part exists or not in fs
tmpStatus = fs.getFileStatus(locations.get(0));
}
} catch (IOException e) {
LOG.warn(
"Cannot access File System. File System status will be unknown: ", e);
unknown = true;
}
if (!unknown) {
for (Path loc : locations) {
try {
FileStatus status = fs.getFileStatus(loc);
// no matter loc is the table location or part location, it must be a
// directory.
if (!status.isDirectory()) {
continue;
}
processDir(status, fs, fd);
} catch (IOException e) {
// ignore
}
}
}
String unknownString = "unknown";
for (int k = 0; k < indent; k++) {
outStream.write(Utilities.INDENT.getBytes("UTF-8"));
}
outStream.write("totalNumberFiles:".getBytes("UTF-8"));
outStream.write((unknown ? unknownString : "" + fd.numOfFiles).getBytes("UTF-8"));
outStream.write(terminator);
if (fd.numOfErasureCodedFiles > 0) {
outStream.write("totalNumberErasureCodedFiles:".getBytes("UTF-8"));
outStream.write((unknown ? unknownString : "" + fd.numOfErasureCodedFiles).getBytes("UTF-8"));
outStream.write(terminator);
}
for (int k = 0; k < indent; k++) {
outStream.write(Utilities.INDENT.getBytes("UTF-8"));
}
outStream.write("totalFileSize:".getBytes("UTF-8"));
outStream.write((unknown ? unknownString : "" + fd.totalFileSize).getBytes("UTF-8"));
outStream.write(terminator);
for (int k = 0; k < indent; k++) {
outStream.write(Utilities.INDENT.getBytes("UTF-8"));
}
outStream.write("maxFileSize:".getBytes("UTF-8"));
outStream.write((unknown ? unknownString : "" + fd.maxFileSize).getBytes("UTF-8"));
outStream.write(terminator);
for (int k = 0; k < indent; k++) {
outStream.write(Utilities.INDENT.getBytes("UTF-8"));
}
outStream.write("minFileSize:".getBytes("UTF-8"));
if (fd.numOfFiles > 0) {
outStream.write((unknown ? unknownString : "" + fd.minFileSize).getBytes("UTF-8"));
} else {
outStream.write((unknown ? unknownString : "" + 0).getBytes("UTF-8"));
}
outStream.write(terminator);
for (int k = 0; k < indent; k++) {
outStream.write(Utilities.INDENT.getBytes("UTF-8"));
}
outStream.write("lastAccessTime:".getBytes("UTF-8"));
outStream.writeBytes((unknown || fd.lastAccessTime < 0) ? unknownString : ""
+ fd.lastAccessTime);
outStream.write(terminator);
for (int k = 0; k < indent; k++) {
outStream.write(Utilities.INDENT.getBytes("UTF-8"));
}
outStream.write("lastUpdateTime:".getBytes("UTF-8"));
outStream.write((unknown ? unknownString : "" + fd.lastUpdateTime).getBytes("UTF-8"));
outStream.write(terminator);
}
private void processDir(FileStatus status, FileSystem fs, FileData fd) throws IOException {
long accessTime = status.getAccessTime();
long updateTime = status.getModificationTime();
if (accessTime > fd.lastAccessTime) {
fd.lastAccessTime = accessTime;
}
if (updateTime > fd.lastUpdateTime) {
fd.lastUpdateTime = updateTime;
}
FileStatus[] files = fs.listStatus(status.getPath());
for (FileStatus currentStatus : files) {
if (currentStatus.isDirectory()) {
processDir(currentStatus, fs, fd);
continue;
}
fd.numOfFiles++;
if (currentStatus.isErasureCoded()) {
fd.numOfErasureCodedFiles++;
}
long fileLen = currentStatus.getLen();
fd.totalFileSize += fileLen;
if (fileLen > fd.maxFileSize) {
fd.maxFileSize = fileLen;
}
if (fileLen < fd.minFileSize) {
fd.minFileSize = fileLen;
}
accessTime = currentStatus.getAccessTime();
updateTime = currentStatus.getModificationTime();
if (accessTime > fd.lastAccessTime) {
fd.lastAccessTime = accessTime;
}
if (updateTime > fd.lastUpdateTime) {
fd.lastUpdateTime = updateTime;
}
}
}
/**
* Show the table partitions.
*/
@Override
public void showTablePartitions(DataOutputStream outStream, List<String> parts)
throws HiveException
{
try {
for (String part : parts) {
// Partition names are URL encoded. We decode the names unless Hive
// is configured to use the encoded names.
SessionState ss = SessionState.get();
if (ss != null && ss.getConf() != null &&
!ss.getConf().getBoolVar(HiveConf.ConfVars.HIVE_DECODE_PARTITION_NAME)) {
outStream.write(part.getBytes("UTF-8"));
} else {
outStream.write(FileUtils.unescapePathName(part).getBytes("UTF-8"));
}
outStream.write(terminator);
}
} catch (IOException e) {
throw new HiveException(e);
}
}
/**
* Show the list of databases
*/
@Override
public void showDatabases(DataOutputStream outStream, List<String> databases)
throws HiveException
{
try {
for (String database : databases) {
// create a row per database name
outStream.write(database.getBytes("UTF-8"));
outStream.write(terminator);
}
} catch (IOException e) {
throw new HiveException(e);
}
}
/**
* Describe a database
*/
@Override
public void showDatabaseDescription(DataOutputStream outStream, String database, String comment,
String location, String ownerName, PrincipalType ownerType, Map<String, String> params)
throws HiveException {
try {
outStream.write(database.getBytes("UTF-8"));
outStream.write(separator);
if (comment != null) {
outStream.write(HiveStringUtils.escapeJava(comment).getBytes("UTF-8"));
}
outStream.write(separator);
if (location != null) {
outStream.write(location.getBytes("UTF-8"));
}
outStream.write(separator);
if (ownerName != null) {
outStream.write(ownerName.getBytes("UTF-8"));
}
outStream.write(separator);
if (ownerType != null) {
outStream.write(ownerType.name().getBytes("UTF-8"));
}
outStream.write(separator);
if (params != null && !params.isEmpty()) {
outStream.write(params.toString().getBytes("UTF-8"));
}
outStream.write(terminator);
} catch (IOException e) {
throw new HiveException(e);
}
}
private static final Charset UTF_8 = Charset.forName("UTF-8");
public void showResourcePlans(DataOutputStream out, List<WMResourcePlan> resourcePlans)
throws HiveException {
try {
for (WMResourcePlan plan : resourcePlans) {
out.write(plan.getName().getBytes(UTF_8));
out.write(separator);
out.write(plan.getStatus().name().getBytes(UTF_8));
out.write(separator);
if (plan.isSetQueryParallelism()) {
out.write(Integer.toString(plan.getQueryParallelism()).getBytes(UTF_8));
} else {
write(out, "null");
}
out.write(separator);
if (plan.isSetDefaultPoolPath()) {
out.write(plan.getDefaultPoolPath().getBytes(UTF_8));
} else {
write(out, "null");
}
out.write(terminator);
}
} catch (IOException e) {
throw new HiveException(e);
}
}
/**
* Class to print text records for resource plans in the following format:
*
* <rp_name>[status=<STATUS>,parallelism=<parallelism>,defaultPool=<defaultPool>]
* <queue_name>[allocFraction=<fraction>,schedulingPolicy=<policy>,parallelism=<parallelism>]
* > <trigger_name>: if(<triggerExpression>){<actionExpression>}
*/
private static class TextRPFormatter implements MetaDataFormatUtils.RPFormatter {
private static final byte[] INDENT = str(" ");
private static final byte[] INDENT2 = str(" | ");
private static final byte[] INDENT_BRANCH = str(" + ");
private final DataOutputStream out;
private int indentLevel = 0;
TextRPFormatter(DataOutputStream out) {
this.out = out;
}
@Override
public void startRP(String rpName, Object ... kvPairs) throws IOException {
write(out, rpName);
writeFields(kvPairs);
out.write(terminator);
}
@Override
public void endRP() throws IOException {
}
@Override
public void startPools() throws IOException {
}
@Override
public void endPools() throws IOException {
}
@Override
public void startPool(String poolName, Object ... kvPairs) throws IOException {
++indentLevel;
writeIndent(true);
write(out, poolName);
writeFields(kvPairs);
out.write(terminator);
}
@Override
public void endPool() throws IOException {
--indentLevel;
}
@Override
public void startTriggers() throws IOException {
}
@Override
public void startMappings() throws IOException {
}
@Override
public void endTriggers() throws IOException {
}
@Override
public void endMappings() throws IOException {
}
private void writeFields(Object ... kvPairs)
throws IOException {
if (kvPairs.length % 2 != 0) {
throw new IllegalArgumentException("Expected pairs, got: " + kvPairs.length);
}
if (kvPairs.length < 2) {
return;
}
out.write('[');
out.write(kvPairs[0].toString().getBytes(UTF_8));
out.write('=');
out.write((kvPairs[1] == null ? "null" : kvPairs[1].toString()).getBytes(UTF_8));
for (int i = 2; i < kvPairs.length; i += 2) {
out.write(',');
out.write(kvPairs[i].toString().getBytes(UTF_8));
out.write('=');
out.write((kvPairs[i + 1] == null ? "null" : kvPairs[i + 1].toString()).getBytes(UTF_8));
}
out.write(']');
}
@Override
public void formatTrigger(
String triggerName, String actionExpression, String triggerExpression) throws IOException {
writeIndent(false);
write(out, "trigger ");
write(out, triggerName);
write(out, ": if (");
write(out, triggerExpression);
write(out, ") { ");
write(out, actionExpression);
write(out, " }");
out.write(terminator);
}
@Override
public void formatMappingType(String type, List<String> names) throws IOException {
final int maxList = 5;
writeIndent(false);
write(out, "mapped for ");
out.write(type.toLowerCase().getBytes(UTF_8));
if (!names.isEmpty()) {
write(out, "s: ");
int count = Math.min(maxList, names.size());
for (int i = 0; i < count; ++i) {
if (i != 0) {
write(out, ", ");
}
out.write(names.get(i).getBytes(UTF_8));
}
int remaining = names.size() - count;
if (remaining > 0) {
out.write((" and " + remaining + " others").getBytes(UTF_8));
}
}
out.write(terminator);
}
private void writeIndent(boolean isPool) throws IOException {
for (int i = 0; i < indentLevel - 1; ++i) {
out.write(INDENT);
}
if (isPool) {
out.write(INDENT_BRANCH);
} else {
out.write(INDENT);
out.write(INDENT2);
}
}
}
  /**
   * Renders the given resource plan (pools, triggers, mappings) as plain text
   * to {@code out}, by driving {@link TextRPFormatter} through
   * {@code MetaDataFormatUtils.formatFullRP}. The stream is not closed here.
   */
  public void showFullResourcePlan(DataOutputStream out, WMFullResourcePlan fullResourcePlan)
      throws HiveException {
    MetaDataFormatUtils.formatFullRP(new TextRPFormatter(out), fullResourcePlan);
  }
  /** Encodes {@code str} as UTF-8 bytes; all text output from this formatter is UTF-8. */
  private static byte[] str(String str) {
    return str.getBytes(UTF_8);
  }
  /** Writes {@code val} to {@code out} as UTF-8 bytes, without a line terminator. */
  private static void write(DataOutputStream out, String val) throws IOException {
    out.write(str(val));
  }
public void showErrors(DataOutputStream out, WMValidateResourcePlanResponse response)
throws HiveException {
try {
for (String error : response.getErrors()) {
write(out, error);
out.write(terminator);
}
for (String warning : response.getWarnings()) {
write(out, "warn: ");
write(out, warning);
out.write(terminator);
}
} catch (IOException e) {
throw new HiveException(e);
}
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.ql.parse;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.exec.AppMasterEventOperator;
import org.apache.hadoop.hive.ql.exec.CommonMergeJoinOperator;
import org.apache.hadoop.hive.ql.exec.DependencyCollectionTask;
import org.apache.hadoop.hive.ql.exec.FileSinkOperator;
import org.apache.hadoop.hive.ql.exec.MapJoinOperator;
import org.apache.hadoop.hive.ql.exec.Operator;
import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator;
import org.apache.hadoop.hive.ql.exec.TableScanOperator;
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.exec.TaskFactory;
import org.apache.hadoop.hive.ql.exec.UnionOperator;
import org.apache.hadoop.hive.ql.exec.tez.TezTask;
import org.apache.hadoop.hive.ql.hooks.ReadEntity;
import org.apache.hadoop.hive.ql.hooks.WriteEntity;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.plan.BaseWork;
import org.apache.hadoop.hive.ql.plan.DependencyCollectionWork;
import org.apache.hadoop.hive.ql.plan.FileSinkDesc;
import org.apache.hadoop.hive.ql.plan.MergeJoinWork;
import org.apache.hadoop.hive.ql.plan.MoveWork;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
import org.apache.hadoop.hive.ql.plan.TezEdgeProperty;
import org.apache.hadoop.hive.ql.plan.TezWork;
import org.apache.hadoop.hive.ql.plan.UnionWork;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
/**
* GenTezProcContext. GenTezProcContext maintains information
* about the tasks and operators as we walk the operator tree
* to break them into TezTasks.
*
*/
public class GenTezProcContext implements NodeProcessorCtx {
  // immutable context shared across the whole tree walk
  public final ParseContext parseContext;
  public final HiveConf conf;
  // move tasks to be hooked up once file sinks are processed
  public final List<Task<MoveWork>> moveTask;
  // rootTasks is the entry point for all generated tasks
  public final List<Task<?>> rootTasks;
  // read/write entities of the query, shared with the semantic analyzer
  public final Set<ReadEntity> inputs;
  public final Set<WriteEntity> outputs;
  // holds the root of the operator tree we're currently processing
  // this could be a table scan, but also a join, ptf, etc (i.e.:
  // first operator of a reduce task).
  public Operator<? extends OperatorDesc> currentRootOperator;
  // this is the original parent of the currentRootOperator as we scan
  // through the graph. A root operator might have multiple parents and
  // we just use this one to remember where we came from in the current
  // walk.
  public Operator<? extends OperatorDesc> parentOfRoot;
  // sequence number is used to name vertices (e.g.: Map 1, Reduce 14, ...)
  private AtomicInteger sequenceNumber;
  // tez task we're currently processing
  public TezTask currentTask;
  // last work we've processed (in order to hook it up to the current
  // one).
  public BaseWork preceedingWork;
  // map that keeps track of the last operator of a task to the work
  // that follows it. This is used for connecting them later.
  public final Map<Operator<?>, BaseWork> leafOperatorToFollowingWork;
  // a map that keeps track of work that need to be linked while
  // traversing an operator tree
  public final Map<Operator<?>, Map<BaseWork,TezEdgeProperty>> linkOpWithWorkMap;
  // a map to keep track of what reduce sinks have to be hooked up to
  // map join work
  public final Map<BaseWork, List<ReduceSinkOperator>> linkWorkWithReduceSinkMap;
  // map that says which mapjoin belongs to which work item
  public final Map<MapJoinOperator, List<BaseWork>> mapJoinWorkMap;
  // Mapping of reducesink to mapjoin operators
  // Only used for dynamic partitioned hash joins (mapjoin operator in the reducer)
  public final Map<Operator<?>, MapJoinOperator> smallTableParentToMapJoinMap;
  // a map to keep track of which root generated which work
  public final Map<Operator<?>, BaseWork> rootToWorkMap;
  // a map to keep track of which child generated which work
  public final Map<Operator<?>, List<BaseWork>> childToWorkMap;
  // we need to keep the original list of operators in the map join to know
  // what position in the mapjoin the different parent work items will have.
  public final Map<MapJoinOperator, List<Operator<?>>> mapJoinParentMap;
  // remember the dummy ops we created
  public final Map<Operator<?>, List<Operator<?>>> linkChildOpWithDummyOp;
  // used to group dependent tasks for multi table inserts
  public final DependencyCollectionTask dependencyTask;
  // remember map joins as we encounter them.
  public final Set<MapJoinOperator> currentMapJoinOperators;
  // used to hook up unions
  public final Map<Operator<?>, BaseWork> unionWorkMap;
  public final Map<Operator<?>, UnionWork> rootUnionWorkMap;
  public List<UnionOperator> currentUnionOperators;
  public final Set<BaseWork> workWithUnionOperators;
  // reduce sinks cloned while breaking up union plans
  public final Set<ReduceSinkOperator> clonedReduceSinks;
  // we link filesink that will write to the same final location
  public final Map<Path, List<FileSinkDesc>> linkedFileSinks;
  public final Set<FileSinkOperator> fileSinkSet;
  // remember which reducesinks we've already connected
  public final Set<ReduceSinkOperator> connectedReduceSinks;
  // merge-join work keyed by any operator belonging to it
  public final Map<Operator<?>, MergeJoinWork> opMergeJoinWorkMap;
  public CommonMergeJoinOperator currentMergeJoinOperator;
  // remember the event operators we've seen
  public final Set<AppMasterEventOperator> eventOperatorSet;
  // remember the event operators we've abandoned.
  public final Set<AppMasterEventOperator> abandonedEventOperatorSet;
  // remember the connections between ts and event
  public final Map<TableScanOperator, List<AppMasterEventOperator>> tsToEventMap;
  // When processing dynamic partitioned hash joins, some of the small tables may not get processed
  // before the mapjoin's parents are removed during GenTezWork.process(). This is to keep
  // track of which small tables haven't been processed yet.
  public Map<MapJoinOperator, Set<ReduceSinkOperator>> mapJoinToUnprocessedSmallTableReduceSinks;
  /**
   * Creates a fresh processing context. Also creates the first {@link TezTask},
   * adds it to {@code rootTasks}, and initializes all bookkeeping maps/sets
   * (LinkedHash* variants are used so iteration order follows insertion order).
   */
  @SuppressWarnings("unchecked")
  public GenTezProcContext(HiveConf conf, ParseContext parseContext,
      List<Task<MoveWork>> moveTask, List<Task<?>> rootTasks,
      Set<ReadEntity> inputs, Set<WriteEntity> outputs) {
    this.conf = conf;
    this.parseContext = parseContext;
    this.moveTask = moveTask;
    this.rootTasks = rootTasks;
    this.inputs = inputs;
    this.outputs = outputs;
    this.currentTask = (TezTask) TaskFactory.get(
        new TezWork(conf.getVar(HiveConf.ConfVars.HIVEQUERYID), conf));
    this.leafOperatorToFollowingWork = new LinkedHashMap<Operator<?>, BaseWork>();
    this.linkOpWithWorkMap = new LinkedHashMap<Operator<?>, Map<BaseWork, TezEdgeProperty>>();
    this.linkWorkWithReduceSinkMap = new LinkedHashMap<BaseWork, List<ReduceSinkOperator>>();
    this.smallTableParentToMapJoinMap = new LinkedHashMap<Operator<?>, MapJoinOperator>();
    this.mapJoinWorkMap = new LinkedHashMap<MapJoinOperator, List<BaseWork>>();
    this.rootToWorkMap = new LinkedHashMap<Operator<?>, BaseWork>();
    this.childToWorkMap = new LinkedHashMap<Operator<?>, List<BaseWork>>();
    this.mapJoinParentMap = new LinkedHashMap<MapJoinOperator, List<Operator<?>>>();
    this.currentMapJoinOperators = new LinkedHashSet<MapJoinOperator>();
    this.linkChildOpWithDummyOp = new LinkedHashMap<Operator<?>, List<Operator<?>>>();
    this.dependencyTask = (DependencyCollectionTask)
        TaskFactory.get(new DependencyCollectionWork());
    this.unionWorkMap = new LinkedHashMap<Operator<?>, BaseWork>();
    this.rootUnionWorkMap = new LinkedHashMap<Operator<?>, UnionWork>();
    this.currentUnionOperators = new LinkedList<UnionOperator>();
    this.workWithUnionOperators = new LinkedHashSet<BaseWork>();
    this.clonedReduceSinks = new LinkedHashSet<ReduceSinkOperator>();
    this.linkedFileSinks = new LinkedHashMap<Path, List<FileSinkDesc>>();
    this.fileSinkSet = new LinkedHashSet<FileSinkOperator>();
    this.connectedReduceSinks = new LinkedHashSet<ReduceSinkOperator>();
    this.eventOperatorSet = new LinkedHashSet<AppMasterEventOperator>();
    this.abandonedEventOperatorSet = new LinkedHashSet<AppMasterEventOperator>();
    this.tsToEventMap = new LinkedHashMap<TableScanOperator, List<AppMasterEventOperator>>();
    this.opMergeJoinWorkMap = new LinkedHashMap<Operator<?>, MergeJoinWork>();
    this.currentMergeJoinOperator = null;
    this.mapJoinToUnprocessedSmallTableReduceSinks = new HashMap<MapJoinOperator, Set<ReduceSinkOperator>>();
    this.sequenceNumber = parseContext.getContext().getSequencer();
    rootTasks.add(currentTask);
  }
  /** Returns the next vertex sequence number (shared sequencer, so unique per query). */
  public int nextSequenceNumber() {
    return sequenceNumber.incrementAndGet();
  }
}
|
|
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.jms.reply;
import java.math.BigInteger;
import java.util.Random;
import javax.jms.Destination;
import javax.jms.JMSException;
import javax.jms.Message;
import javax.jms.Session;
import org.apache.camel.AsyncCallback;
import org.apache.camel.CamelContext;
import org.apache.camel.Exchange;
import org.apache.camel.component.jms.DefaultSpringErrorHandler;
import org.apache.camel.component.jms.ReplyToType;
import org.springframework.jms.listener.AbstractMessageListenerContainer;
import org.springframework.jms.listener.DefaultMessageListenerContainer;
import org.springframework.jms.support.destination.DestinationResolver;
/**
* A {@link ReplyManager} when using regular queues.
*
* @version
*/
public class QueueReplyManager extends ReplyManagerSupport {

    // Random per-manager value used in the fixed JMS message selector, when a
    // reply-to destination selector name is configured on the endpoint.
    private String replyToSelectorValue;
    // Selector that is recomputed on-the-fly from outstanding correlation ids.
    private MessageSelectorCreator dynamicMessageSelector;

    public QueueReplyManager(CamelContext camelContext) {
        super(camelContext);
    }

    /** Creates the handler that completes the exchange when its reply arrives. */
    protected ReplyHandler createReplyHandler(ReplyManager replyManager, Exchange exchange, AsyncCallback callback,
                                              String originalCorrelationId, String correlationId, long requestTimeout) {
        return new QueueReplyHandler(replyManager, exchange, callback,
                originalCorrelationId, correlationId, requestTimeout);
    }

    /**
     * Re-keys a pending reply handler from a provisional correlation id to the
     * expected one, keeping the same request timeout.
     */
    public void updateCorrelationId(String correlationId, String newCorrelationId, long requestTimeout) {
        log.trace("Updated provisional correlationId [{}] to expected correlationId [{}]", correlationId, newCorrelationId);
        ReplyHandler handler = correlation.remove(correlationId);
        if (handler == null) {
            // should not happen that we can't find the handler
            return;
        }
        correlation.put(newCorrelationId, handler, requestTimeout);
    }

    /**
     * Dispatches a received reply to its waiting handler; replies that cannot
     * be correlated to a pending request are logged and dropped.
     */
    protected void handleReplyMessage(String correlationID, Message message, Session session) {
        ReplyHandler handler = correlation.get(correlationID);
        if (handler == null && endpoint.isUseMessageIDAsCorrelationID()) {
            // the reply may have raced ahead of updateCorrelationId; wait for the re-key
            handler = waitForProvisionCorrelationToBeUpdated(correlationID, message);
        }
        if (handler != null) {
            correlation.remove(correlationID);
            handler.onReply(correlationID, message, session);
        } else {
            // we could not correlate the received reply message to a matching request and therefore
            // we cannot continue routing the unknown message
            // log a warn and then ignore the message
            log.warn("Reply received for unknown correlationID [{}] on reply destination [{}]. Current correlation map size: {}. The message will be ignored: {}",
                    new Object[]{correlationID, replyTo, correlation.size(), message});
        }
    }

    /**
     * Stamps the configured reply-to selector header/property on the outgoing
     * Camel and JMS messages, when a fixed selector is in use.
     */
    public void setReplyToSelectorHeader(org.apache.camel.Message camelMessage, Message jmsMessage) throws JMSException {
        String replyToSelectorName = endpoint.getReplyToDestinationSelectorName();
        if (replyToSelectorName != null && replyToSelectorValue != null) {
            camelMessage.setHeader(replyToSelectorName, replyToSelectorValue);
            jmsMessage.setStringProperty(replyToSelectorName, replyToSelectorValue);
        }
    }

    /**
     * Resolves the reply-to destination once, caches it, and publishes it via
     * {@code setReplyTo} so the manager knows where replies arrive.
     */
    private final class DestinationResolverDelegate implements DestinationResolver {
        private DestinationResolver delegate;
        private Destination destination;

        public DestinationResolverDelegate(DestinationResolver delegate) {
            this.delegate = delegate;
        }

        public Destination resolveDestinationName(Session session, String destinationName,
                                                  boolean pubSubDomain) throws JMSException {
            synchronized (QueueReplyManager.this) {
                // resolve the reply to destination
                if (destination == null) {
                    destination = delegate.resolveDestinationName(session, destinationName, pubSubDomain);
                    setReplyTo(destination);
                }
            }
            return destination;
        }
    }

    /**
     * Creates and configures the Spring listener container that consumes
     * replies: shared (fixed or dynamic selector) or exclusive queue, plus
     * caching, concurrency, error handling and task-executor settings taken
     * from the endpoint configuration.
     *
     * @throws IllegalArgumentException if the configured ReplyToType is not
     *         supported for reply queues (e.g. Temporary)
     */
    protected AbstractMessageListenerContainer createListenerContainer() throws Exception {
        DefaultMessageListenerContainer answer;
        ReplyToType type = endpoint.getConfiguration().getReplyToType();
        if (type == null) {
            // use shared by default for reply queues
            type = ReplyToType.Shared;
        }
        if (ReplyToType.Shared == type) {
            // shared reply to queues support either a fixed or dynamic JMS message selector
            String replyToSelectorName = endpoint.getReplyToDestinationSelectorName();
            if (replyToSelectorName != null) {
                // create a random selector value we will use for the reply queue
                replyToSelectorValue = "ID:" + new BigInteger(24 * 8, new Random()).toString(16);
                String fixedMessageSelector = replyToSelectorName + "='" + replyToSelectorValue + "'";
                answer = new SharedQueueMessageListenerContainer(endpoint, fixedMessageSelector);
                // must use cache level consumer for fixed message selector
                answer.setCacheLevel(DefaultMessageListenerContainer.CACHE_CONSUMER);
                // parameterized logging: avoid building the message when DEBUG is off
                log.debug("Using shared queue: {} with fixed message selector [{}] as reply listener: {}",
                        new Object[]{endpoint.getReplyTo(), fixedMessageSelector, answer});
            } else {
                // use a dynamic message selector which will select the message we want to receive as reply
                dynamicMessageSelector = new MessageSelectorCreator(correlation);
                answer = new SharedQueueMessageListenerContainer(endpoint, dynamicMessageSelector);
                // must use cache level session for dynamic message selector,
                // as otherwise the dynamic message selector will not be updated on-the-fly
                answer.setCacheLevel(DefaultMessageListenerContainer.CACHE_SESSION);
                log.debug("Using shared queue: {} with dynamic message selector as reply listener: {}",
                        endpoint.getReplyTo(), answer);
            }
            // shared is not as fast as temporary or exclusive, so log this so the end user may be aware of this
            log.warn("{} is using a shared reply queue, which is not as fast as alternatives."
                    + " See more detail at the section 'Request-reply over JMS' at http://camel.apache.org/jms", endpoint);
        } else if (ReplyToType.Exclusive == type) {
            answer = new ExclusiveQueueMessageListenerContainer(endpoint);
            // must use cache level consumer for exclusive as there is no message selector
            answer.setCacheLevel(DefaultMessageListenerContainer.CACHE_CONSUMER);
            log.debug("Using exclusive queue:{} as reply listener: {}", endpoint.getReplyTo(), answer);
        } else {
            throw new IllegalArgumentException("ReplyToType " + type + " is not supported for reply queues");
        }
        String replyToCacheLevelName = endpoint.getConfiguration().getReplyToCacheLevelName();
        if (replyToCacheLevelName != null) {
            answer.setCacheLevelName(replyToCacheLevelName);
            log.debug("Setting the replyCacheLevel to be {}", replyToCacheLevelName);
        }
        DestinationResolver resolver = endpoint.getDestinationResolver();
        if (resolver == null) {
            resolver = answer.getDestinationResolver();
        }
        answer.setDestinationResolver(new DestinationResolverDelegate(resolver));
        answer.setDestinationName(endpoint.getReplyTo());
        answer.setAutoStartup(true);
        answer.setIdleConsumerLimit(endpoint.getIdleConsumerLimit());
        answer.setIdleTaskExecutionLimit(endpoint.getIdleTaskExecutionLimit());
        if (endpoint.getMaxMessagesPerTask() >= 0) {
            answer.setMaxMessagesPerTask(endpoint.getMaxMessagesPerTask());
        }
        answer.setMessageListener(this);
        // reply queues are plain queues, never topics, and never durable
        answer.setPubSubDomain(false);
        answer.setSubscriptionDurable(false);
        answer.setConcurrentConsumers(endpoint.getReplyToConcurrentConsumers());
        if (endpoint.getReplyToMaxConcurrentConsumers() > 0) {
            answer.setMaxConcurrentConsumers(endpoint.getReplyToMaxConcurrentConsumers());
        }
        answer.setConnectionFactory(endpoint.getConnectionFactory());
        String clientId = endpoint.getClientId();
        if (clientId != null) {
            clientId += ".CamelReplyManager";
            answer.setClientId(clientId);
        }
        // we cannot do request-reply over JMS with transaction
        answer.setSessionTransacted(false);
        // other optional properties
        if (endpoint.getExceptionListener() != null) {
            answer.setExceptionListener(endpoint.getExceptionListener());
        }
        if (endpoint.getErrorHandler() != null) {
            answer.setErrorHandler(endpoint.getErrorHandler());
        } else {
            answer.setErrorHandler(new DefaultSpringErrorHandler(endpoint.getCamelContext(), QueueReplyManager.class, endpoint.getErrorHandlerLoggingLevel(), endpoint.isErrorHandlerLogStackTrace()));
        }
        if (endpoint.getReceiveTimeout() >= 0) {
            answer.setReceiveTimeout(endpoint.getReceiveTimeout());
        }
        if (endpoint.getRecoveryInterval() >= 0) {
            answer.setRecoveryInterval(endpoint.getRecoveryInterval());
        }
        // set task executor
        if (endpoint.getTaskExecutor() != null) {
            if (log.isDebugEnabled()) {
                log.debug("Using custom TaskExecutor: {} on listener container: {}", endpoint.getTaskExecutor(), answer);
            }
            answer.setTaskExecutor(endpoint.getTaskExecutor());
        }
        // setup a bean name which is used by Spring JMS as the thread name
        String name = "QueueReplyManager[" + answer.getDestinationName() + "]";
        answer.setBeanName(name);
        if (answer.getConcurrentConsumers() > 1) {
            if (ReplyToType.Shared == type) {
                // warn if using concurrent consumer with shared reply queue as that may not work properly
                log.warn("Using {}-{} concurrent consumer on {} with shared queue {} may not work properly with all message brokers.",
                        new Object[]{answer.getConcurrentConsumers(), answer.getMaxConcurrentConsumers(), name, endpoint.getReplyTo()});
            } else {
                // log that we are using concurrent consumers
                log.info("Using {}-{} concurrent consumers on {}",
                        new Object[]{answer.getConcurrentConsumers(), answer.getMaxConcurrentConsumers(), name});
            }
        }
        return answer;
    }
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.distributed;
import java.io.File;
import java.io.IOException;
import java.net.URLClassLoader;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.function.Consumer;
import java.util.function.IntFunction;
import java.util.stream.Collectors;
import com.google.common.collect.Sets;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.cassandra.concurrent.NamedThreadFactory;
import org.apache.cassandra.db.ColumnFamilyStore;
import org.apache.cassandra.db.ConsistencyLevel;
import org.apache.cassandra.db.Keyspace;
import org.apache.cassandra.diag.DiagnosticEventService;
import org.apache.cassandra.io.util.FileUtils;
import org.apache.cassandra.locator.InetAddressAndPort;
import org.apache.cassandra.net.MessagingService;
import org.apache.cassandra.schema.SchemaEvent;
import org.apache.cassandra.utils.FBUtilities;
import org.apache.cassandra.utils.concurrent.SimpleCondition;
/**
 * TestCluster creates, initializes and manages Cassandra instances ({@link Instance}).
*
* All instances created under the same cluster will have a shared ClassLoader that'll preload
* common classes required for configuration and communication (byte buffers, primitives, config
* objects etc). Shared classes are listed in {@link InstanceClassLoader#commonClasses}.
*
* Each instance has its own class loader that will load logging, yaml libraries and all non-shared
* Cassandra package classes. The rule of thumb is that we'd like to have all Cassandra-specific things
 * (unless explicitly shared through the common classloader) on a per-classloader basis in order to
 * allow creating more than one instance of DatabaseDescriptor and other Cassandra singletons.
*
* All actions (reading, writing, schema changes, etc) are executed by serializing lambda/runnables,
* transferring them to instance-specific classloaders, deserializing and running them there. Most of
* the things can be simply captured in closure or passed through `apply` method of the wrapped serializable
* function/callable. You can use {@link InvokableInstance#{applies|runs|consumes}OnInstance} for executing
* code on specific instance.
*
* Each instance has its own logger. Each instance log line will contain INSTANCE{instance_id}.
*
* As of today, messaging is faked by hooking into MessagingService, so we're not using usual Cassandra
* handlers for internode to have more control over it. Messaging is wired by passing verbs manually.
* coordinator-handling code and hooks to the callbacks can be found in {@link Coordinator}.
*/
public class TestCluster implements AutoCloseable
{
    // WARNING: we have this logger not (necessarily) for logging, but
    // to ensure we have instantiated the main classloader's LoggerFactory (and any LogbackStatusListener)
    // before we instantiate any for a new instance
    private static final Logger logger = LoggerFactory.getLogger(TestCluster.class);

    // root directory holding each instance's data/log directories; deleted on close()
    private final File root;
    private final List<Instance> instances;
    private final Coordinator coordinator;
    private final Map<InetAddressAndPort, Instance> instanceMap;
    private final MessageFilters filters;

    private TestCluster(File root, List<Instance> instances)
    {
        this.root = root;
        this.instances = instances;
        this.instanceMap = new HashMap<>();
        // the first instance doubles as the coordinator for all queries
        this.coordinator = new Coordinator(instances.get(0));
        this.filters = new MessageFilters(this);
    }

    /** Launches all instances in parallel (on their isolated executors) and indexes them by address. */
    void launch()
    {
        FBUtilities.waitOnFutures(instances.stream()
                .map(i -> i.isolatedExecutor.submit(() -> i.launch(this)))
                .collect(Collectors.toList())
        );
        for (Instance instance : instances)
            instanceMap.put(instance.getBroadcastAddress(), instance);
    }

    public int size()
    {
        return instances.size();
    }

    public Coordinator coordinator()
    {
        return coordinator;
    }

    /**
     * WARNING: we index from 1 here, for consistency with inet address!
     */
    public Instance get(int idx)
    {
        return instances.get(idx - 1);
    }

    public Instance get(InetAddressAndPort addr)
    {
        return instanceMap.get(addr);
    }

    MessageFilters filters()
    {
        return filters;
    }

    MessageFilters.Builder verbs(MessagingService.Verb ... verbs)
    {
        return filters.verbs(verbs);
    }

    /** Disables auto-compaction for every table of the given keyspace on every instance. */
    public void disableAutoCompaction(String keyspace)
    {
        for (Instance instance : instances)
        {
            instance.runOnInstance(() -> {
                for (ColumnFamilyStore cs : Keyspace.open(keyspace).getColumnFamilyStores())
                    cs.disableAutoCompaction();
            });
        }
    }

    /**
     * Executes a schema-changing statement on the coordinator and blocks until
     * every instance agrees on the resulting schema version.
     */
    public void schemaChange(String query)
    {
        try (SchemaChangeMonitor monitor = new SchemaChangeMonitor())
        {
            // execute the schema change
            coordinator().execute(query, ConsistencyLevel.ALL);
            monitor.waitForAgreement();
        }
    }

    /**
     * Will wait for a schema change AND agreement that occurs after it is created
     * (and precedes the invocation to waitForAgreement)
     *
     * Works by simply checking if all UUIDs agree after any schema version change event,
     * so long as the waitForAgreement method has been entered (indicating the change has
     * taken place on the coordinator)
     *
     * This could perhaps be made a little more robust, but this should more than suffice.
     */
    public class SchemaChangeMonitor implements AutoCloseable
    {
        // per-instance unsubscribe callbacks, run on close()
        final List<Runnable> cleanup;
        volatile boolean schemaHasChanged;
        final SimpleCondition agreement = new SimpleCondition();

        public SchemaChangeMonitor()
        {
            this.cleanup = new ArrayList<>(instances.size());
            for (Instance instance : instances)
            {
                cleanup.add(
                        instance.appliesOnInstance(
                                (Runnable runnable) -> {
                                    Consumer<SchemaEvent> consumer = event -> runnable.run();
                                    DiagnosticEventService.instance().subscribe(SchemaEvent.class, SchemaEvent.SchemaEventType.VERSION_UPDATED, consumer);
                                    return (Runnable) () -> DiagnosticEventService.instance().unsubscribe(SchemaEvent.class, consumer);
                                }
                        ).apply(this::signal)
                );
            }
        }

        // signal waiters once the change has been issued AND all schema versions match
        private void signal()
        {
            if (schemaHasChanged && 1 == instances.stream().map(Instance::getSchemaVersion).distinct().count())
                agreement.signalAll();
        }

        @Override
        public void close()
        {
            for (Runnable runnable : cleanup)
                runnable.run();
        }

        /**
         * Blocks until all instances report the same schema version, or fails
         * after one minute.
         *
         * @throws IllegalStateException on timeout or interruption
         */
        public void waitForAgreement()
        {
            schemaHasChanged = true;
            signal();
            try
            {
                // the timed await returns false on timeout; the original ignored
                // that result and silently treated a timeout as agreement
                if (!agreement.await(1L, TimeUnit.MINUTES))
                    throw new IllegalStateException("Schema agreement not reached");
            }
            catch (InterruptedException e)
            {
                // re-assert the interrupt flag and preserve the cause
                Thread.currentThread().interrupt();
                throw new IllegalStateException("Schema agreement not reached", e);
            }
        }
    }

    /** Executes a schema-changing statement directly on one instance (1-indexed), without waiting for agreement. */
    public void schemaChange(String statement, int instance)
    {
        get(instance).schemaChange(statement);
    }

    public static TestCluster create(int nodeCount) throws Throwable
    {
        return create(nodeCount, Files.createTempDirectory("dtests").toFile());
    }

    /**
     * Creates and launches a cluster of {@code nodeCount} instances under {@code root},
     * spreading tokens evenly across the full token range.
     */
    public static TestCluster create(int nodeCount, File root)
    {
        root.mkdirs();
        setupLogging(root);

        IntFunction<ClassLoader> classLoaderFactory = InstanceClassLoader.createFactory(
                (URLClassLoader) Thread.currentThread().getContextClassLoader());

        List<Instance> instances = new ArrayList<>();
        long token = Long.MIN_VALUE + 1, increment = 2 * (Long.MAX_VALUE / nodeCount);
        for (int i = 0 ; i < nodeCount ; ++i)
        {
            // instance ids are 1-based, matching the 1-based get(int)
            InstanceConfig instanceConfig = InstanceConfig.generate(i + 1, root, String.valueOf(token));
            instances.add(new Instance(instanceConfig, classLoaderFactory.apply(i + 1)));
            token += increment;
        }

        TestCluster cluster = new TestCluster(root, instances);
        cluster.launch();
        return cluster;
    }

    // copies the dtest logback config next to the cluster root and points logback at it
    private static void setupLogging(File root)
    {
        try
        {
            String testConfPath = "test/conf/logback-dtest.xml";
            Path logConfPath = Paths.get(root.getPath(), "/logback-dtest.xml");
            if (!logConfPath.toFile().exists())
            {
                Files.copy(new File(testConfPath).toPath(),
                           logConfPath);
            }
            System.setProperty("logback.configurationFile", "file://" + logConfPath);
        }
        catch (IOException e)
        {
            throw new RuntimeException(e);
        }
    }

    @Override
    public void close()
    {
        List<Future<?>> futures = instances.stream()
                .map(i -> i.isolatedExecutor.submit(i::shutdown))
                .collect(Collectors.toList());
        // Make sure to only delete directory when threads are stopped
        FBUtilities.waitOnFutures(futures, 60, TimeUnit.SECONDS);
        FileUtils.deleteRecursive(root);
        //withThreadLeakCheck(futures);
        System.gc();
    }

    // We do not want this check to run every time until we fix problems with thread stops
    private void withThreadLeakCheck(List<Future<?>> futures)
    {
        FBUtilities.waitOnFutures(futures);
        Set<Thread> threadSet = Thread.getAllStackTraces().keySet();
        threadSet = Sets.difference(threadSet, Collections.singletonMap(Thread.currentThread(), null).keySet());
        if (!threadSet.isEmpty())
        {
            for (Thread thread : threadSet)
            {
                System.out.println(thread);
                System.out.println(Arrays.toString(thread.getStackTrace()));
            }
            throw new RuntimeException(String.format("Not all threads have shut down. %d threads are still running: %s", threadSet.size(), threadSet));
        }
    }
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.phoenix.query;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableUtils;
import org.apache.phoenix.execute.DescVarLengthFastByteComparisons;
import org.apache.phoenix.schema.SortOrder;
import org.apache.phoenix.util.ByteUtil;
import org.apache.phoenix.util.ScanUtil.BytesComparator;
import com.google.common.base.Function;
import com.google.common.collect.Lists;
import edu.umd.cs.findbugs.annotations.NonNull;
/**
*
* Class that represents an upper/lower bound key range.
*
*
* @since 0.1
*/
public class KeyRange implements Writable {
// Identifies which end of the range an operation refers to.
public enum Bound { LOWER, UPPER };
// Sentinel key used for both bounds of EMPTY_RANGE; never used as a real row key.
private static final byte[] DEGENERATE_KEY = new byte[] {1};
// IMPORTANT: UNBOUND and NULL_BOUND are both empty arrays and are distinguished
// ONLY by reference identity (==). UNBOUND means "no bound at this end";
// NULL_BOUND means "the null value" for a variable-length type. Throughout this
// class, == comparisons against these constants are intentional and must not be
// replaced with length or content checks.
public static final byte[] UNBOUND = new byte[0];
public static final byte[] NULL_BOUND = new byte[0];
/**
* KeyRange for variable length null values. Since we need to represent this using an empty byte array (which
* is what we use for upper/lower bound), we create this range using the private constructor rather than
* going through the static creation method (where this would not be possible).
*/
public static final KeyRange IS_NULL_RANGE = new KeyRange(NULL_BOUND, true, NULL_BOUND, true);
/**
* KeyRange for non null variable length values. Since we need to represent this using an empty byte array (which
* is what we use for upper/lower bound), we create this range using the private constructor rather than going
* through the static creation method (where this would not be possible).
*/
public static final KeyRange IS_NOT_NULL_RANGE = new KeyRange(ByteUtil.nextKey(QueryConstants.SEPARATOR_BYTE_ARRAY), true, UNBOUND, false);
/**
* KeyRange for an empty key range
*/
public static final KeyRange EMPTY_RANGE = new KeyRange(DEGENERATE_KEY, false, DEGENERATE_KEY, false);
/**
* KeyRange that contains all values
*/
public static final KeyRange EVERYTHING_RANGE = new KeyRange(UNBOUND, false, UNBOUND, false);
// Maps a single key byte[] to the point range [key, key] (both bounds inclusive).
public static final Function<byte[], KeyRange> POINT = new Function<byte[], KeyRange>() {
@Override
public KeyRange apply(byte[] input) {
return new KeyRange(input, true, input, true);
}
};
// Orders ranges by lower bound first (unbound sorts first), then by upper bound,
// using ascending unsigned byte order. Inclusive lower bounds sort before
// exclusive ones at the same key; the reverse holds for upper bounds.
public static final Comparator<KeyRange> COMPARATOR = new Comparator<KeyRange>() {
@Override public int compare(KeyRange o1, KeyRange o2) {
int result = Boolean.compare(o2.lowerUnbound(), o1.lowerUnbound());
if (result != 0) {
return result;
}
result = Bytes.BYTES_COMPARATOR.compare(o1.getLowerRange(), o2.getLowerRange());
if (result != 0) {
return result;
}
result = Boolean.compare(o2.isLowerInclusive(), o1.isLowerInclusive());
if (result != 0) {
return result;
}
result = Boolean.compare(o1.upperUnbound(), o2.upperUnbound());
if (result != 0) {
return result;
}
result = Bytes.BYTES_COMPARATOR.compare(o1.getUpperRange(), o2.getUpperRange());
if (result != 0) {
return result;
}
return Boolean.compare(o2.isUpperInclusive(), o1.isUpperInclusive());
}
};
// Same ordering as COMPARATOR but compares key bytes with the descending
// variable-length comparator, for ranges over DESC-sorted columns.
public static final Comparator<KeyRange> DESC_COMPARATOR = new Comparator<KeyRange>() {
@Override public int compare(KeyRange o1, KeyRange o2) {
int result = Boolean.compare(o2.lowerUnbound(), o1.lowerUnbound());
if (result != 0) {
return result;
}
result = DescVarLengthFastByteComparisons.compareTo(o1.getLowerRange(), 0, o1.getLowerRange().length,
o2.getLowerRange(), 0, o2.getLowerRange().length);
if (result != 0) {
return result;
}
result = Boolean.compare(o2.isLowerInclusive(), o1.isLowerInclusive());
if (result != 0) {
return result;
}
result = Boolean.compare(o1.upperUnbound(), o2.upperUnbound());
if (result != 0) {
return result;
}
result = DescVarLengthFastByteComparisons.compareTo(o1.getUpperRange(), 0, o1.getUpperRange().length,
o2.getUpperRange(), 0, o2.getUpperRange().length);
if (result != 0) {
return result;
}
return Boolean.compare(o2.isUpperInclusive(), o1.isUpperInclusive());
}
};
// Bound fields are never null for a constructed instance: constructors and
// readFields() always assign them (possibly to a sentinel array).
private byte[] lowerRange;
private boolean lowerInclusive;
private byte[] upperRange;
private boolean upperInclusive;
// True iff the range represents exactly one key; derived in init().
private boolean isSingleKey;
/**
* Returns the point range [point, point], i.e. both bounds inclusive.
*/
public static KeyRange getKeyRange(byte[] point) {
return getKeyRange(point, true, point, true);
}
/**
* Returns the half-open range [lowerRange, upperRange): lower bound inclusive,
* upper bound exclusive.
*/
public static KeyRange getKeyRange(byte[] lowerRange, byte[] upperRange) {
return getKeyRange(lowerRange, true, upperRange, false);
}
/**
* Returns the canonical singleton (EMPTY_RANGE, EVERYTHING_RANGE or IS_NULL_RANGE)
* when the requested bounds collapse to one of those special cases, or null when a
* regular KeyRange instance must be created by the caller.
*/
private static KeyRange getSingleton(byte[] lowerRange, boolean lowerInclusive,
byte[] upperRange, boolean upperInclusive) {
if (lowerRange == null || upperRange == null) {
return EMPTY_RANGE;
}
if (lowerRange.length == 0 && upperRange.length == 0) {
// Need singleton to represent NULL range so it gets treated differently
// than an unbound RANGE.
return lowerInclusive && upperInclusive ? IS_NULL_RANGE : EVERYTHING_RANGE;
}
// Only compare the bounds when both are real keys (non-empty or the NULL_BOUND
// sentinel); an empty non-NULL_BOUND array means "unbound" and cannot be compared.
if ( ( lowerRange.length != 0 || lowerRange == NULL_BOUND ) && ( upperRange.length != 0 || upperRange == NULL_BOUND ) ) {
int cmp = Bytes.compareTo(lowerRange, upperRange);
// lower > upper, or lower == upper without both bounds inclusive => no keys.
if (cmp > 0 || (cmp == 0 && !(lowerInclusive && upperInclusive))) {
return EMPTY_RANGE;
}
}
return null;
}
/**
* General factory. Empty bound arrays that are not the NULL_BOUND sentinel are
* normalized to the UNBOUND sentinel (and forced non-inclusive), so identity
* checks in lowerUnbound()/upperUnbound() work on the result.
*/
public static KeyRange getKeyRange(byte[] lowerRange, boolean lowerInclusive,
byte[] upperRange, boolean upperInclusive) {
KeyRange range = getSingleton(lowerRange, lowerInclusive, upperRange, upperInclusive);
if (range != null) {
return range;
}
boolean unboundLower = false;
boolean unboundUpper = false;
if (lowerRange.length == 0 && lowerRange != NULL_BOUND) {
lowerRange = UNBOUND;
lowerInclusive = false;
unboundLower = true;
}
if (upperRange.length == 0 && upperRange != NULL_BOUND) {
upperRange = UNBOUND;
upperInclusive = false;
unboundUpper = true;
}
return new KeyRange(lowerRange, unboundLower ? false : lowerInclusive,
upperRange, unboundUpper ? false : upperInclusive);
}
/**
* Deserializes a KeyRange from the given input, canonicalizing to the singleton
* instances where applicable so that later == comparisons behave correctly.
*/
public static KeyRange read(DataInput input) throws IOException {
KeyRange range = new KeyRange();
range.readFields(input);
// Translate to singleton after reading
KeyRange singletonRange = getSingleton(range.lowerRange, range.lowerInclusive, range.upperRange, range.upperInclusive);
if (singletonRange != null) {
return singletonRange;
}
// Otherwise, just keep the range we read
return range;
}
// No-arg constructor for Writable deserialization; starts out degenerate until
// readFields() populates the bounds.
private KeyRange() {
this.lowerRange = DEGENERATE_KEY;
this.lowerInclusive = false;
this.upperRange = DEGENERATE_KEY;
this.upperInclusive = false;
this.isSingleKey = false;
}
private KeyRange(byte[] lowerRange, boolean lowerInclusive, byte[] upperRange, boolean upperInclusive) {
this.lowerRange = lowerRange;
this.lowerInclusive = lowerInclusive;
this.upperRange = upperRange;
this.upperInclusive = upperInclusive;
init();
}
// A range is a single key iff neither bound is UNBOUND, both bounds are
// inclusive, and the bound keys are byte-equal.
private void init() {
this.isSingleKey = lowerRange != UNBOUND && upperRange != UNBOUND
&& lowerInclusive && upperInclusive && Bytes.compareTo(lowerRange, upperRange) == 0;
}
/** Returns the key bytes of the requested bound. */
public byte[] getRange(Bound bound) {
return bound == Bound.LOWER ? getLowerRange() : getUpperRange();
}
/** Returns whether the requested bound is inclusive. */
public boolean isInclusive(Bound bound) {
return bound == Bound.LOWER ? isLowerInclusive() : isUpperInclusive();
}
/** Returns whether the requested bound is unbounded. */
public boolean isUnbound(Bound bound) {
return bound == Bound.LOWER ? lowerUnbound() : upperUnbound();
}
public boolean isSingleKey() {
return isSingleKey;
}
public int compareLowerToUpperBound(ImmutableBytesWritable ptr, boolean isInclusive, BytesComparator comparator) {
return compareLowerToUpperBound(ptr.get(), ptr.getOffset(), ptr.getLength(), isInclusive, comparator);
}
public int compareLowerToUpperBound(ImmutableBytesWritable ptr, BytesComparator comparator) {
return compareLowerToUpperBound(ptr, true, comparator);
}
public int compareUpperToLowerBound(ImmutableBytesWritable ptr, boolean isInclusive, BytesComparator comparator) {
return compareUpperToLowerBound(ptr.get(), ptr.getOffset(), ptr.getLength(), isInclusive, comparator);
}
public int compareUpperToLowerBound(ImmutableBytesWritable ptr, BytesComparator comparator) {
return compareUpperToLowerBound(ptr, true, comparator);
}
public int compareLowerToUpperBound( byte[] b, int o, int l, BytesComparator comparator) {
return compareLowerToUpperBound(b,o,l,true, comparator);
}
public int compareLowerToUpperBound( byte[] b, BytesComparator comparator) {
return compareLowerToUpperBound(b,0,b.length, comparator);
}
/**
* Compares a lower bound against an upper bound
* @param b upper bound byte array
* @param o upper bound offset
* @param l upper bound length
* @param isInclusive upper bound inclusive
* @param comparator comparator used to do compare the byte array using offset and length
* @return -1 if the lower bound is less than the upper bound,
* 1 if the lower bound is greater than the upper bound,
* and 0 if they are equal.
*/
public int compareLowerToUpperBound( byte[] b, int o, int l, boolean isInclusive, BytesComparator comparator) {
// An unbound side always compares lower (note the identity check against UNBOUND).
if (lowerUnbound() || b == KeyRange.UNBOUND) {
return -1;
}
int cmp = comparator.compare(lowerRange, 0, lowerRange.length, b, o, l);
if (cmp > 0) {
return 1;
}
if (cmp < 0) {
return -1;
}
// Equal keys: the bounds only "touch" if both are inclusive.
if (lowerInclusive && isInclusive) {
return 0;
}
return 1;
}
public int compareUpperToLowerBound(byte[] b, BytesComparator comparator) {
return compareUpperToLowerBound(b,0,b.length, comparator);
}
public int compareUpperToLowerBound(byte[] b, int o, int l, BytesComparator comparator) {
return compareUpperToLowerBound(b,o,l, true, comparator);
}
/**
* Compares this range's upper bound against a lower bound.
* @param b lower bound byte array
* @param o lower bound offset
* @param l lower bound length
* @param isInclusive lower bound inclusive
* @param comparator comparator used to compare the byte array using offset and length
* @return -1 if the upper bound is less than the lower bound,
* 1 if the upper bound is greater than the lower bound,
* and 0 if they are equal.
*/
public int compareUpperToLowerBound(byte[] b, int o, int l, boolean isInclusive, BytesComparator comparator) {
// An unbound side always compares higher (note the identity check against UNBOUND).
if (upperUnbound() || b == KeyRange.UNBOUND) {
return 1;
}
int cmp = comparator.compare(upperRange, 0, upperRange.length, b, o, l);
if (cmp > 0) {
return 1;
}
if (cmp < 0) {
return -1;
}
// Equal keys: the bounds only "touch" if both are inclusive.
if (upperInclusive && isInclusive) {
return 0;
}
return -1;
}
public byte[] getLowerRange() {
return lowerRange;
}
public boolean isLowerInclusive() {
return lowerInclusive;
}
public byte[] getUpperRange() {
return upperRange;
}
public boolean isUpperInclusive() {
return upperInclusive;
}
/** Returns true if either end of the range is unbounded. */
public boolean isUnbound() {
return lowerUnbound() || upperUnbound();
}
// Identity comparison against the UNBOUND sentinel is intentional; an empty
// array that is not the sentinel means NULL_BOUND, not "unbound".
public boolean upperUnbound() {
return upperRange == UNBOUND;
}
public boolean lowerUnbound() {
return lowerRange == UNBOUND;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + Arrays.hashCode(lowerRange);
// 1231/1237 are the standard Boolean.hashCode() values for true/false.
if (lowerRange != null)
result = prime * result + (lowerInclusive ? 1231 : 1237);
result = prime * result + Arrays.hashCode(upperRange);
if (upperRange != null)
result = prime * result + (upperInclusive ? 1231 : 1237);
return result;
}
@Override
public String toString() {
if (isSingleKey()) {
return Bytes.toStringBinary(lowerRange);
}
// Interval notation: '[' / ']' for inclusive, '(' / ')' for exclusive,
// '*' for an unbounded end.
return (lowerInclusive ? "[" :
"(") + (lowerUnbound() ? "*" :
Bytes.toStringBinary(lowerRange)) + " - " + (upperUnbound() ? "*" :
Bytes.toStringBinary(upperRange)) + (upperInclusive ? "]" : ")" );
}
@Override
public boolean equals(Object o) {
if (!(o instanceof KeyRange)) {
return false;
}
KeyRange that = (KeyRange)o;
// Bounds compare by content, inclusivity flags by value.
return Bytes.compareTo(this.lowerRange,that.lowerRange) == 0 && this.lowerInclusive == that.lowerInclusive &&
Bytes.compareTo(this.upperRange, that.upperRange) == 0 && this.upperInclusive == that.upperInclusive;
}
/**
* Returns the intersection of this range and the given range, or EMPTY_RANGE if
* they do not overlap. Returns {@code this} unchanged when this range is already
* contained in the intersection.
*/
public KeyRange intersect(KeyRange range) {
byte[] newLowerRange;
byte[] newUpperRange;
boolean newLowerInclusive;
boolean newUpperInclusive;
// Special case for null, as it is never included in another range
// except for null itself.
if (this == IS_NULL_RANGE && range == IS_NULL_RANGE) {
return IS_NULL_RANGE;
} else if(this == IS_NULL_RANGE || range == IS_NULL_RANGE) {
return EMPTY_RANGE;
}
// The intersection's lower bound is the larger (more restrictive) of the two.
if (lowerUnbound()) {
newLowerRange = range.lowerRange;
newLowerInclusive = range.lowerInclusive;
} else if (range.lowerUnbound()) {
newLowerRange = lowerRange;
newLowerInclusive = lowerInclusive;
} else {
int cmp = Bytes.compareTo(lowerRange, range.lowerRange);
if (cmp != 0 || lowerInclusive == range.lowerInclusive) {
if (cmp <= 0) {
newLowerRange = range.lowerRange;
newLowerInclusive = range.lowerInclusive;
} else {
newLowerRange = lowerRange;
newLowerInclusive = lowerInclusive;
}
} else { // Same lower range, but one is not inclusive
newLowerRange = range.lowerRange;
newLowerInclusive = false;
}
}
// The intersection's upper bound is the smaller (more restrictive) of the two.
if (upperUnbound()) {
newUpperRange = range.upperRange;
newUpperInclusive = range.upperInclusive;
} else if (range.upperUnbound()) {
newUpperRange = upperRange;
newUpperInclusive = upperInclusive;
} else {
int cmp = Bytes.compareTo(upperRange, range.upperRange);
if (cmp != 0 || upperInclusive == range.upperInclusive) {
if (cmp >= 0) {
newUpperRange = range.upperRange;
newUpperInclusive = range.upperInclusive;
} else {
newUpperRange = upperRange;
newUpperInclusive = upperInclusive;
}
} else { // Same upper range, but one is not inclusive
newUpperRange = range.upperRange;
newUpperInclusive = false;
}
}
// Reuse this instance when nothing changed (reference comparison is enough
// since unchanged bounds keep the same array references).
if (newLowerRange == lowerRange && newLowerInclusive == lowerInclusive
&& newUpperRange == upperRange && newUpperInclusive == upperInclusive) {
return this;
}
return getKeyRange(newLowerRange, newLowerInclusive, newUpperRange, newUpperInclusive);
}
/**
* Returns true if the given bounds are the (identity) bounds of EMPTY_RANGE.
*/
public static boolean isDegenerate(byte[] lowerRange, byte[] upperRange) {
return lowerRange == KeyRange.EMPTY_RANGE.getLowerRange() && upperRange == KeyRange.EMPTY_RANGE.getUpperRange();
}
/**
* Returns true if the list is non-empty and every range in it is a single key.
*/
public static boolean areAllSingleKey(List<KeyRange> rowKeyRanges) {
if(rowKeyRanges == null || rowKeyRanges.isEmpty()) {
return false;
}
for(KeyRange rowKeyRange : rowKeyRanges) {
if(!rowKeyRange.isSingleKey()) {
return false;
}
}
return true;
}
/**
* Coalesces the given ranges into a minimal sorted list of disjoint ranges:
* drops EMPTY_RANGE entries, short-circuits on EVERYTHING_RANGE, then sorts and
* merges overlapping ranges and adjacent ranges that share a bound key where
* exactly one side is inclusive.
* @return list of at least size 1
*/
@NonNull
public static List<KeyRange> coalesce(List<KeyRange> keyRanges) {
List<KeyRange> tmp = new ArrayList<KeyRange>();
for (KeyRange keyRange : keyRanges) {
if (EMPTY_RANGE == keyRange) {
continue;
}
if (EVERYTHING_RANGE == keyRange) {
tmp.clear();
tmp.add(keyRange);
break;
}
tmp.add(keyRange);
}
if (tmp.size() == 1) {
return tmp;
}
if (tmp.size() == 0) {
return Collections.singletonList(EMPTY_RANGE);
}
Collections.sort(tmp, COMPARATOR);
// First pass: union ranges whose intersection is non-empty (i.e. overlapping).
List<KeyRange> tmp2 = new ArrayList<KeyRange>();
KeyRange range = tmp.get(0);
for (int i=1; i<tmp.size(); i++) {
KeyRange otherRange = tmp.get(i);
KeyRange intersect = range.intersect(otherRange);
if (EMPTY_RANGE == intersect) {
tmp2.add(range);
range = otherRange;
} else {
range = range.union(otherRange);
}
}
tmp2.add(range);
// Second pass: merge ranges that abut exactly at a shared bound key, e.g.
// (a, b] followed by (b, c) becomes (a, c).
List<KeyRange> tmp3 = new ArrayList<KeyRange>();
range = tmp2.get(0);
for (int i=1; i<tmp2.size(); i++) {
KeyRange otherRange = tmp2.get(i);
assert !range.upperUnbound();
assert !otherRange.lowerUnbound();
if (range.isUpperInclusive() != otherRange.isLowerInclusive()
&& Bytes.equals(range.getUpperRange(), otherRange.getLowerRange())) {
range = KeyRange.getKeyRange(range.getLowerRange(), range.isLowerInclusive(), otherRange.getUpperRange(), otherRange.isUpperInclusive());
} else {
tmp3.add(range);
range = otherRange;
}
}
tmp3.add(range);
return tmp3;
}
/**
* Returns the smallest range that covers both this range and the other: the
* lesser lower bound and the greater upper bound. Note that for disjoint inputs
* the result also covers the gap between them.
*/
public KeyRange union(KeyRange other) {
if (EMPTY_RANGE == other) return this;
if (EMPTY_RANGE == this) return other;
byte[] newLower, newUpper;
boolean newLowerInclusive, newUpperInclusive;
if (this.lowerUnbound() || other.lowerUnbound()) {
newLower = UNBOUND;
newLowerInclusive = false;
} else {
int lowerCmp = Bytes.compareTo(this.lowerRange, other.lowerRange);
if (lowerCmp < 0) {
newLower = lowerRange;
newLowerInclusive = lowerInclusive;
} else if (lowerCmp == 0) {
// Same key: inclusive wins, since it covers more.
newLower = lowerRange;
newLowerInclusive = this.lowerInclusive || other.lowerInclusive;
} else {
newLower = other.lowerRange;
newLowerInclusive = other.lowerInclusive;
}
}
if (this.upperUnbound() || other.upperUnbound()) {
newUpper = UNBOUND;
newUpperInclusive = false;
} else {
int upperCmp = Bytes.compareTo(this.upperRange, other.upperRange);
if (upperCmp > 0) {
newUpper = upperRange;
newUpperInclusive = this.upperInclusive;
} else if (upperCmp == 0) {
// Same key: inclusive wins, since it covers more.
newUpper = upperRange;
newUpperInclusive = this.upperInclusive || other.upperInclusive;
} else {
newUpper = other.upperRange;
newUpperInclusive = other.upperInclusive;
}
}
return KeyRange.getKeyRange(newLower, newLowerInclusive, newUpper, newUpperInclusive);
}
/** Returns a lazily-transformed view of the keys as point ranges. */
public static List<KeyRange> of(List<byte[]> keys) {
return Lists.transform(keys, POINT);
}
/**
* Compares only the upper bounds of the two ranges: unbound sorts last, then
* ascending byte order, then exclusive before inclusive.
*/
public static int compareUpperRange(KeyRange rowKeyRange1,KeyRange rowKeyRange2) {
int result = Boolean.compare(rowKeyRange1.upperUnbound(), rowKeyRange2.upperUnbound());
if (result != 0) {
return result;
}
result = Bytes.BYTES_COMPARATOR.compare(rowKeyRange1.getUpperRange(), rowKeyRange2.getUpperRange());
if (result != 0) {
return result;
}
return Boolean.compare(rowKeyRange1.isUpperInclusive(), rowKeyRange2.isUpperInclusive());
}
/**
* Intersects two range lists with a merge-style sweep: each input list is first
* coalesced (sorted, disjoint), then the lists are walked in tandem, emitting
* every pairwise non-empty intersection and advancing whichever side has the
* smaller upper bound.
* @return list of at least size 1; a singleton EMPTY_RANGE when nothing overlaps
*/
public static List<KeyRange> intersect(List<KeyRange> rowKeyRanges1, List<KeyRange> rowKeyRanges2) {
List<KeyRange> newRowKeyRanges1=coalesce(rowKeyRanges1);
List<KeyRange> newRowKeyRanges2=coalesce(rowKeyRanges2);
Iterator<KeyRange> iter1=newRowKeyRanges1.iterator();
Iterator<KeyRange> iter2=newRowKeyRanges2.iterator();
List<KeyRange> result = new LinkedList<KeyRange>();
KeyRange rowKeyRange1=null;
KeyRange rowKeyRange2=null;
while(true) {
if(rowKeyRange1==null) {
if(!iter1.hasNext()) {
break;
}
rowKeyRange1=iter1.next();
}
if(rowKeyRange2==null) {
if(!iter2.hasNext()) {
break;
}
rowKeyRange2=iter2.next();
}
KeyRange intersectedRowKeyRange=rowKeyRange1.intersect(rowKeyRange2);
if(intersectedRowKeyRange!=EMPTY_RANGE) {
result.add(intersectedRowKeyRange);
}
int cmp=compareUpperRange(rowKeyRange1, rowKeyRange2);
if(cmp < 0) {
//move iter1
rowKeyRange1=null;
} else if(cmp > 0) {
//move iter2
rowKeyRange2=null;
} else {
//move iter1 and iter2
rowKeyRange1=rowKeyRange2=null;
}
}
if (result.size() == 0) {
return Collections.singletonList(KeyRange.EMPTY_RANGE);
}
return result;
}
/**
* Returns the range over the byte-inverted keys (for DESC sort order): each bound
* key is inverted via SortOrder.invert and the bounds swap roles, with the old
* upper bound (and its inclusivity) becoming the new lower bound and vice versa.
*/
public KeyRange invert() {
// these special ranges do not get inverted because we
// represent NULL in the same way for ASC and DESC.
if (this == IS_NOT_NULL_RANGE || this == IS_NULL_RANGE) {
return this;
}
byte[] lowerBound = this.getLowerRange();
if (!this.lowerUnbound()) {
lowerBound = SortOrder.invert(lowerBound, 0, lowerBound.length);
}
byte[] upperBound;
if (this.isSingleKey()) {
// Single key: both bounds are the same key, so invert only once.
upperBound = lowerBound;
} else {
upperBound = this.getUpperRange();
if (!this.upperUnbound()) {
upperBound = SortOrder.invert(upperBound, 0, upperBound.length);
}
}
return KeyRange.getKeyRange(upperBound, this.isUpperInclusive(), lowerBound, this.isLowerInclusive());
}
/**
* Wire format (see writeBound): per bound, a vint of 0 means unbound; a positive
* vint n means an inclusive bound of n-1 key bytes; a negative vint -n means an
* exclusive bound of n-1 key bytes. The key bytes follow the vint.
*/
@Override
public void readFields(DataInput in) throws IOException {
int len = WritableUtils.readVInt(in);
if (len == 0) {
lowerRange = KeyRange.UNBOUND;
lowerInclusive = false;
} else {
if (len < 0) {
lowerInclusive = false;
lowerRange = new byte[-len - 1];
in.readFully(lowerRange);
} else {
lowerInclusive = true;
lowerRange = new byte[len - 1];
in.readFully(lowerRange);
}
}
len = WritableUtils.readVInt(in);
if (len == 0) {
upperRange = KeyRange.UNBOUND;
upperInclusive = false;
} else {
if (len < 0) {
upperInclusive = false;
upperRange = new byte[-len - 1];
in.readFully(upperRange);
} else {
upperInclusive = true;
upperRange = new byte[len - 1];
in.readFully(upperRange);
}
}
// Recompute isSingleKey from the deserialized bounds.
init();
}
// Serializes one bound in the format documented on readFields.
private void writeBound(Bound bound, DataOutput out) throws IOException {
// Encode unbound by writing a zero
if (isUnbound(bound)) {
WritableUtils.writeVInt(out, 0);
return;
}
// Otherwise, inclusive is positive and exclusive is negative, offset by 1
byte[] range = getRange(bound);
if (isInclusive(bound)){
WritableUtils.writeVInt(out, range.length+1);
} else {
WritableUtils.writeVInt(out, -(range.length+1));
}
out.write(range);
}
@Override
public void write(DataOutput out) throws IOException {
writeBound(Bound.LOWER, out);
writeBound(Bound.UPPER, out);
}
/**
* Returns a new range with the given bytes prepended to each bound that is not
* unbound, e.g. for prefixing a leading row-key column. Returns {@code this}
* unchanged for a zero-length prefix or for EVERYTHING_RANGE.
*/
public KeyRange prependRange(byte[] bytes, int offset, int length) {
if (length == 0 || this == EVERYTHING_RANGE) {
return this;
}
byte[] lowerRange = this.getLowerRange();
if (!this.lowerUnbound()) {
byte[] newLowerRange = new byte[length + lowerRange.length];
System.arraycopy(bytes, offset, newLowerRange, 0, length);
System.arraycopy(lowerRange, 0, newLowerRange, length, lowerRange.length);
lowerRange = newLowerRange;
}
byte[] upperRange = this.getUpperRange();
if (!this.upperUnbound()) {
byte[] newUpperRange = new byte[length + upperRange.length];
System.arraycopy(bytes, offset, newUpperRange, 0, length);
System.arraycopy(upperRange, 0, newUpperRange, length, upperRange.length);
upperRange = newUpperRange;
}
return getKeyRange(lowerRange, lowerInclusive, upperRange, upperInclusive);
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.apm.agent.core.context;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.atomic.AtomicIntegerFieldUpdater;
import java.util.concurrent.locks.ReentrantLock;
import org.apache.skywalking.apm.agent.core.boot.ServiceManager;
import org.apache.skywalking.apm.agent.core.conf.Config;
import org.apache.skywalking.apm.agent.core.context.ids.DistributedTraceId;
import org.apache.skywalking.apm.agent.core.context.ids.PropagatedTraceId;
import org.apache.skywalking.apm.agent.core.context.trace.AbstractSpan;
import org.apache.skywalking.apm.agent.core.context.trace.AbstractTracingSpan;
import org.apache.skywalking.apm.agent.core.context.trace.EntrySpan;
import org.apache.skywalking.apm.agent.core.context.trace.ExitSpan;
import org.apache.skywalking.apm.agent.core.context.trace.ExitTypeSpan;
import org.apache.skywalking.apm.agent.core.context.trace.LocalSpan;
import org.apache.skywalking.apm.agent.core.context.trace.NoopExitSpan;
import org.apache.skywalking.apm.agent.core.context.trace.NoopSpan;
import org.apache.skywalking.apm.agent.core.context.trace.TraceSegment;
import org.apache.skywalking.apm.agent.core.context.trace.TraceSegmentRef;
import org.apache.skywalking.apm.agent.core.logging.api.ILog;
import org.apache.skywalking.apm.agent.core.logging.api.LogManager;
import org.apache.skywalking.apm.agent.core.profile.ProfileStatusReference;
import org.apache.skywalking.apm.agent.core.profile.ProfileTaskExecutionService;
import org.apache.skywalking.apm.util.StringUtil;
/**
* The <code>TracingContext</code> represents a core tracing logic controller. It builds the final {@link
* TracingContext} through a stack mechanism, similar to the way a call stack works.
* <p>
* In opentracing concept, it means, all spans in a segment tracing context(thread) are CHILD_OF relationship, but no
* FOLLOW_OF.
* <p>
* In skywalking core concept, FOLLOW_OF is an abstract concept when cross-process MQ or cross-thread async/batch tasks
* happen, we used {@link TraceSegmentRef} for these scenarios. Check {@link TraceSegmentRef} which is from {@link
* ContextCarrier} or {@link ContextSnapshot}.
*/
public class TracingContext implements AbstractTracerContext {
private static final ILog LOGGER = LogManager.getLogger(TracingContext.class);
private long lastWarningTimestamp = 0;
/**
* @see ProfileTaskExecutionService
*/
private static ProfileTaskExecutionService PROFILE_TASK_EXECUTION_SERVICE;
/**
* The final {@link TraceSegment}, which includes all finished spans.
*/
private TraceSegment segment;
/**
* Active spans stored in a Stack, usually called 'ActiveSpanStack'. This {@link LinkedList} is the in-memory
* storage-structure. <p> I use {@link LinkedList#removeLast()}, {@link LinkedList#addLast(Object)} and {@link
* LinkedList#getLast()} instead of {@link #pop()}, {@link #push(AbstractSpan)}, {@link #peek()}
*/
private LinkedList<AbstractSpan> activeSpanStack = new LinkedList<>();
/**
* @since 7.0.0 SkyWalking support lazy injection through {@link ExitTypeSpan#inject(ContextCarrier)}. Due to that,
* the {@link #activeSpanStack} could be blank by then, this is a pointer forever to the first span, even the main
* thread tracing has been finished.
*/
private AbstractSpan firstSpan = null;
/**
* A counter for the next span.
*/
private int spanIdGenerator;
/**
* The counter indicates
*/
@SuppressWarnings("unused") // updated by ASYNC_SPAN_COUNTER_UPDATER
private volatile int asyncSpanCounter;
private static final AtomicIntegerFieldUpdater<TracingContext> ASYNC_SPAN_COUNTER_UPDATER =
AtomicIntegerFieldUpdater.newUpdater(TracingContext.class, "asyncSpanCounter");
private volatile boolean isRunningInAsyncMode;
private volatile ReentrantLock asyncFinishLock;
private volatile boolean running;
private final long createTime;
/**
* profile status
*/
private final ProfileStatusReference profileStatus;
private final CorrelationContext correlationContext;
private final ExtensionContext extensionContext;
/**
* Initialize all fields with default value.
*/
TracingContext(String firstOPName) {
this.segment = new TraceSegment();
this.spanIdGenerator = 0;
isRunningInAsyncMode = false;
createTime = System.currentTimeMillis();
running = true;
// profiling status
if (PROFILE_TASK_EXECUTION_SERVICE == null) {
PROFILE_TASK_EXECUTION_SERVICE = ServiceManager.INSTANCE.findService(ProfileTaskExecutionService.class);
}
this.profileStatus = PROFILE_TASK_EXECUTION_SERVICE.addProfiling(
this, segment.getTraceSegmentId(), firstOPName);
this.correlationContext = new CorrelationContext();
this.extensionContext = new ExtensionContext();
}
/**
* Inject the context into the given carrier, only when the active span is an exit one.
*
* @param carrier to carry the context for crossing process.
* @throws IllegalStateException if (1) the active span isn't an exit one. (2) doesn't include peer. Ref to {@link
* AbstractTracerContext#inject(ContextCarrier)}
*/
@Override
public void inject(ContextCarrier carrier) {
this.inject(this.activeSpan(), carrier);
}
/**
* Inject the context into the given carrier and given span, only when the active span is an exit one. This method
* wouldn't be opened in {@link ContextManager} like {@link #inject(ContextCarrier)}, it is only supported to be
* called inside the {@link ExitTypeSpan#inject(ContextCarrier)}
*
* @param carrier to carry the context for crossing process.
* @param exitSpan to represent the scope of current injection.
* @throws IllegalStateException if (1) the span isn't an exit one. (2) doesn't include peer.
*/
public void inject(AbstractSpan exitSpan, ContextCarrier carrier) {
if (!exitSpan.isExit()) {
throw new IllegalStateException("Inject can be done only in Exit Span");
}
ExitTypeSpan spanWithPeer = (ExitTypeSpan) exitSpan;
String peer = spanWithPeer.getPeer();
if (StringUtil.isEmpty(peer)) {
throw new IllegalStateException("Exit span doesn't include meaningful peer information.");
}
carrier.setTraceId(getReadablePrimaryTraceId());
carrier.setTraceSegmentId(this.segment.getTraceSegmentId());
carrier.setSpanId(exitSpan.getSpanId());
carrier.setParentService(Config.Agent.SERVICE_NAME);
carrier.setParentServiceInstance(Config.Agent.INSTANCE_NAME);
carrier.setParentEndpoint(first().getOperationName());
carrier.setAddressUsedAtClient(peer);
this.correlationContext.inject(carrier);
this.extensionContext.inject(carrier);
}
/**
* Extract the carrier to build the reference for the pre segment.
*
* @param carrier carried the context from a cross-process segment. Ref to {@link AbstractTracerContext#extract(ContextCarrier)}
*/
@Override
public void extract(ContextCarrier carrier) {
TraceSegmentRef ref = new TraceSegmentRef(carrier);
this.segment.ref(ref);
this.segment.relatedGlobalTraces(new PropagatedTraceId(carrier.getTraceId()));
AbstractSpan span = this.activeSpan();
if (span instanceof EntrySpan) {
span.ref(ref);
}
this.correlationContext.extract(carrier);
this.extensionContext.extract(carrier);
this.extensionContext.handle(span);
}
/**
* Capture the snapshot of current context.
*
* @return the snapshot of context for cross-thread propagation Ref to {@link AbstractTracerContext#capture()}
*/
@Override
public ContextSnapshot capture() {
ContextSnapshot snapshot = new ContextSnapshot(
segment.getTraceSegmentId(),
activeSpan().getSpanId(),
getPrimaryTraceId(),
first().getOperationName(),
this.correlationContext,
this.extensionContext
);
return snapshot;
}
/**
* Continue the context from the given snapshot of parent thread.
*
* @param snapshot from {@link #capture()} in the parent thread. Ref to {@link AbstractTracerContext#continued(ContextSnapshot)}
*/
@Override
public void continued(ContextSnapshot snapshot) {
if (snapshot.isValid()) {
TraceSegmentRef segmentRef = new TraceSegmentRef(snapshot);
this.segment.ref(segmentRef);
this.activeSpan().ref(segmentRef);
this.segment.relatedGlobalTraces(snapshot.getTraceId());
this.correlationContext.continued(snapshot);
this.extensionContext.continued(snapshot);
this.extensionContext.handle(this.activeSpan());
}
}
/**
* @return the first global trace id.
*/
@Override
public String getReadablePrimaryTraceId() {
return getPrimaryTraceId().getId();
}
private DistributedTraceId getPrimaryTraceId() {
return segment.getRelatedGlobalTraces().get(0);
}
/**
* Create an entry span
*
* @param operationName most likely a service name
* @return span instance. Ref to {@link EntrySpan}
*/
@Override
public AbstractSpan createEntrySpan(final String operationName) {
if (isLimitMechanismWorking()) {
NoopSpan span = new NoopSpan();
return push(span);
}
AbstractSpan entrySpan;
TracingContext owner = this;
final AbstractSpan parentSpan = peek();
final int parentSpanId = parentSpan == null ? -1 : parentSpan.getSpanId();
if (parentSpan != null && parentSpan.isEntry()) {
/*
* Only add the profiling recheck on creating entry span,
* as the operation name could be overrided.
*/
profilingRecheck(parentSpan, operationName);
parentSpan.setOperationName(operationName);
entrySpan = parentSpan;
return entrySpan.start();
} else {
entrySpan = new EntrySpan(
spanIdGenerator++, parentSpanId,
operationName, owner
);
entrySpan.start();
return push(entrySpan);
}
}
/**
* Create a local span
*
* @param operationName most likely a local method signature, or business name.
* @return the span represents a local logic block. Ref to {@link LocalSpan}
*/
@Override
public AbstractSpan createLocalSpan(final String operationName) {
if (isLimitMechanismWorking()) {
NoopSpan span = new NoopSpan();
return push(span);
}
AbstractSpan parentSpan = peek();
final int parentSpanId = parentSpan == null ? -1 : parentSpan.getSpanId();
AbstractTracingSpan span = new LocalSpan(spanIdGenerator++, parentSpanId, operationName, this);
span.start();
return push(span);
}
/**
* Create an exit span
*
* @param operationName most likely a service name of remote
* @param remotePeer the network id(ip:port, hostname:port or ip1:port1,ip2,port, etc.). Remote peer could be set
* later, but must be before injecting.
* @return the span represent an exit point of this segment.
* @see ExitSpan
*/
@Override
public AbstractSpan createExitSpan(final String operationName, final String remotePeer) {
if (isLimitMechanismWorking()) {
NoopExitSpan span = new NoopExitSpan(remotePeer);
return push(span);
}
AbstractSpan exitSpan;
AbstractSpan parentSpan = peek();
TracingContext owner = this;
if (parentSpan != null && parentSpan.isExit()) {
exitSpan = parentSpan;
} else {
final int parentSpanId = parentSpan == null ? -1 : parentSpan.getSpanId();
exitSpan = new ExitSpan(spanIdGenerator++, parentSpanId, operationName, remotePeer, owner);
push(exitSpan);
}
exitSpan.start();
return exitSpan;
}
/**
* @return the active span of current context, the top element of {@link #activeSpanStack}
*/
@Override
public AbstractSpan activeSpan() {
AbstractSpan span = peek();
if (span == null) {
throw new IllegalStateException("No active span.");
}
return span;
}
/**
* Stop the given span, if and only if this one is the top element of {@link #activeSpanStack}. Because the tracing
* core must make sure the span must match in a stack module, like any program did.
*
* @param span to finish
*/
@Override
public boolean stopSpan(AbstractSpan span) {
AbstractSpan lastSpan = peek();
if (lastSpan == span) {
if (lastSpan instanceof AbstractTracingSpan) {
AbstractTracingSpan toFinishSpan = (AbstractTracingSpan) lastSpan;
if (toFinishSpan.finish(segment)) {
pop();
}
} else {
pop();
}
} else {
throw new IllegalStateException("Stopping the unexpected span = " + span);
}
finish();
return activeSpanStack.isEmpty();
}
/**
 * Switch this context into async mode (lazily, on first call) and record one more
 * outstanding async span; a matching {@link #asyncStop} must decrement the counter.
 *
 * @return this context, for chaining by the async span machinery
 */
@Override
public AbstractTracerContext awaitFinishAsync() {
    // Double-checked locking: the finish lock and counter are only set up once,
    // on the first transition into async mode.
    if (!isRunningInAsyncMode) {
        synchronized (this) {
            if (!isRunningInAsyncMode) {
                asyncFinishLock = new ReentrantLock();
                ASYNC_SPAN_COUNTER_UPDATER.set(this, 0);
                isRunningInAsyncMode = true;
            }
        }
    }
    ASYNC_SPAN_COUNTER_UPDATER.incrementAndGet(this);
    return this;
}
/**
 * Mark one awaited asynchronous span as stopped and attempt to finish the context.
 *
 * @param span the async span that completed (only the counter is used here)
 */
@Override
public void asyncStop(AsyncSpan span) {
    // finish() only closes the segment once the main thread is done AND this
    // counter has returned to zero.
    ASYNC_SPAN_COUNTER_UPDATER.decrementAndGet(this);
    finish();
}
/**
 * @return the correlation context carried by this tracing context.
 */
@Override
public CorrelationContext getCorrelationContext() {
    return this.correlationContext;
}
/**
 * Re-check current trace need profiling, encase third part plugin change the operation name.
 *
 * @param span current modify span
 * @param operationName change to operation name
 */
public void profilingRecheck(AbstractSpan span, String operationName) {
    // Profiling decisions are keyed off the first (entry) span only, so every
    // span other than span-id 0 is ignored.
    if (span.getSpanId() == 0) {
        PROFILE_TASK_EXECUTION_SERVICE.profilingRecheck(this, segment.getTraceSegmentId(), operationName);
    }
}
/**
 * Finish this context, and notify all {@link TracingContextListener}s, managed by {@link
 * TracingContext.ListenerManager} and {@link TracingContext.TracingThreadListenerManager}
 */
private void finish() {
    // In async mode this races with asyncStop() on other threads, so the finish
    // decision is serialized with a dedicated lock.
    if (isRunningInAsyncMode) {
        asyncFinishLock.lock();
    }
    try {
        boolean isFinishedInMainThread = activeSpanStack.isEmpty() && running;
        if (isFinishedInMainThread) {
            /*
             * Notify after tracing finished in the main thread.
             */
            TracingThreadListenerManager.notifyFinish(this);
        }
        // The segment itself is only closed once the main thread is done and,
        // when in async mode, every awaited async span has stopped.
        if (isFinishedInMainThread && (!isRunningInAsyncMode || asyncSpanCounter == 0)) {
            TraceSegment finishedSegment = segment.finish(isLimitMechanismWorking());
            TracingContext.ListenerManager.notifyFinish(finishedSegment);
            running = false;
        }
    } finally {
        if (isRunningInAsyncMode) {
            asyncFinishLock.unlock();
        }
    }
}
/**
 * The <code>ListenerManager</code> represents an event notify for every registered listener, which are notified
 * when the <code>TracingContext</code> finished, and {@link #segment} is ready for further process.
 */
public static class ListenerManager {
    // NOTE(review): add/remove are synchronized but notifyFinish iterates without
    // any lock; this assumes listeners are registered during agent boot before
    // segments start finishing — TODO confirm.
    private static List<TracingContextListener> LISTENERS = new LinkedList<>();

    /**
     * Add the given {@link TracingContextListener} to {@link #LISTENERS} list.
     *
     * @param listener the new listener.
     */
    public static synchronized void add(TracingContextListener listener) {
        LISTENERS.add(listener);
    }

    /**
     * Notify the {@link TracingContext.ListenerManager} about the given {@link TraceSegment} have finished. And
     * trigger {@link TracingContext.ListenerManager} to notify all {@link #LISTENERS} 's {@link
     * TracingContextListener#afterFinished(TraceSegment)}
     *
     * @param finishedSegment the segment that has finished
     */
    static void notifyFinish(TraceSegment finishedSegment) {
        for (TracingContextListener listener : LISTENERS) {
            listener.afterFinished(finishedSegment);
        }
    }

    /**
     * Clear the given {@link TracingContextListener}
     */
    public static synchronized void remove(TracingContextListener listener) {
        LISTENERS.remove(listener);
    }
}
/**
 * The <code>ListenerManager</code> represents an event notify for every registered listener, which are notified
 * when the main thread of a {@link TracingContext} finishes.
 */
public static class TracingThreadListenerManager {
    // NOTE(review): same pattern as ListenerManager — iteration in notifyFinish
    // is unsynchronized; assumes registration happens at boot (TODO confirm).
    private static List<TracingThreadListener> LISTENERS = new LinkedList<>();

    /** Register a listener for main-thread-finish events. */
    public static synchronized void add(TracingThreadListener listener) {
        LISTENERS.add(listener);
    }

    /** Notify every registered listener that the given context's main thread finished. */
    static void notifyFinish(TracingContext finishedContext) {
        for (TracingThreadListener listener : LISTENERS) {
            listener.afterMainThreadFinish(finishedContext);
        }
    }

    /** Unregister the given listener. */
    public static synchronized void remove(TracingThreadListener listener) {
        LISTENERS.remove(listener);
    }
}
/**
 * @return the top element of 'ActiveSpanStack', and remove it.
 */
private AbstractSpan pop() {
    // Callers are responsible for ensuring the stack is non-empty.
    return activeSpanStack.removeLast();
}
/**
 * Add a new Span at the top of 'ActiveSpanStack'
 *
 * @param span the {@code span} to push
 * @return the same {@code span}, for call chaining
 */
private AbstractSpan push(AbstractSpan span) {
    // Remember the very first span of this segment; first() exposes it.
    if (firstSpan == null) {
        firstSpan = span;
    }
    activeSpanStack.addLast(span);
    // Let the extension context observe every span entering the stack.
    this.extensionContext.handle(span);
    return span;
}
/**
 * @return the top element of 'ActiveSpanStack' only, or {@code null} when the stack is empty.
 */
private AbstractSpan peek() {
    return activeSpanStack.isEmpty() ? null : activeSpanStack.getLast();
}
/**
 * @return the first span created in this context, or {@code null} if none has been created yet.
 */
private AbstractSpan first() {
    return firstSpan;
}
/**
 * Whether the per-segment span-count protection has kicked in.
 * When the segment already holds {@code Config.Agent.SPAN_LIMIT_PER_SEGMENT}
 * spans, further span creation is suppressed and a throttled warning (at most
 * one per 30 seconds) is logged with a synthetic stack trace for diagnosis.
 *
 * @return true when the limit is reached and new spans must be no-ops
 */
private boolean isLimitMechanismWorking() {
    if (spanIdGenerator < Config.Agent.SPAN_LIMIT_PER_SEGMENT) {
        return false;
    }
    long currentTimeMillis = System.currentTimeMillis();
    if (currentTimeMillis - lastWarningTimestamp > 30 * 1000) {
        LOGGER.warn(
            new RuntimeException("Shadow tracing context. Thread dump"),
            "More than {} spans required to create", Config.Agent.SPAN_LIMIT_PER_SEGMENT
        );
        lastWarningTimestamp = currentTimeMillis;
    }
    return true;
}
/**
 * @return the creation timestamp recorded when this context was constructed.
 */
public long createTime() {
    return this.createTime;
}
/**
 * @return the profiling status reference associated with this tracing context.
 */
public ProfileStatusReference profileStatus() {
    return this.profileStatus;
}
}
|
|
package net.cattaka.android.adaptertoolbox.adapter.listener;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.v7.widget.RecyclerView;
import android.text.Editable;
import android.text.TextWatcher;
import android.view.KeyEvent;
import android.view.View;
import android.widget.AdapterView;
import android.widget.CompoundButton;
import android.widget.EditText;
import android.widget.RadioGroup;
import android.widget.SeekBar;
import android.widget.TextView;
/**
* Created by cattaka on 2016/05/12.
*/
/**
 * Forwards view-level UI events (click, long click, checked-change, seek, item
 * selection, editor action, text change) raised inside a {@link RecyclerView}
 * item to a single {@link ListenerRelay}. For every event the
 * {@link RecyclerView.ViewHolder} containing the originating view is resolved
 * first; events whose view is not inside the attached RecyclerView are dropped,
 * as are all events while no relay is set.
 * <p>
 * Created by cattaka on 2016/05/12.
 *
 * @param <A>  adapter type attached to the RecyclerView
 * @param <VH> view holder type produced by that adapter
 */
public class ForwardingListener<A extends RecyclerView.Adapter<? extends VH>, VH extends RecyclerView.ViewHolder>
        implements IForwardingListener<A, VH>,
        View.OnClickListener,
        View.OnLongClickListener,
        RadioGroup.OnCheckedChangeListener,
        CompoundButton.OnCheckedChangeListener,
        SeekBar.OnSeekBarChangeListener,
        AdapterView.OnItemSelectedListener,
        TextView.OnEditorActionListener {
    // Supplies the attached RecyclerView and its adapter.
    private IProvider<A, VH> mProvider;
    // Relay receiving every forwarded event; events are dropped while it is null.
    private ListenerRelay<A, VH> mListenerRelay;

    public ForwardingListener() {
    }

    protected ListenerRelay<A, VH> getListenerRelay() {
        return mListenerRelay;
    }

    public void setListenerRelay(@Nullable ListenerRelay<A, VH> listenerRelay) {
        mListenerRelay = listenerRelay;
    }

    protected IProvider<A, VH> getProvider() {
        return mProvider;
    }

    @Override
    public void setProvider(@NonNull IProvider<A, VH> provider) {
        mProvider = provider;
    }

    /**
     * Resolves the view holder containing {@code view}, or null when no
     * RecyclerView is attached or the view is not inside one of its items.
     */
    @Nullable
    private VH resolveViewHolder(@NonNull View view) {
        RecyclerView recyclerView = mProvider.getAttachedRecyclerView();
        @SuppressWarnings("unchecked")
        VH vh = (VH) (recyclerView != null ? findContainingViewHolder(recyclerView, view) : null);
        return vh;
    }

    /**
     * @see android.view.View.OnClickListener
     */
    @Override
    public void onClick(View view) {
        if (mListenerRelay != null) {
            VH vh = resolveViewHolder(view);
            if (vh != null) {
                mListenerRelay.onClick(mProvider.getAttachedRecyclerView(), mProvider.getAdapter(), vh, view);
            }
        }
    }

    /**
     * @see android.view.View.OnLongClickListener
     */
    @Override
    public boolean onLongClick(View view) {
        if (mListenerRelay != null) {
            VH vh = resolveViewHolder(view);
            if (vh != null) {
                return mListenerRelay.onLongClick(mProvider.getAttachedRecyclerView(), mProvider.getAdapter(), vh, view);
            }
        }
        return false;
    }

    /**
     * @see android.widget.RadioGroup.OnCheckedChangeListener
     */
    @Override
    public void onCheckedChanged(RadioGroup group, int checkedId) {
        if (mListenerRelay != null) {
            VH vh = resolveViewHolder(group);
            if (vh != null) {
                mListenerRelay.onCheckedChanged(mProvider.getAttachedRecyclerView(), mProvider.getAdapter(), vh, group, checkedId);
            }
        }
    }

    /**
     * @see android.widget.CompoundButton.OnCheckedChangeListener
     */
    @Override
    public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
        if (mListenerRelay != null) {
            VH vh = resolveViewHolder(buttonView);
            if (vh != null) {
                mListenerRelay.onCheckedChanged(mProvider.getAttachedRecyclerView(), mProvider.getAdapter(), vh, buttonView, isChecked);
            }
        }
    }

    /**
     * @see android.widget.SeekBar.OnSeekBarChangeListener
     */
    @Override
    public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
        if (mListenerRelay != null) {
            VH vh = resolveViewHolder(seekBar);
            if (vh != null) {
                mListenerRelay.onProgressChanged(mProvider.getAttachedRecyclerView(), mProvider.getAdapter(), vh, seekBar, progress, fromUser);
            }
        }
    }

    /**
     * @see android.widget.SeekBar.OnSeekBarChangeListener
     */
    @Override
    public void onStartTrackingTouch(SeekBar seekBar) {
        if (mListenerRelay != null) {
            VH vh = resolveViewHolder(seekBar);
            if (vh != null) {
                mListenerRelay.onStartTrackingTouch(mProvider.getAttachedRecyclerView(), mProvider.getAdapter(), vh, seekBar);
            }
        }
    }

    /**
     * @see android.widget.SeekBar.OnSeekBarChangeListener
     */
    @Override
    public void onStopTrackingTouch(SeekBar seekBar) {
        if (mListenerRelay != null) {
            VH vh = resolveViewHolder(seekBar);
            if (vh != null) {
                mListenerRelay.onStopTrackingTouch(mProvider.getAttachedRecyclerView(), mProvider.getAdapter(), vh, seekBar);
            }
        }
    }

    /**
     * @see android.widget.AdapterView.OnItemSelectedListener
     */
    @Override
    public void onNothingSelected(AdapterView<?> parent) {
        if (mListenerRelay != null) {
            VH vh = resolveViewHolder(parent);
            if (vh != null) {
                mListenerRelay.onNothingSelected(mProvider.getAttachedRecyclerView(), mProvider.getAdapter(), vh, parent);
            }
        }
    }

    /**
     * @see android.widget.AdapterView.OnItemSelectedListener
     */
    @Override
    public void onItemSelected(AdapterView<?> parent, View view, int position, long id) {
        if (mListenerRelay != null) {
            VH vh = resolveViewHolder(parent);
            if (vh != null) {
                mListenerRelay.onItemSelected(mProvider.getAttachedRecyclerView(), mProvider.getAdapter(), vh, parent, view, position, id);
            }
        }
    }

    /**
     * @see android.widget.TextView.OnEditorActionListener
     */
    @Override
    public boolean onEditorAction(TextView v, int actionId, KeyEvent event) {
        if (mListenerRelay != null) {
            VH vh = resolveViewHolder(v);
            if (vh != null) {
                return mListenerRelay.onEditorAction(mProvider.getAttachedRecyclerView(), mProvider.getAdapter(), vh, v, actionId, event);
            }
        }
        return false;
    }

    /**
     * Attaches a {@link TextWatcher} to {@code target} that forwards all three
     * text-change callbacks to the relay.
     *
     * @param target the EditText whose text changes should be forwarded
     * @see TextWatcher
     */
    public void addTextChangedListener(@NonNull final EditText target) {
        target.addTextChangedListener(new TextWatcher() {
            @Override
            public void beforeTextChanged(CharSequence s, int start, int count, int after) {
                if (mListenerRelay != null) {
                    VH vh = resolveViewHolder(target);
                    if (vh != null) {
                        mListenerRelay.beforeTextChanged(mProvider.getAttachedRecyclerView(), mProvider.getAdapter(), vh, target, s, start, count, after);
                    }
                }
            }

            @Override
            public void onTextChanged(CharSequence s, int start, int before, int count) {
                if (mListenerRelay != null) {
                    VH vh = resolveViewHolder(target);
                    if (vh != null) {
                        // BUG FIX: previously passed (start, count, count), dropping the
                        // 'before' argument of TextWatcher#onTextChanged; forward it as-is.
                        mListenerRelay.onTextChanged(mProvider.getAttachedRecyclerView(), mProvider.getAdapter(), vh, target, s, start, before, count);
                    }
                }
            }

            @Override
            public void afterTextChanged(Editable s) {
                if (mListenerRelay != null) {
                    VH vh = resolveViewHolder(target);
                    if (vh != null) {
                        mListenerRelay.afterTextChanged(mProvider.getAttachedRecyclerView(), mProvider.getAdapter(), vh, target, s);
                    }
                }
            }
        });
    }

    /**
     * Walks up the view hierarchy from {@code view} until the direct child of
     * {@code recyclerView} is found, then returns that child's view holder.
     *
     * @return the containing view holder, or null when the view is not inside
     *         the given RecyclerView
     */
    @Nullable
    public static RecyclerView.ViewHolder findContainingViewHolder(@NonNull RecyclerView recyclerView, @NonNull View view) {
        View v = view;
        while (v != null && v.getParent() instanceof View) {
            if (v.getParent() == recyclerView) {
                return recyclerView.getChildViewHolder(v);
            }
            v = (View) v.getParent();
        }
        return null;
    }
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ambari.annotations;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReadWriteLock;
import org.apache.ambari.annotations.TransactionalLock.LockArea;
import org.apache.ambari.annotations.TransactionalLock.LockType;
import org.apache.ambari.server.AmbariException;
import org.apache.ambari.server.H2DatabaseCleaner;
import org.apache.ambari.server.orm.GuiceJpaInitializer;
import org.apache.ambari.server.orm.InMemoryDefaultTestModule;
import org.apache.ambari.server.orm.TransactionalLocks;
import org.apache.ambari.server.orm.dao.HostRoleCommandDAO;
import org.easymock.EasyMock;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import com.google.inject.Binder;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.google.inject.Module;
import com.google.inject.persist.Transactional;
import com.google.inject.util.Modules;
/**
* Tests {@link TransactionalLock} and associated classes.
*/
public class TransactionalLockInterceptorTest {
    private Injector m_injector;

    @Before
    public void setup() throws Exception {
        // Override the default test module so TransactionalLocks is a nice mock
        // (see MockModule below); individual tests re-fetch it and add strict
        // expectations on top.
        m_injector = Guice.createInjector(
            Modules.override(new InMemoryDefaultTestModule()).with(new MockModule()));
        m_injector.getInstance(GuiceJpaInitializer.class);
    }

    @After
    public void teardown() throws AmbariException, SQLException {
        H2DatabaseCleaner.clearDatabaseAndStopPersistenceService(m_injector);
    }

    /**
     * Tests that the {@link Transactional} and {@link TransactionalLock}
     * annotations cause the interceptors to lock the right area.
     *
     * @throws Throwable
     */
    @Test
    public void testTransactionalLockInvocation() throws Throwable {
        // create mocks
        TransactionalLocks transactionalLocks = m_injector.getInstance(TransactionalLocks.class);
        ReadWriteLock readWriteLock = EasyMock.createStrictMock(ReadWriteLock.class);
        Lock readLock = EasyMock.createStrictMock(Lock.class);
        Lock writeLock = EasyMock.createStrictMock(Lock.class);
        // expectations
        // getLock/writeLock are hit twice (lock + unlock paths) but the write
        // lock itself must be acquired and released exactly once.
        EasyMock.expect(transactionalLocks.getLock(LockArea.HRC_STATUS_CACHE)).andReturn(readWriteLock).times(2);
        EasyMock.expect(readWriteLock.writeLock()).andReturn(writeLock).times(2);
        writeLock.lock();
        EasyMock.expectLastCall().once();
        writeLock.unlock();
        EasyMock.expectLastCall().once();
        // replay
        EasyMock.replay(transactionalLocks, readWriteLock, readLock, writeLock);
        // invoke method with annotations
        HostRoleCommandDAO hostRoleCommandDAO = m_injector.getInstance(HostRoleCommandDAO.class);
        hostRoleCommandDAO.mergeAll(new ArrayList<>());
        // verify locks are called
        EasyMock.verify(transactionalLocks, readWriteLock, readLock, writeLock);
    }

    /**
     * Tests that a {@link TransactionalLock} called within the constructs of an
     * earlier transaction will still lock.
     *
     * @throws Throwable
     */
    @Test
    public void testNestedTransactional() throws Throwable {
        // create mocks
        TransactionalLocks transactionalLocks = m_injector.getInstance(TransactionalLocks.class);
        ReadWriteLock readWriteLock = EasyMock.createStrictMock(ReadWriteLock.class);
        Lock readLock = EasyMock.createStrictMock(Lock.class);
        Lock writeLock = EasyMock.createStrictMock(Lock.class);
        // expectations
        EasyMock.expect(transactionalLocks.getLock(LockArea.HRC_STATUS_CACHE)).andReturn(readWriteLock).times(2);
        EasyMock.expect(readWriteLock.writeLock()).andReturn(writeLock).times(2);
        writeLock.lock();
        EasyMock.expectLastCall().once();
        writeLock.unlock();
        EasyMock.expectLastCall().once();
        // replay
        EasyMock.replay(transactionalLocks, readWriteLock, readLock, writeLock);
        // invoke method with annotations
        TestObject testObject = m_injector.getInstance(TestObject.class);
        testObject.testLockMethodAsChildOfActiveTransaction();
        // verify locks are called
        EasyMock.verify(transactionalLocks, readWriteLock, readLock, writeLock);
    }

    /**
     * Tests that a {@link TransactionalLock} called within the constructs of an
     * earlier transaction will still lock.
     *
     * @throws Throwable
     */
    @Test
    public void testMultipleLocks() throws Throwable {
        // create mocks
        TransactionalLocks transactionalLocks = m_injector.getInstance(TransactionalLocks.class);
        ReadWriteLock readWriteLock = EasyMock.createStrictMock(ReadWriteLock.class);
        Lock readLock = EasyMock.createStrictMock(Lock.class);
        Lock writeLock = EasyMock.createStrictMock(Lock.class);
        // expectations
        EasyMock.expect(transactionalLocks.getLock(LockArea.HRC_STATUS_CACHE)).andReturn(readWriteLock).times(2);
        EasyMock.expect(readWriteLock.writeLock()).andReturn(writeLock).times(2);
        writeLock.lock();
        EasyMock.expectLastCall().once();
        writeLock.unlock();
        EasyMock.expectLastCall().once();
        // another round of expectations
        // Two sequential annotated calls → the full lock/unlock cycle twice.
        EasyMock.expect(transactionalLocks.getLock(LockArea.HRC_STATUS_CACHE)).andReturn(readWriteLock).times(2);
        EasyMock.expect(readWriteLock.writeLock()).andReturn(writeLock).times(2);
        writeLock.lock();
        EasyMock.expectLastCall().once();
        writeLock.unlock();
        EasyMock.expectLastCall().once();
        // replay
        EasyMock.replay(transactionalLocks, readWriteLock, readLock, writeLock);
        // invoke method with annotations
        TestObject testObject = m_injector.getInstance(TestObject.class);
        testObject.testMultipleLocks();
        // verify locks are called
        EasyMock.verify(transactionalLocks, readWriteLock, readLock, writeLock);
    }

    /**
     * Tests that two invocations of a {@link TransactionalLock} with the same
     * {@link TransactionalLock} will only lock once on the {@link LockArea}.
     *
     * @throws Throwable
     */
    @Test
    public void testNestedMultipleLocks() throws Throwable {
        // create mocks
        TransactionalLocks transactionalLocks = m_injector.getInstance(TransactionalLocks.class);
        ReadWriteLock readWriteLock = EasyMock.createStrictMock(ReadWriteLock.class);
        Lock readLock = EasyMock.createStrictMock(Lock.class);
        Lock writeLock = EasyMock.createStrictMock(Lock.class);
        // expectations
        // Nested annotated calls on the same LockArea must coalesce into a
        // single lock/unlock cycle (unlike testMultipleLocks above).
        EasyMock.expect(transactionalLocks.getLock(LockArea.HRC_STATUS_CACHE)).andReturn(readWriteLock).times(2);
        EasyMock.expect(readWriteLock.writeLock()).andReturn(writeLock).times(2);
        writeLock.lock();
        EasyMock.expectLastCall().once();
        writeLock.unlock();
        EasyMock.expectLastCall().once();
        // replay
        EasyMock.replay(transactionalLocks, readWriteLock, readLock, writeLock);
        // invoke method with annotations
        TestObject testObject = m_injector.getInstance(TestObject.class);
        testObject.testMultipleNestedLocks();
        // verify locks are called
        EasyMock.verify(transactionalLocks, readWriteLock, readLock, writeLock);
    }

    /**
     * A test object which has methods annotated for use with this test class.
     */
    public static class TestObject {
        /**
         * Calls:
         * <ul>
         * <li>@Transactional</li>
         * <li>-> @TransactionalLock(lockArea = LockArea.HRC_STATUS_CACHE, lockType
         * = LockType.WRITE)</li>
         * </ul>
         */
        public void testLockMethodAsChildOfActiveTransaction() {
            transactionMethodCallingAnotherWithLock();
        }

        /**
         * Calls:
         * <ul>
         * <li>@TransactionalLock(lockArea = LockArea.HRC_STATUS_CACHE, lockType =
         * LockType.WRITE)</li>
         * <li>@TransactionalLock(lockArea = LockArea.HRC_STATUS_CACHE, lockType =
         * LockType.WRITE)</li>
         * </ul>
         */
        public void testMultipleLocks() {
            transactionMethodWithLock();
            transactionMethodWithLock();
        }

        /**
         * Calls:
         * <ul>
         * <li>@TransactionalLock(lockArea = LockArea.HRC_STATUS_CACHE, lockType =
         * LockType.WRITE)</li>
         * <li>-> @TransactionalLock(lockArea = LockArea.HRC_STATUS_CACHE, lockType
         * = LockType.WRITE)</li>
         * </ul>
         */
        public void testMultipleNestedLocks() {
            transactionMethodWithLockCallingAnotherWithLock();
        }

        @Transactional
        public void transactionMethodCallingAnotherWithLock() {
            transactionMethodWithLock();
        }

        @Transactional
        @TransactionalLock(lockArea = LockArea.HRC_STATUS_CACHE, lockType = LockType.WRITE)
        public void transactionMethodWithLock() {
        }

        @Transactional
        @TransactionalLock(lockArea = LockArea.HRC_STATUS_CACHE, lockType = LockType.WRITE)
        public void transactionMethodWithLockCallingAnotherWithLock() {
            transactionMethodWithLock();
        }
    }

    /**
     * Guice module override that binds {@link TransactionalLocks} to a nice
     * EasyMock instance so tests can layer expectations on it.
     */
    private class MockModule implements Module {
        /**
         * {@inheritDoc}
         */
        @Override
        public void configure(Binder binder) {
            binder.bind(TransactionalLocks.class).toInstance(
                EasyMock.createNiceMock(TransactionalLocks.class));
        }
    }
}
|
|
/*
* SqlServerRuleReader.java
*
* This file is part of SQL Workbench/J, http://www.sql-workbench.net
*
* Copyright 2002-2015, Thomas Kellerer
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at.
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* To contact the author please send an email to: support@sql-workbench.net
*
*/
package workbench.db.mssql;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.sql.Types;
import java.util.ArrayList;
import java.util.List;
import workbench.log.LogMgr;
import workbench.resource.Settings;
import workbench.db.ColumnIdentifier;
import workbench.db.DbMetadata;
import workbench.db.DbObject;
import workbench.db.ObjectListExtender;
import workbench.db.WbConnection;
import workbench.storage.DataStore;
import workbench.util.CollectionUtil;
import workbench.util.SqlUtil;
import workbench.util.StringUtil;
/**
* A class to read information about rules from SQL Server.
*
* @author Thomas Kellerer
*/
public class SqlServerRuleReader
    implements ObjectListExtender
{
    /**
     * Retrieve the list of rules matching the given schema and name patterns.
     *
     * @param connection    the SQL Server connection to query
     * @param schemaPattern schema filter pattern, may be blank
     * @param namePattern   rule name filter pattern, may be blank
     * @param ruleTable     NOTE(review): currently unused — confirm whether
     *                      filtering by the table a rule is bound to was intended
     * @return the matching rules; empty on error (errors are logged, not thrown)
     */
    public List<SqlServerRule> getRuleList(WbConnection connection, String schemaPattern, String namePattern, String ruleTable)
    {
        List<SqlServerRule> result = new ArrayList<>();
        Statement stmt = null;
        ResultSet rs = null;
        try
        {
            stmt = connection.createStatementForQuery();
            String sql = getSql(connection, schemaPattern, namePattern);
            rs = stmt.executeQuery(sql);
            while (rs.next())
            {
                // Columns: 1 = catalog, 2 = schema, 3 = rule name (see getSql()).
                String cat = rs.getString(1);
                String schema = rs.getString(2);
                String name = rs.getString(3);
                SqlServerRule rule = new SqlServerRule(cat, schema, name);
                result.add(rule);
            }
        }
        catch (SQLException e)
        {
            LogMgr.logError("SqlServerRuleReader.getRuleList()", "Could not read rules", e);
        }
        finally
        {
            SqlUtil.closeAll(rs, stmt);
        }
        return result;
    }

    /**
     * Look up a single rule matching the given object's schema and name.
     * Returns null when no match is found.
     */
    @Override
    public SqlServerRule getObjectDefinition(WbConnection connection, DbObject object)
    {
        List<SqlServerRule> rules = getRuleList(connection, object.getSchema(), object.getObjectName(), null);
        if (rules == null || rules.isEmpty()) return null;
        SqlServerRule rule = rules.get(0);
        // getObjectDefinition is called from within the DbExplorer with the selected object
        // we might have already retrieved the comment for the rule here due to the SqlServerObjectListEnhancer.
        // So we just copy the comment of the request object to the retrieved rule in order to avoid a second round-trip to the database.
        if (rule.getComment() == null && object.getComment() != null)
        {
            rule.setComment(object.getComment());
        }
        return rule;
    }

    /**
     * Append all matching rules to the DbExplorer object list.
     *
     * @return true when at least one rule was added
     */
    @Override
    public boolean extendObjectList(WbConnection con, DataStore result, String catalog, String schema, String objectNamePattern, String[] requestedTypes)
    {
        // Only participate when the caller asked for RULE objects.
        if (!DbMetadata.typeIncluded("RULE", requestedTypes)) return false;
        List<SqlServerRule> rules = getRuleList(con, schema, objectNamePattern, null);
        if (rules.isEmpty()) return false;
        for (SqlServerRule rule : rules)
        {
            int row = result.addRow();
            result.setValue(row, DbMetadata.COLUMN_IDX_TABLE_LIST_CATALOG, rule.getCatalog());
            result.setValue(row, DbMetadata.COLUMN_IDX_TABLE_LIST_SCHEMA, rule.getSchema());
            result.setValue(row, DbMetadata.COLUMN_IDX_TABLE_LIST_NAME, rule.getObjectName());
            result.setValue(row, DbMetadata.COLUMN_IDX_TABLE_LIST_REMARKS, rule.getComment());
            result.setValue(row, DbMetadata.COLUMN_IDX_TABLE_LIST_TYPE, rule.getObjectType());
            result.getRow(row).setUserObject(rule);
        }
        return true;
    }

    @Override
    public boolean isDerivedType()
    {
        return false;
    }

    /**
     * @return true when the given type is "RULE" (case-insensitive)
     */
    @Override
    public boolean handlesType(String type)
    {
        return StringUtil.equalStringIgnoreCase("RULE", type);
    }

    /**
     * @return true when any of the given types is handled; a null array means "all types"
     */
    @Override
    public boolean handlesType(String[] types)
    {
        if (types == null) return true;
        for (String type : types)
        {
            if (handlesType(type)) return true;
        }
        return false;
    }

    /**
     * Build a two-column (RULE, REMARKS) single-row DataStore describing the rule,
     * or null when the object is not a rule or cannot be resolved.
     */
    @Override
    public DataStore getObjectDetails(WbConnection con, DbObject object)
    {
        if (object == null) return null;
        if (!handlesType(object.getObjectType())) return null;
        SqlServerRule rule = getObjectDefinition(con, object);
        if (rule == null) return null;
        String[] columns = new String[] { "RULE", "REMARKS" };
        int[] types = new int[] { Types.VARCHAR, Types.VARCHAR };
        int[] sizes = new int[] { 20, 20 };
        DataStore result = new DataStore(columns, types, sizes);
        result.addRow();
        result.setValue(0, 0, rule.getObjectName());
        result.setValue(0, 1, rule.getComment());
        return result;
    }

    @Override
    public List<String> supportedTypes()
    {
        return CollectionUtil.arrayList("RULE");
    }

    /**
     * Retrieve the rule's source text via sp_helptext.
     * Returns null on error or when no source was produced.
     */
    @Override
    public String getObjectSource(WbConnection con, DbObject object)
    {
        Statement stmt = null;
        ResultSet rs = null;
        StringBuilder result = new StringBuilder(50);
        try
        {
            stmt = con.createStatementForQuery();
            // NOTE(review): the object name is concatenated into the statement;
            // a name containing a single quote would break this call — confirm
            // whether names are pre-validated upstream.
            rs = stmt.executeQuery("sp_helptext '" + object.getFullyQualifiedName(con) + "'");
            while (rs.next())
            {
                String src = rs.getString(1);
                result.append(src);
            }
        }
        catch (SQLException e)
        {
            LogMgr.logError("SqlServerRuleReader.getObjectSource()", "Could not retrieve rule source: ", e);
            return null;
        }
        finally
        {
            SqlUtil.closeAll(rs, stmt);
        }
        if (result.length() > 0)
        {
            result.append(";\n");
            return result.toString();
        }
        return null;
    }

    /**
     * Build the catalog/schema/name query for rules, using sys.all_objects on
     * SQL Server 2005+ and sysobjects on SQL Server 2000.
     */
    private String getSql(WbConnection con, String ruleSchemaPattern, String ruleNamePattern)
    {
        StringBuilder sql = new StringBuilder(150);
        if (SqlServerUtil.isSqlServer2005(con))
        {
            // "Modern" versions of SQL Server
            String baseSql =
                "select db_name() as rule_catalog, \n" +
                "       sc.name as rule_schema, \n" +
                "       ao.name as rule_name \n" +
                "from sys.all_objects ao with (nolock) \n" +
                "  join sys.schemas sc with (nolock) on ao.schema_id = sc.schema_id \n" +
                "where ao.type = 'R'";
            sql.append(baseSql);
            if (StringUtil.isNonBlank(ruleNamePattern))
            {
                sql.append("\n AND ");
                SqlUtil.appendExpression(sql, "ao.name", ruleNamePattern, con);
            }
            if (StringUtil.isNonBlank(ruleSchemaPattern))
            {
                sql.append("\n AND ");
                SqlUtil.appendExpression(sql, "sc.name", ruleSchemaPattern, con);
            }
            sql.append("\n ORDER BY 1, 2 ");
        }
        else
        {
            // SQL Server 2000
            String query =
                "select db_name() as rule_catalog, \n" +
                "       convert(sysname, user_name(uid)) as rule_schema, \n" +
                "       name as rule_name \n" +
                "from sysobjects with (nolock) \n" +
                "where type = 'R' ";
            sql.append(query);
            if (StringUtil.isNonBlank(ruleNamePattern))
            {
                sql.append("\n AND ");
                SqlUtil.appendExpression(sql, "name", ruleNamePattern, con);
            }
            if (StringUtil.isNonBlank(ruleSchemaPattern))
            {
                sql.append("\n AND ");
                SqlUtil.appendExpression(sql, "convert(sysname, user_name(uid))", ruleSchemaPattern, con);
            }
        }
        if (Settings.getInstance().getDebugMetadataSql())
        {
            LogMgr.logDebug("SqlServerRuleReader.getSql()", "Using SQL=\n" + sql);
        }
        return sql.toString();
    }

    /**
     * Rules have no columns; always returns null.
     */
    @Override
    public List<ColumnIdentifier> getColumns(WbConnection con, DbObject object)
    {
        return null;
    }

    @Override
    public boolean hasColumns()
    {
        return false;
    }
}
|
|
package org.apache.axis2.transport.mqtt;
/*
* Copyright (c) 2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.eclipse.paho.client.mqttv3.*;
import java.io.IOException;
import java.sql.Timestamp;
public class MqttAsyncCallback implements MqttCallback {
int state = BEGIN;
static final int BEGIN = 0;
static final int CONNECTED = 1;
static final int PUBLISHED = 2;
static final int SUBSCRIBED = 3;
static final int DISCONNECTED = 4;
static final int FINISH = 5;
static final int ERROR = 6;
static final int DISCONNECT = 7;
private MqttConnectOptions conOpt;
private Log log = LogFactory.getLog(MqttAsyncCallback.class);
// Private instance variables
private MqttAsyncClient client;
private String brokerUrl;
private Throwable ex = null;
private final Object waiter = new Object();
private boolean donext = false;
/**
 * Sets the connection options used when connecting the wrapped client.
 *
 * @param conOpt the MQTT connect options to use
 */
public void setConOpt(MqttConnectOptions conOpt) {
    this.conOpt = conOpt;
}
/**
 * Wraps the given asynchronous MQTT client and registers this instance as its
 * callback handler, so connection-lost, message-arrival and delivery events
 * are routed to this object.
 *
 * @param clientAsync the already-constructed async client to drive
 * @throws MqttException declared by the MQTT client API
 */
public MqttAsyncCallback(MqttAsyncClient clientAsync) throws MqttException {
    client = clientAsync;
    // Set this wrapper as the callback handler
    client.setCallback(this);
}
/**
 * Publish / send a message to an MQTT server
 *
 * @param topicName the name of the topic to publish to
 * @param message the set of bytes to send to the MQTT server
 * @throws MqttException
 */
public void publish(String topicName, MqttMessage message) throws Throwable {
    // Use a state machine to decide which step to do next. State change occurs
    // when a notification is received that an MQTT action has completed
    while (state != FINISH) {
        switch (state) {
            case BEGIN:
                // Connect using a non-blocking connect
                MqttConnector con = new MqttConnector();
                con.doConnect();
                break;
            case CONNECTED:
                // Publish using a non-blocking publisher
                Publisher pub = new Publisher();
                pub.doPublish(topicName, message);
                break;
            case PUBLISHED:
                state = DISCONNECT;
                donext = true;
                break;
            case DISCONNECT:
                Disconnector disc = new Disconnector();
                disc.doDisconnect();
                break;
            case ERROR:
                // An async step failed; rethrow the recorded cause to the caller.
                throw ex;
            case DISCONNECTED:
                state = FINISH;
                donext = true;
                break;
        }
        // Block (up to 10s) until an async listener advances the state machine.
        waitForStateChange(10000);
    }
}
/**
 * Wait for a maximum amount of time for a state change event to occur.
 *
 * @param maxTTW maximum time to wait in milliseconds
 * @throws MqttException when an asynchronous step recorded a failure; a
 *                       non-MqttException cause is wrapped rather than cast
 */
private void waitForStateChange(int maxTTW) throws MqttException {
    synchronized (waiter) {
        if (!donext) {
            try {
                waiter.wait(maxTTW);
            } catch (InterruptedException e) {
                // BUG FIX: restore the interrupt status instead of swallowing it
                // (previously only printed a stack trace with a misleading
                // "timed out" log line).
                Thread.currentThread().interrupt();
                log.info("wait for state change was interrupted", e);
            }
            if (ex != null) {
                // BUG FIX: the old unconditional cast could throw
                // ClassCastException for non-MqttException causes; wrap instead.
                if (ex instanceof MqttException) {
                    throw (MqttException) ex;
                }
                throw new MqttException(ex);
            }
        }
        donext = false;
    }
}
/**
 * Subscribe to a topic on an MQTT server
 * Once subscribed this method waits for the messages to arrive from the server
 * that match the subscription. It continues listening for messages until the enter key is
 * pressed.
 *
 * @param topicName to subscribe to (can be wild carded)
 * @param qos the maximum quality of service to receive messages at for this subscription
 * @throws MqttException
 */
public void subscribe(String topicName, int qos) throws Throwable {
    // Use a state machine to decide which step to do next. State change occurs
    // when a notification is received that an MQTT action has completed
    while (state != FINISH) {
        switch (state) {
            case BEGIN:
                // Connect using a non-blocking connect
                MqttConnector con = new MqttConnector();
                con.doConnect();
                break;
            case CONNECTED:
                // Subscribe using a non-blocking subscribe
                Subscriber sub = new Subscriber();
                sub.doSubscribe(topicName, qos);
                break;
            case SUBSCRIBED:
                // Block until Enter is pressed allowing messages to arrive
                log.info("Press <Enter> to exit");
                try {
                    System.in.read();
                } catch (IOException e) {
                    //If we can't read we'll just exit
                }
                state = DISCONNECT;
                donext = true;
                break;
            case DISCONNECT:
                Disconnector disc = new Disconnector();
                disc.doDisconnect();
                break;
            case ERROR:
                // An async step failed; rethrow the recorded cause to the caller.
                throw ex;
            case DISCONNECTED:
                state = FINISH;
                donext = true;
                break;
        }
        // Block (up to 10s) until an async listener advances the state machine.
        waitForStateChange(10000);
    }
}
/**
 * Called when the connection to the server is lost unexpectedly.
 * Deliberately ignored in this sample.
 */
public void connectionLost(Throwable throwable) {
    //ignoring for the moment...
}

/**
 * Called when a message arrives for a subscribed topic.
 * NOTE(review): subscribe() sits waiting for messages, yet any arrival
 * will throw here — confirm this is intended for this sample.
 */
public void messageArrived(String s, MqttMessage mqttMessage) throws Exception {
    throw new IllegalStateException();
}

/**
 * Called when delivery of a published message has completed; logs the token.
 */
public void deliveryComplete(IMqttDeliveryToken iMqttDeliveryToken) {
    log.info("message delivered .. : " + iMqttDeliveryToken.toString());
}
/**
 * Connect in a non-blocking way and then sit back and wait to be
 * notified that the action has completed.
 *
 * <p>On success or failure the listener updates the outer state machine
 * ({@code state}, {@code ex}) and wakes the driver thread via {@code waiter}.
 */
public class MqttConnector {
    public MqttConnector() {
    }
    public void doConnect() {
        // Connect to the server
        // Get a token and setup an asynchronous listener on the token which
        // will be notified once the connect completes
        log.info("Connecting to " + brokerUrl + " with client ID " + client.getClientId());
        IMqttActionListener conListener = new IMqttActionListener() {
            public void onSuccess(IMqttToken asyncActionToken) {
                log.info("Connected");
                state = CONNECTED;
                carryOn();
            }
            public void onFailure(IMqttToken asyncActionToken, Throwable exception) {
                // Record the failure for the driver thread to rethrow
                ex = exception;
                state = ERROR;
                log.info("connect failed" + exception);
                carryOn();
            }
            public void carryOn() {
                // Wake the thread blocked in waitForStateChange()
                synchronized (waiter) {
                    donext = true;
                    waiter.notifyAll();
                }
            }
        };
        try {
            // Connect using a non-blocking connect
            client.connect(conOpt, "Connect sample context", conListener);
        } catch (MqttException e) {
            // If though it is a non-blocking connect an exception can be
            // thrown if validation of parms fails or other checks such
            // as already connected fail.
            state = ERROR;
            donext = true;
            ex = e;
        }
    }
}
/**
 * Publish in a non-blocking way and then sit back and wait to be
 * notified that the action has completed.
 *
 * <p>The registered listener advances the outer state machine and wakes
 * the driver thread once delivery (or failure) is reported.
 */
public class Publisher {
    public void doPublish(String topicName, MqttMessage message) {
        // Kick off the send and register a listener that fires once the
        // message has been handed over to the server (or has failed).
        // MqttMessage message = new MqttMessage(payload);
        //message.setQos(qos);
        String time = new Timestamp(System.currentTimeMillis()).toString();
        // Listener invoked by the client when the publish completes.
        IMqttActionListener listener = new IMqttActionListener() {
            public void onSuccess(IMqttToken asyncActionToken) {
                log.info("Publish Completed");
                state = PUBLISHED;
                carryOn();
            }
            public void onFailure(IMqttToken asyncActionToken, Throwable exception) {
                ex = exception;
                state = ERROR;
                log.info("Publish failed" + exception);
                carryOn();
            }
            public void carryOn() {
                // Wake the thread blocked in waitForStateChange()
                synchronized (waiter) {
                    donext = true;
                    waiter.notifyAll();
                }
            }
        };
        try {
            // Hand the message to the client; completion arrives via the listener
            client.publish(topicName, message, "Pub sample context", listener);
        } catch (MqttException e) {
            // Even a non-blocking publish can fail fast (parameter validation etc.)
            state = ERROR;
            donext = true;
            ex = e;
        }
    }
}
/**
 * Subscribe in a non-blocking way and then sit back and wait to be
 * notified that the action has completed.
 *
 * <p>On completion the listener advances the outer state machine and
 * wakes the driver thread via {@code waiter}.
 */
public class Subscriber {
    public void doSubscribe(String topicName, int qos) {
        // Make a subscription
        // Get a token and setup an asynchronous listener on the token which
        // will be notified once the subscription is in place.
        log.info("Subscribing to topic \"" + topicName + "\" qos " + qos);
        IMqttActionListener subListener = new IMqttActionListener() {
            public void onSuccess(IMqttToken asyncActionToken) {
                log.info("Subscribe Completed");
                state = SUBSCRIBED;
                carryOn();
            }
            public void onFailure(IMqttToken asyncActionToken, Throwable exception) {
                // Record the failure for the driver thread to rethrow
                ex = exception;
                state = ERROR;
                log.info("Subscribe failed" + exception);
                carryOn();
            }
            public void carryOn() {
                // Wake the thread blocked in waitForStateChange()
                synchronized (waiter) {
                    donext = true;
                    waiter.notifyAll();
                }
            }
        };
        try {
            client.subscribe(topicName, qos, "Subscribe sample context", subListener);
        } catch (MqttException e) {
            // Fast failure (e.g. parameter validation) before any callback fires
            state = ERROR;
            donext = true;
            ex = e;
        }
    }
}
/**
 * Disconnect in a non-blocking way and then sit back and wait to be
 * notified that the action has completed.
 *
 * <p>The registered listener advances the outer state machine and wakes
 * the driver thread once the disconnect has finished (or failed).
 */
public class Disconnector {
    public void doDisconnect() {
        // Ask the client to disconnect; completion arrives via the listener.
        log.info("Disconnecting");
        IMqttActionListener listener = new IMqttActionListener() {
            public void onSuccess(IMqttToken asyncActionToken) {
                log.info("Disconnect Completed");
                state = DISCONNECTED;
                carryOn();
            }
            public void onFailure(IMqttToken asyncActionToken, Throwable exception) {
                ex = exception;
                state = ERROR;
                log.info("Disconnect failed" + exception);
                carryOn();
            }
            public void carryOn() {
                // Wake the thread blocked in waitForStateChange()
                synchronized (waiter) {
                    donext = true;
                    waiter.notifyAll();
                }
            }
        };
        try {
            client.disconnect("Disconnect sample context", listener);
        } catch (MqttException e) {
            // Fast failure (e.g. not connected) before any callback fires
            state = ERROR;
            donext = true;
            ex = e;
        }
    }
}
}
|
|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.hive;
import com.facebook.presto.hive.metastore.Column;
import com.facebook.presto.hive.metastore.Table;
import com.facebook.presto.spi.ConnectorTableHandle;
import com.facebook.presto.spi.ErrorCodeSupplier;
import com.facebook.presto.spi.PrestoException;
import com.facebook.presto.spi.RecordCursor;
import com.facebook.presto.spi.SchemaTableName;
import com.facebook.presto.spi.predicate.NullableValue;
import com.facebook.presto.spi.type.CharType;
import com.facebook.presto.spi.type.DecimalType;
import com.facebook.presto.spi.type.Decimals;
import com.facebook.presto.spi.type.StandardTypes;
import com.facebook.presto.spi.type.Type;
import com.facebook.presto.spi.type.VarcharType;
import com.google.common.base.Joiner;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableList;
import io.airlift.slice.Slice;
import io.airlift.slice.SliceUtf8;
import io.airlift.slice.Slices;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.common.JavaUtils;
import org.apache.hadoop.hive.ql.io.SymlinkTextInputFormat;
import org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat;
import org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe;
import org.apache.hadoop.hive.serde2.Deserializer;
import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.mapred.FileSplit;
import org.apache.hadoop.mapred.InputFormat;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.RecordReader;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.TextInputFormat;
import org.apache.hadoop.util.ReflectionUtils;
import org.joda.time.DateTimeZone;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
import org.joda.time.format.DateTimeFormatterBuilder;
import org.joda.time.format.DateTimeParser;
import org.joda.time.format.DateTimePrinter;
import org.joda.time.format.ISODateTimeFormat;
import javax.annotation.Nullable;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.Base64;
import java.util.List;
import java.util.Optional;
import java.util.OptionalInt;
import java.util.Properties;
import java.util.concurrent.TimeUnit;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import static com.facebook.presto.hive.HiveColumnHandle.ColumnType.PARTITION_KEY;
import static com.facebook.presto.hive.HiveColumnHandle.ColumnType.REGULAR;
import static com.facebook.presto.hive.HiveColumnHandle.bucketColumnHandle;
import static com.facebook.presto.hive.HiveColumnHandle.isBucketColumnHandle;
import static com.facebook.presto.hive.HiveColumnHandle.isPathColumnHandle;
import static com.facebook.presto.hive.HiveColumnHandle.pathColumnHandle;
import static com.facebook.presto.hive.HiveErrorCode.HIVE_CANNOT_OPEN_SPLIT;
import static com.facebook.presto.hive.HiveErrorCode.HIVE_INVALID_METADATA;
import static com.facebook.presto.hive.HiveErrorCode.HIVE_INVALID_PARTITION_VALUE;
import static com.facebook.presto.hive.HiveErrorCode.HIVE_INVALID_VIEW_DATA;
import static com.facebook.presto.hive.HiveErrorCode.HIVE_SERDE_NOT_FOUND;
import static com.facebook.presto.hive.HivePartitionKey.HIVE_DEFAULT_DYNAMIC_PARTITION;
import static com.facebook.presto.hive.RetryDriver.retry;
import static com.facebook.presto.hive.metastore.MetastoreUtil.getHiveSchema;
import static com.facebook.presto.spi.StandardErrorCode.NOT_SUPPORTED;
import static com.facebook.presto.spi.type.BigintType.BIGINT;
import static com.facebook.presto.spi.type.BooleanType.BOOLEAN;
import static com.facebook.presto.spi.type.Chars.isCharType;
import static com.facebook.presto.spi.type.Chars.trimSpaces;
import static com.facebook.presto.spi.type.DateType.DATE;
import static com.facebook.presto.spi.type.DecimalType.createDecimalType;
import static com.facebook.presto.spi.type.DoubleType.DOUBLE;
import static com.facebook.presto.spi.type.IntegerType.INTEGER;
import static com.facebook.presto.spi.type.RealType.REAL;
import static com.facebook.presto.spi.type.SmallintType.SMALLINT;
import static com.facebook.presto.spi.type.TimestampType.TIMESTAMP;
import static com.facebook.presto.spi.type.TinyintType.TINYINT;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.collect.Iterables.filter;
import static com.google.common.collect.Lists.transform;
import static java.lang.Byte.parseByte;
import static java.lang.Double.parseDouble;
import static java.lang.Float.floatToRawIntBits;
import static java.lang.Float.parseFloat;
import static java.lang.Integer.parseInt;
import static java.lang.Long.parseLong;
import static java.lang.Short.parseShort;
import static java.lang.String.format;
import static java.math.BigDecimal.ROUND_UNNECESSARY;
import static java.nio.charset.StandardCharsets.UTF_8;
import static java.util.Objects.requireNonNull;
import static org.apache.hadoop.hive.common.FileUtils.unescapePathName;
import static org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.FILE_INPUT_FORMAT;
import static org.apache.hadoop.hive.serde.serdeConstants.DECIMAL_TYPE_NAME;
import static org.apache.hadoop.hive.serde.serdeConstants.SERIALIZATION_LIB;
import static org.apache.hadoop.hive.serde2.ColumnProjectionUtils.READ_ALL_COLUMNS;
import static org.apache.hadoop.hive.serde2.ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR;
import static org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
public final class HiveUtil
{
public static final String PRESTO_VIEW_FLAG = "presto_view";
private static final String VIEW_PREFIX = "/* Presto View: ";
private static final String VIEW_SUFFIX = " */";
private static final DateTimeFormatter HIVE_DATE_PARSER = ISODateTimeFormat.date().withZoneUTC();
private static final DateTimeFormatter HIVE_TIMESTAMP_PARSER;
private static final Pattern SUPPORTED_DECIMAL_TYPE = Pattern.compile(DECIMAL_TYPE_NAME + "\\((\\d+),(\\d+)\\)");
private static final int DECIMAL_PRECISION_GROUP = 1;
private static final int DECIMAL_SCALE_GROUP = 2;
private static final String BIG_DECIMAL_POSTFIX = "BD";
static {
DateTimeParser[] timestampWithoutTimeZoneParser = {
DateTimeFormat.forPattern("yyyy-M-d").getParser(),
DateTimeFormat.forPattern("yyyy-M-d H:m").getParser(),
DateTimeFormat.forPattern("yyyy-M-d H:m:s").getParser(),
DateTimeFormat.forPattern("yyyy-M-d H:m:s.SSS").getParser(),
DateTimeFormat.forPattern("yyyy-M-d H:m:s.SSSSSSS").getParser(),
DateTimeFormat.forPattern("yyyy-M-d H:m:s.SSSSSSSSS").getParser(),
};
DateTimePrinter timestampWithoutTimeZonePrinter = DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss.SSSSSSSSS").getPrinter();
HIVE_TIMESTAMP_PARSER = new DateTimeFormatterBuilder().append(timestampWithoutTimeZonePrinter, timestampWithoutTimeZoneParser).toFormatter().withZoneUTC();
}
private HiveUtil()
{
}
/**
 * Creates a Hadoop {@code RecordReader} for one split of a Hive file.
 *
 * <p>Restricts the read to the REGULAR (data) columns requested, propagates
 * any {@code serialization.*} properties from the table schema into the job
 * configuration, and retries transient failures via {@code retry()}.
 *
 * @param configuration base Hadoop configuration (mutated with read-column hints)
 * @param path file to read
 * @param start byte offset of the split
 * @param length length of the split in bytes
 * @param schema Hive table/partition schema properties
 * @param columns all column handles; only REGULAR ones are read
 * @throws PrestoException with HIVE_CANNOT_OPEN_SPLIT if the reader cannot be created
 */
public static RecordReader<?, ?> createRecordReader(Configuration configuration, Path path, long start, long length, Properties schema, List<HiveColumnHandle> columns)
{
    // determine which hive columns we will read
    List<HiveColumnHandle> readColumns = ImmutableList.copyOf(filter(columns, column -> column.getColumnType() == REGULAR));
    List<Integer> readHiveColumnIndexes = ImmutableList.copyOf(transform(readColumns, HiveColumnHandle::getHiveColumnIndex));
    // Tell hive the columns we would like to read, this lets hive optimize reading column oriented files
    setReadColumns(configuration, readHiveColumnIndexes);
    InputFormat<?, ?> inputFormat = getInputFormat(configuration, schema, true);
    JobConf jobConf = new JobConf(configuration);
    FileSplit fileSplit = new FileSplit(path, start, length, (String[]) null);
    // propagate serialization configuration to getRecordReader
    schema.stringPropertyNames().stream()
            .filter(name -> name.startsWith("serialization."))
            .forEach(name -> jobConf.set(name, schema.getProperty(name)));
    try {
        return retry()
                .stopOnIllegalExceptions()
                .run("createRecordReader", () -> inputFormat.getRecordReader(fileSplit, jobConf, Reporter.NULL));
    }
    catch (Exception e) {
        throw new PrestoException(HIVE_CANNOT_OPEN_SPLIT, format("Error opening Hive split %s (offset=%s, length=%s) using %s: %s",
                path,
                start,
                length,
                getInputFormatName(schema),
                e.getMessage()),
                e);
    }
}
/**
 * Records in the Hadoop configuration which Hive column indexes should be
 * read (comma-separated) and turns off the read-all-columns default.
 *
 * @param configuration configuration to mutate
 * @param readHiveColumnIndexes zero-based Hive column indexes to read
 */
public static void setReadColumns(Configuration configuration, List<Integer> readHiveColumnIndexes)
{
    StringBuilder columnIds = new StringBuilder();
    for (Integer index : readHiveColumnIndexes) {
        if (columnIds.length() > 0) {
            columnIds.append(',');
        }
        columnIds.append(index);
    }
    configuration.set(READ_COLUMN_IDS_CONF_STR, columnIds.toString());
    configuration.setBoolean(READ_ALL_COLUMNS, false);
}
/**
 * Instantiates the {@code InputFormat} named in the table schema.
 *
 * @param configuration Hadoop configuration used to construct the format
 * @param schema table/partition schema carrying the input-format class name
 * @param symlinkTarget when true, SymlinkTextInputFormat is resolved to
 *        TextInputFormat because symlink targets are always plain text
 * @throws RuntimeException if the class cannot be found or instantiated
 */
static InputFormat<?, ?> getInputFormat(Configuration configuration, Properties schema, boolean symlinkTarget)
{
    String inputFormatName = getInputFormatName(schema);
    try {
        JobConf jobConf = new JobConf(configuration);
        Class<? extends InputFormat<?, ?>> inputFormatClass = getInputFormatClass(jobConf, inputFormatName);
        if (symlinkTarget && (inputFormatClass == SymlinkTextInputFormat.class)) {
            // symlink targets are always TextInputFormat
            inputFormatClass = TextInputFormat.class;
        }
        return ReflectionUtils.newInstance(inputFormatClass, jobConf);
    }
    catch (ClassNotFoundException | RuntimeException e) {
        throw new RuntimeException("Unable to create input format " + inputFormatName, e);
    }
}
/**
 * Resolves an input-format class name, translating the legacy CDH Parquet
 * names to the bundled {@code MapredParquetInputFormat}.
 *
 * @throws ClassNotFoundException if the named class is not on the classpath
 */
@SuppressWarnings({"unchecked", "RedundantCast"})
private static Class<? extends InputFormat<?, ?>> getInputFormatClass(JobConf conf, String inputFormatName)
        throws ClassNotFoundException
{
    // CDH uses different names for Parquet
    if ("parquet.hive.DeprecatedParquetInputFormat".equals(inputFormatName) ||
            "parquet.hive.MapredParquetInputFormat".equals(inputFormatName)) {
        return MapredParquetInputFormat.class;
    }
    Class<?> clazz = conf.getClassByName(inputFormatName);
    // TODO: remove redundant cast to Object after IDEA-118533 is fixed
    return (Class<? extends InputFormat<?, ?>>) (Object) clazz.asSubclass(InputFormat.class);
}
/**
 * Returns the input-format class name from the schema, failing with
 * HIVE_INVALID_METADATA when the property is absent.
 */
static String getInputFormatName(Properties schema)
{
    String name = schema.getProperty(FILE_INPUT_FORMAT);
    checkCondition(name != null, HIVE_INVALID_METADATA, "Table or partition is missing Hive input format property: %s", FILE_INPUT_FORMAT);
    return name;
}

/**
 * Parses a Hive {@code yyyy-MM-dd} date string into days since the epoch (UTC).
 */
public static long parseHiveDate(String value)
{
    long millis = HIVE_DATE_PARSER.parseMillis(value);
    return TimeUnit.MILLISECONDS.toDays(millis);
}

/**
 * Parses a Hive timestamp string into epoch millis, interpreted in the
 * given time zone (accepts the fractional-second variants registered in
 * HIVE_TIMESTAMP_PARSER).
 */
public static long parseHiveTimestamp(String value, DateTimeZone timeZone)
{
    return HIVE_TIMESTAMP_PARSER.withZone(timeZone).parseMillis(value);
}
/**
 * Determines whether a file can be split for the given input format.
 *
 * <p>ORC is always splittable; otherwise this reflectively invokes the
 * protected {@code isSplitable} (Hadoop's spelling) method found on the
 * format's class hierarchy, defaulting to non-splittable when absent.
 *
 * @throws RuntimeException wrapping any reflective invocation failure
 */
static boolean isSplittable(InputFormat<?, ?> inputFormat, FileSystem fileSystem, Path path)
{
    // ORC uses a custom InputFormat but is always splittable
    if (inputFormat.getClass().getSimpleName().equals("OrcInputFormat")) {
        return true;
    }
    // use reflection to get isSplittable method on FileInputFormat
    Method method = null;
    for (Class<?> clazz = inputFormat.getClass(); clazz != null; clazz = clazz.getSuperclass()) {
        try {
            method = clazz.getDeclaredMethod("isSplitable", FileSystem.class, Path.class);
            break;
        }
        catch (NoSuchMethodException ignored) {
        }
    }
    if (method == null) {
        return false;
    }
    try {
        method.setAccessible(true);
        return (boolean) method.invoke(inputFormat, fileSystem, path);
    }
    catch (InvocationTargetException | IllegalAccessException e) {
        // Throwables.propagate is deprecated; wrapping in RuntimeException is
        // behaviorally equivalent for these checked exception types.
        throw new RuntimeException(e);
    }
}
/**
 * Returns the struct object inspector for a table described by the given
 * schema properties (instantiates and initializes the deserializer first).
 */
public static StructObjectInspector getTableObjectInspector(Properties schema)
{
    return getTableObjectInspector(getDeserializer(schema));
}

/**
 * Returns the object inspector of an initialized deserializer, verifying
 * that it describes a STRUCT.
 *
 * @throws IllegalArgumentException if the inspector is not a STRUCT
 * @throws RuntimeException wrapping any SerDeException from the deserializer
 */
public static StructObjectInspector getTableObjectInspector(@SuppressWarnings("deprecation") Deserializer deserializer)
{
    try {
        ObjectInspector inspector = deserializer.getObjectInspector();
        checkArgument(inspector.getCategory() == Category.STRUCT, "expected STRUCT: %s", inspector.getCategory());
        return (StructObjectInspector) inspector;
    }
    catch (SerDeException e) {
        // Throwables.propagate is deprecated; wrapping the checked exception
        // in RuntimeException is what propagate did here anyway.
        throw new RuntimeException(e);
    }
}

/**
 * Returns the top-level struct fields of the given table's row type.
 */
public static List<? extends StructField> getTableStructFields(Table table)
{
    return getTableObjectInspector(getHiveSchema(table)).getAllStructFieldRefs();
}
/**
 * Returns true when the schema's declared deserializer is exactly the
 * given class (compared by fully-qualified name).
 */
public static boolean isDeserializerClass(Properties schema, Class<?> deserializerClass)
{
    return getDeserializerClassName(schema).equals(deserializerClass.getName());
}

/**
 * Returns the deserializer class name from the schema, failing with
 * HIVE_INVALID_METADATA when the property is absent.
 */
public static String getDeserializerClassName(Properties schema)
{
    String name = schema.getProperty(SERIALIZATION_LIB);
    checkCondition(name != null, HIVE_INVALID_METADATA, "Table or partition is missing Hive deserializer property: %s", SERIALIZATION_LIB);
    return name;
}

/**
 * Creates and initializes the deserializer declared by the schema.
 */
@SuppressWarnings("deprecation")
public static Deserializer getDeserializer(Properties schema)
{
    String name = getDeserializerClassName(schema);
    Deserializer deserializer = createDeserializer(getDeserializerClass(name));
    initializeDeserializer(deserializer, schema);
    return deserializer;
}
/**
 * Resolves a deserializer class name, translating the legacy CDH Parquet
 * SerDe name to the bundled {@code ParquetHiveSerDe}.
 *
 * @throws PrestoException with HIVE_SERDE_NOT_FOUND if the class is missing
 * @throws RuntimeException if the class does not implement Deserializer
 */
@SuppressWarnings("deprecation")
private static Class<? extends Deserializer> getDeserializerClass(String name)
{
    // CDH uses different names for Parquet
    if ("parquet.hive.serde.ParquetHiveSerDe".equals(name)) {
        return ParquetHiveSerDe.class;
    }
    try {
        return Class.forName(name, true, JavaUtils.getClassLoader()).asSubclass(Deserializer.class);
    }
    catch (ClassNotFoundException e) {
        // preserve the cause so the classloading failure is diagnosable
        throw new PrestoException(HIVE_SERDE_NOT_FOUND, "deserializer does not exist: " + name, e);
    }
    catch (ClassCastException e) {
        // preserve the cause (previously dropped)
        throw new RuntimeException("invalid deserializer class: " + name, e);
    }
}
/**
 * Instantiates a deserializer via its no-arg constructor.
 *
 * @throws RuntimeException wrapping any reflective construction failure
 */
@SuppressWarnings("deprecation")
private static Deserializer createDeserializer(Class<? extends Deserializer> clazz)
{
    try {
        return clazz.getConstructor().newInstance();
    }
    catch (ReflectiveOperationException e) {
        throw new RuntimeException("error creating deserializer: " + clazz.getName(), e);
    }
}

/**
 * Initializes a deserializer with the table schema (no Hadoop defaults).
 *
 * @throws RuntimeException wrapping any SerDeException from initialization
 */
@SuppressWarnings("deprecation")
private static void initializeDeserializer(Deserializer deserializer, Properties schema)
{
    try {
        deserializer.initialize(new Configuration(false), schema);
    }
    catch (SerDeException e) {
        // preserve the cause (previously dropped, losing the SerDe diagnostics)
        throw new RuntimeException("error initializing deserializer: " + deserializer.getClass().getName(), e);
    }
}
/**
 * Returns true when the raw bytes are Hive's SQL NULL marker, the
 * two-byte sequence {@code \N}.
 */
public static boolean isHiveNull(byte[] bytes)
{
    if (bytes.length != 2) {
        return false;
    }
    return bytes[0] == '\\' && bytes[1] == 'N';
}
/**
 * Converts a Hive partition value string into a typed {@code NullableValue}.
 *
 * <p>The Hive dynamic-partition marker maps to SQL NULL; an empty string
 * maps to each type's zero/false value for the numeric and boolean types
 * (but is parsed normally for date/timestamp/varchar/char).
 *
 * @param partitionName partition name, used only in error messages
 * @param value raw partition value from the partition name
 * @param type target Presto type
 * @param timeZone zone used when parsing TIMESTAMP values
 * @throws PrestoException NOT_SUPPORTED for types without a branch below,
 *         or HIVE_INVALID_PARTITION_VALUE from the per-type parsers
 */
public static NullableValue parsePartitionValue(String partitionName, String value, Type type, DateTimeZone timeZone)
{
    // "__HIVE_DEFAULT_PARTITION__" is Hive's encoding of NULL
    boolean isNull = HIVE_DEFAULT_DYNAMIC_PARTITION.equals(value);
    if (type instanceof DecimalType) {
        DecimalType decimalType = (DecimalType) type;
        if (isNull) {
            return NullableValue.asNull(decimalType);
        }
        if (decimalType.isShort()) {
            if (value.isEmpty()) {
                return NullableValue.of(decimalType, 0L);
            }
            return NullableValue.of(decimalType, shortDecimalPartitionKey(value, decimalType, partitionName));
        }
        else {
            if (value.isEmpty()) {
                return NullableValue.of(decimalType, Decimals.encodeUnscaledValue(BigInteger.ZERO));
            }
            return NullableValue.of(decimalType, longDecimalPartitionKey(value, decimalType, partitionName));
        }
    }
    if (BOOLEAN.equals(type)) {
        if (isNull) {
            return NullableValue.asNull(BOOLEAN);
        }
        if (value.isEmpty()) {
            return NullableValue.of(BOOLEAN, false);
        }
        return NullableValue.of(BOOLEAN, booleanPartitionKey(value, partitionName));
    }
    if (TINYINT.equals(type)) {
        if (isNull) {
            return NullableValue.asNull(TINYINT);
        }
        if (value.isEmpty()) {
            return NullableValue.of(TINYINT, 0L);
        }
        return NullableValue.of(TINYINT, tinyintPartitionKey(value, partitionName));
    }
    if (SMALLINT.equals(type)) {
        if (isNull) {
            return NullableValue.asNull(SMALLINT);
        }
        if (value.isEmpty()) {
            return NullableValue.of(SMALLINT, 0L);
        }
        return NullableValue.of(SMALLINT, smallintPartitionKey(value, partitionName));
    }
    if (INTEGER.equals(type)) {
        if (isNull) {
            return NullableValue.asNull(INTEGER);
        }
        if (value.isEmpty()) {
            return NullableValue.of(INTEGER, 0L);
        }
        return NullableValue.of(INTEGER, integerPartitionKey(value, partitionName));
    }
    if (BIGINT.equals(type)) {
        if (isNull) {
            return NullableValue.asNull(BIGINT);
        }
        if (value.isEmpty()) {
            return NullableValue.of(BIGINT, 0L);
        }
        return NullableValue.of(BIGINT, bigintPartitionKey(value, partitionName));
    }
    if (DATE.equals(type)) {
        if (isNull) {
            return NullableValue.asNull(DATE);
        }
        return NullableValue.of(DATE, datePartitionKey(value, partitionName));
    }
    if (TIMESTAMP.equals(type)) {
        if (isNull) {
            return NullableValue.asNull(TIMESTAMP);
        }
        return NullableValue.of(TIMESTAMP, timestampPartitionKey(value, timeZone, partitionName));
    }
    if (REAL.equals(type)) {
        if (isNull) {
            return NullableValue.asNull(REAL);
        }
        if (value.isEmpty()) {
            // REAL is stored as the float's raw bits widened to long
            return NullableValue.of(REAL, (long) floatToRawIntBits(0.0f));
        }
        return NullableValue.of(REAL, floatPartitionKey(value, partitionName));
    }
    if (DOUBLE.equals(type)) {
        if (isNull) {
            return NullableValue.asNull(DOUBLE);
        }
        if (value.isEmpty()) {
            return NullableValue.of(DOUBLE, 0.0);
        }
        return NullableValue.of(DOUBLE, doublePartitionKey(value, partitionName));
    }
    if (type instanceof VarcharType) {
        if (isNull) {
            return NullableValue.asNull(type);
        }
        return NullableValue.of(type, varcharPartitionKey(value, partitionName, type));
    }
    if (isCharType(type)) {
        if (isNull) {
            return NullableValue.asNull(type);
        }
        return NullableValue.of(type, charPartitionKey(value, partitionName, type));
    }
    throw new PrestoException(NOT_SUPPORTED, format("Unsupported Type [%s] for partition: %s", type, partitionName));
}
/**
 * Returns true when the table is a Presto view (marked via the
 * "presto_view" table parameter).
 */
public static boolean isPrestoView(Table table)
{
    return "true".equals(table.getParameters().get(PRESTO_VIEW_FLAG));
}

/**
 * Encodes view SQL as base64 wrapped in the Presto view comment markers.
 */
public static String encodeViewData(String data)
{
    return VIEW_PREFIX + Base64.getEncoder().encodeToString(data.getBytes(UTF_8)) + VIEW_SUFFIX;
}

/**
 * Decodes view SQL produced by {@code encodeViewData}, validating the
 * prefix/suffix markers first.
 *
 * @throws PrestoException with HIVE_INVALID_VIEW_DATA if the markers are missing
 */
public static String decodeViewData(String data)
{
    checkCondition(data.startsWith(VIEW_PREFIX), HIVE_INVALID_VIEW_DATA, "View data missing prefix: %s", data);
    checkCondition(data.endsWith(VIEW_SUFFIX), HIVE_INVALID_VIEW_DATA, "View data missing suffix: %s", data);
    data = data.substring(VIEW_PREFIX.length());
    data = data.substring(0, data.length() - VIEW_SUFFIX.length());
    return new String(Base64.getDecoder().decode(data), UTF_8);
}
/**
 * Extracts the decimal type of a Hive type, if it is a decimal.
 */
public static Optional<DecimalType> getDecimalType(HiveType hiveType)
{
    return getDecimalType(hiveType.getHiveTypeName());
}

/**
 * Parses a Hive type name of the form {@code decimal(p,s)} into a
 * Presto DecimalType; empty for any other type name.
 */
public static Optional<DecimalType> getDecimalType(String hiveTypeName)
{
    Matcher matcher = SUPPORTED_DECIMAL_TYPE.matcher(hiveTypeName);
    if (matcher.matches()) {
        int precision = parseInt(matcher.group(DECIMAL_PRECISION_GROUP));
        int scale = parseInt(matcher.group(DECIMAL_SCALE_GROUP));
        return Optional.of(createDecimalType(precision, scale));
    }
    else {
        return Optional.empty();
    }
}

/** Returns true when the Presto type is an ARRAY. */
public static boolean isArrayType(Type type)
{
    return type.getTypeSignature().getBase().equals(StandardTypes.ARRAY);
}

/** Returns true when the Presto type is a MAP. */
public static boolean isMapType(Type type)
{
    return type.getTypeSignature().getBase().equals(StandardTypes.MAP);
}

/** Returns true when the Presto type is a ROW. */
public static boolean isRowType(Type type)
{
    return type.getTypeSignature().getBase().equals(StandardTypes.ROW);
}

/** Returns true when the Presto type is structural (MAP, ARRAY, or ROW). */
public static boolean isStructuralType(Type type)
{
    String baseName = type.getTypeSignature().getBase();
    return baseName.equals(StandardTypes.MAP) || baseName.equals(StandardTypes.ARRAY) || baseName.equals(StandardTypes.ROW);
}

/** Returns true when the Hive type is structural (LIST, MAP, or STRUCT). */
public static boolean isStructuralType(HiveType hiveType)
{
    return hiveType.getCategory() == Category.LIST || hiveType.getCategory() == Category.MAP || hiveType.getCategory() == Category.STRUCT;
}
/**
 * Parses a BOOLEAN partition value ("true"/"false", case-insensitive).
 *
 * @param value raw partition value
 * @param name partition name, used only in the error message
 * @throws PrestoException with HIVE_INVALID_PARTITION_VALUE for any other value
 */
public static boolean booleanPartitionKey(String value, String name)
{
    boolean isTrue = value.equalsIgnoreCase("true");
    boolean isFalse = value.equalsIgnoreCase("false");
    if (isTrue || isFalse) {
        return isTrue;
    }
    throw new PrestoException(HIVE_INVALID_PARTITION_VALUE, format("Invalid partition value '%s' for BOOLEAN partition key: %s", value, name));
}
/**
 * Parses a BIGINT partition value.
 *
 * @throws PrestoException with HIVE_INVALID_PARTITION_VALUE on malformed input
 */
public static long bigintPartitionKey(String value, String name)
{
    try {
        return parseLong(value);
    }
    catch (NumberFormatException e) {
        throw new PrestoException(HIVE_INVALID_PARTITION_VALUE, format("Invalid partition value '%s' for BIGINT partition key: %s", value, name));
    }
}

/**
 * Parses an INTEGER partition value (widened to long for the engine).
 *
 * @throws PrestoException with HIVE_INVALID_PARTITION_VALUE on malformed input
 */
public static long integerPartitionKey(String value, String name)
{
    try {
        return parseInt(value);
    }
    catch (NumberFormatException e) {
        throw new PrestoException(HIVE_INVALID_PARTITION_VALUE, format("Invalid partition value '%s' for INTEGER partition key: %s", value, name));
    }
}

/**
 * Parses a SMALLINT partition value (widened to long for the engine).
 *
 * @throws PrestoException with HIVE_INVALID_PARTITION_VALUE on malformed input
 */
public static long smallintPartitionKey(String value, String name)
{
    try {
        return parseShort(value);
    }
    catch (NumberFormatException e) {
        throw new PrestoException(HIVE_INVALID_PARTITION_VALUE, format("Invalid partition value '%s' for SMALLINT partition key: %s", value, name));
    }
}

/**
 * Parses a TINYINT partition value (widened to long for the engine).
 *
 * @throws PrestoException with HIVE_INVALID_PARTITION_VALUE on malformed input
 */
public static long tinyintPartitionKey(String value, String name)
{
    try {
        return parseByte(value);
    }
    catch (NumberFormatException e) {
        throw new PrestoException(HIVE_INVALID_PARTITION_VALUE, format("Invalid partition value '%s' for TINYINT partition key: %s", value, name));
    }
}

/**
 * Parses a REAL/FLOAT partition value; stored as the float's raw bits in a long.
 *
 * @throws PrestoException with HIVE_INVALID_PARTITION_VALUE on malformed input
 */
public static long floatPartitionKey(String value, String name)
{
    try {
        return floatToRawIntBits(parseFloat(value));
    }
    catch (NumberFormatException e) {
        throw new PrestoException(HIVE_INVALID_PARTITION_VALUE, format("Invalid partition value '%s' for FLOAT partition key: %s", value, name));
    }
}

/**
 * Parses a DOUBLE partition value.
 *
 * @throws PrestoException with HIVE_INVALID_PARTITION_VALUE on malformed input
 */
public static double doublePartitionKey(String value, String name)
{
    try {
        return parseDouble(value);
    }
    catch (NumberFormatException e) {
        throw new PrestoException(HIVE_INVALID_PARTITION_VALUE, format("Invalid partition value '%s' for DOUBLE partition key: %s", value, name));
    }
}
/**
 * Parses a DATE partition value into days since the epoch.
 *
 * @throws PrestoException with HIVE_INVALID_PARTITION_VALUE on malformed input
 */
public static long datePartitionKey(String value, String name)
{
    try {
        return parseHiveDate(value);
    }
    catch (IllegalArgumentException e) {
        throw new PrestoException(HIVE_INVALID_PARTITION_VALUE, format("Invalid partition value '%s' for DATE partition key: %s", value, name));
    }
}

/**
 * Parses a TIMESTAMP partition value into epoch millis in the given zone.
 *
 * @throws PrestoException with HIVE_INVALID_PARTITION_VALUE on malformed input
 */
public static long timestampPartitionKey(String value, DateTimeZone zone, String name)
{
    try {
        return parseHiveTimestamp(value, zone);
    }
    catch (IllegalArgumentException e) {
        throw new PrestoException(HIVE_INVALID_PARTITION_VALUE, format("Invalid partition value '%s' for TIMESTAMP partition key: %s", value, name));
    }
}
/**
 * Parses a short-precision decimal partition value into its unscaled long.
 */
public static long shortDecimalPartitionKey(String value, DecimalType type, String name)
{
    return decimalPartitionKey(value, type, name).unscaledValue().longValue();
}

/**
 * Parses a long-precision decimal partition value into its unscaled Slice.
 */
public static Slice longDecimalPartitionKey(String value, DecimalType type, String name)
{
    return Decimals.encodeUnscaledValue(decimalPartitionKey(value, type, name).unscaledValue());
}

/**
 * Parses a decimal partition value, stripping Hive's optional "BD" suffix,
 * and validates it against the declared precision and scale.
 *
 * @throws PrestoException with HIVE_INVALID_PARTITION_VALUE if the value is
 *         malformed, does not fit the scale exactly, or exceeds the precision
 */
private static BigDecimal decimalPartitionKey(String value, DecimalType type, String name)
{
    try {
        if (value.endsWith(BIG_DECIMAL_POSTFIX)) {
            // Hive may suffix decimal literals with "BD" (e.g. "1.23BD")
            value = value.substring(0, value.length() - BIG_DECIMAL_POSTFIX.length());
        }
        BigDecimal decimal = new BigDecimal(value);
        // setScale with ROUND_UNNECESSARY throws ArithmeticException when the
        // value does not fit the declared scale exactly
        decimal = decimal.setScale(type.getScale(), ROUND_UNNECESSARY);
        if (decimal.precision() > type.getPrecision()) {
            throw new PrestoException(HIVE_INVALID_PARTITION_VALUE, format("Invalid partition value '%s' for %s partition key: %s", value, type.toString(), name));
        }
        return decimal;
    }
    catch (NumberFormatException | ArithmeticException e) {
        // ArithmeticException previously escaped as a raw exception instead of
        // the intended PrestoException; also preserve the cause now
        throw new PrestoException(HIVE_INVALID_PARTITION_VALUE, format("Invalid partition value '%s' for %s partition key: %s", value, type.toString(), name), e);
    }
}
/**
 * Converts a VARCHAR partition value to a Slice, enforcing the declared
 * length limit in code points.
 *
 * @throws PrestoException with HIVE_INVALID_PARTITION_VALUE if too long
 */
public static Slice varcharPartitionKey(String value, String name, Type columnType)
{
    Slice partitionKey = Slices.utf8Slice(value);
    VarcharType varcharType = (VarcharType) columnType;
    if (SliceUtf8.countCodePoints(partitionKey) > varcharType.getLength()) {
        throw new PrestoException(HIVE_INVALID_PARTITION_VALUE, format("Invalid partition value '%s' for %s partition key: %s", value, columnType.toString(), name));
    }
    return partitionKey;
}

/**
 * Converts a CHAR partition value to a Slice (trailing spaces trimmed,
 * per CHAR semantics), enforcing the declared length limit in code points.
 *
 * @throws PrestoException with HIVE_INVALID_PARTITION_VALUE if too long
 */
public static Slice charPartitionKey(String value, String name, Type columnType)
{
    Slice partitionKey = trimSpaces(Slices.utf8Slice(value));
    CharType charType = (CharType) columnType;
    if (SliceUtf8.countCodePoints(partitionKey) > charType.getLength()) {
        throw new PrestoException(HIVE_INVALID_PARTITION_VALUE, format("Invalid partition value '%s' for %s partition key: %s", value, columnType.toString(), name));
    }
    return partitionKey;
}
/**
 * Extracts the schema/table name from a connector table handle.
 */
public static SchemaTableName schemaTableName(ConnectorTableHandle tableHandle)
{
    return ((HiveTableHandle) tableHandle).getSchemaTableName();
}

/**
 * Builds the full list of column handles for a table: data columns first,
 * then partition keys (matching Hive's column ordering), then the hidden
 * $path column and, for bucketed tables, the hidden $bucket column.
 */
public static List<HiveColumnHandle> hiveColumnHandles(String connectorId, Table table)
{
    ImmutableList.Builder<HiveColumnHandle> columns = ImmutableList.builder();
    // add the data fields first
    columns.addAll(getRegularColumnHandles(connectorId, table));
    // add the partition keys last (like Hive does)
    columns.addAll(getPartitionKeyColumnHandles(connectorId, table));
    // add hidden columns
    columns.add(pathColumnHandle(connectorId));
    if (table.getStorage().getBucketProperty().isPresent()) {
        columns.add(bucketColumnHandle(connectorId));
    }
    return columns.build();
}
/**
 * Builds handles for the table's data (non-partition) columns, silently
 * skipping columns whose Hive type is unsupported. Note the Hive column
 * index still advances for skipped columns, keeping file offsets correct.
 */
public static List<HiveColumnHandle> getRegularColumnHandles(String connectorId, Table table)
{
    ImmutableList.Builder<HiveColumnHandle> columns = ImmutableList.builder();
    int hiveColumnIndex = 0;
    for (Column field : table.getDataColumns()) {
        // ignore unsupported types rather than failing
        HiveType hiveType = field.getType();
        if (hiveType.isSupportedType()) {
            columns.add(new HiveColumnHandle(connectorId, field.getName(), hiveType, hiveType.getTypeSignature(), hiveColumnIndex, REGULAR, field.getComment()));
        }
        hiveColumnIndex++;
    }
    return columns.build();
}

/**
 * Builds handles for the table's partition-key columns. Unlike data
 * columns, an unsupported partition-key type is a hard error. Partition
 * keys use a Hive column index of -1 since they are not stored in files.
 *
 * @throws PrestoException NOT_SUPPORTED for unsupported partition-key types
 */
public static List<HiveColumnHandle> getPartitionKeyColumnHandles(String connectorId, Table table)
{
    ImmutableList.Builder<HiveColumnHandle> columns = ImmutableList.builder();
    List<Column> partitionKeys = table.getPartitionColumns();
    for (Column field : partitionKeys) {
        HiveType hiveType = field.getType();
        if (!hiveType.isSupportedType()) {
            throw new PrestoException(NOT_SUPPORTED, format("Unsupported Hive type %s found in partition keys of table %s.%s", hiveType, table.getDatabaseName(), table.getTableName()));
        }
        columns.add(new HiveColumnHandle(connectorId, field.getName(), hiveType, hiveType.getTypeSignature(), -1, PARTITION_KEY, field.getComment()));
    }
    return columns.build();
}
public static Slice base64Decode(byte[] bytes)
{
return Slices.wrappedBuffer(Base64.getDecoder().decode(bytes));
}
/**
 * Precondition helper: throws a PrestoException with the given error code
 * and formatted message when {@code condition} is false; otherwise does
 * nothing. The message is only formatted on failure.
 */
public static void checkCondition(boolean condition, ErrorCodeSupplier errorCode, String formatString, Object... args)
{
    if (condition) {
        return;
    }
    throw new PrestoException(errorCode, format(formatString, args));
}
/**
 * Returns the "extra info" text shown for a column: the literal
 * "partition key" for partition columns, or null for regular columns.
 */
@Nullable
public static String columnExtraInfo(boolean partitionKey)
{
    if (partitionKey) {
        return "partition key";
    }
    return null;
}
/**
 * Extracts the partition values from a Hive partition name of the form
 * {@code key1=value1/key2=value2/...}. Each value is path-unescaped via
 * {@code unescapePathName}. Keys are discarded; only values are returned,
 * in order of appearance.
 *
 * @param partitionName the '/'-separated, '='-delimited partition name
 * @return the unescaped partition values
 */
public static List<String> toPartitionValues(String partitionName)
{
    // mimics Warehouse.makeValsFromName
    ImmutableList.Builder<String> resultBuilder = ImmutableList.builder();
    int start = 0;
    while (true) {
        // advance to the next '=' separating a key from its value
        while (start < partitionName.length() && partitionName.charAt(start) != '=') {
            start++;
        }
        // step past the '='; when no '=' was found, start is now length + 1
        start++;
        int end = start;
        // the value runs until the next '/' or the end of the string
        while (end < partitionName.length() && partitionName.charAt(end) != '/') {
            end++;
        }
        // start > length means the scan above ran off the end without finding '='
        if (start > partitionName.length()) {
            break;
        }
        resultBuilder.add(unescapePathName(partitionName.substring(start, end)));
        start = end + 1;
    }
    return resultBuilder.build();
}
/**
 * Resolves the constant value for a prefilled column: a partition key's
 * value when one is supplied, otherwise the file path or bucket number for
 * the corresponding hidden columns.
 *
 * @throws PrestoException with NOT_SUPPORTED for any other hidden column
 */
public static String getPrefilledColumnValue(HiveColumnHandle columnHandle, HivePartitionKey partitionKey, Path path, OptionalInt bucketNumber)
{
    // a supplied partition key always wins
    if (partitionKey != null) {
        return partitionKey.getValue();
    }
    else if (isPathColumnHandle(columnHandle)) {
        return path.toString();
    }
    else if (isBucketColumnHandle(columnHandle)) {
        return String.valueOf(bucketNumber.getAsInt());
    }
    throw new PrestoException(NOT_SUPPORTED, "unsupported hidden column: " + columnHandle);
}
/**
 * Closes the cursor while an exception is already in flight: any
 * RuntimeException thrown by {@code close()} is attached to
 * {@code throwable} as a suppressed exception instead of propagating.
 */
public static void closeWithSuppression(RecordCursor recordCursor, Throwable throwable)
{
    requireNonNull(recordCursor, "recordCursor is null");
    requireNonNull(throwable, "throwable is null");
    try {
        recordCursor.close();
    }
    catch (RuntimeException e) {
        // adding an exception as its own suppressed exception is not permitted
        if (e != throwable) {
            throwable.addSuppressed(e);
        }
    }
}
}
|
|
/*
* Copyright 2010 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.eclipse.editors.rete.model;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.drools.core.spi.Constraint;
import org.drools.eclipse.reteoo.AccumulateNodeVertex;
import org.drools.eclipse.reteoo.AlphaNodeVertex;
import org.drools.eclipse.reteoo.BaseVertex;
import org.drools.eclipse.reteoo.ConditionalBranchNodeVertex;
import org.drools.eclipse.reteoo.EntryPointNodeVertex;
import org.drools.eclipse.reteoo.EvalConditionNodeVertex;
import org.drools.eclipse.reteoo.ExistsNodeVertex;
import org.drools.eclipse.reteoo.FromNodeVertex;
import org.drools.eclipse.reteoo.JoinNodeVertex;
import org.drools.eclipse.reteoo.LeftInputAdapterNodeVertex;
import org.drools.eclipse.reteoo.NotNodeVertex;
import org.drools.eclipse.reteoo.ObjectTypeNodeVertex;
import org.drools.eclipse.reteoo.PropagationQueuingNodeVertex;
import org.drools.eclipse.reteoo.QueryElementNodeVertex;
import org.drools.eclipse.reteoo.QueryRiaFixerNodeVertex;
import org.drools.eclipse.reteoo.QueryTerminalNodeVertex;
import org.drools.eclipse.reteoo.ReteVertex;
import org.drools.eclipse.reteoo.RightInputAdapterNodeVertex;
import org.drools.eclipse.reteoo.RuleTerminalNodeVertex;
import org.drools.eclipse.reteoo.TimerNodeVertex;
import org.drools.eclipse.reteoo.WindowNodeVertex;
import org.eclipse.ui.views.properties.IPropertyDescriptor;
import org.eclipse.ui.views.properties.IPropertySource;
import org.eclipse.ui.views.properties.PropertyDescriptor;
/**
* Providing IPropertySource for property sheets that show
* rete graph vertex properties.
*/
/**
 * Provides an {@link IPropertySource} for property sheets that show rete
 * graph vertex properties.
 * <p>
 * The applicable property descriptors are determined once, at construction
 * time, from the concrete {@link BaseVertex} subtype; the corresponding
 * values are cached in {@link #values}, keyed by descriptor id. This source
 * is strictly read-only: {@link #setPropertyValue(Object, Object)} and
 * {@link #resetPropertyValue(Object)} are intentionally no-ops.
 * <p>
 * FIX: setPropertyValue(Object, Object) previously delegated to itself,
 * causing unbounded recursion (StackOverflowError) on any attempt to edit a
 * property from the sheet; it is now an explicit no-op.
 */
public class VertexPropertySource
    implements
    IPropertySource {

    private static final String VERTEX_FROM = "From BaseVertex";
    private static final String VERTEX_ACCUMULATE = "Accumulate BaseVertex";
    private static final String VERTEX_COLLECT = "Collect BaseVertex";
    private static final String VERTEX_EXISTS = "Exists BaseVertex";
    private static final String VERTEX_TERMINAL = "Terminal BaseVertex";
    private static final String VERTEX_QUERY_TERMINAL = "Query Terminal BaseVertex";
    private static final String VERTEX_EVAL_CONDITION = "Eval Condition BaseVertex";
    private static final String VERTEX_NOT = "Not BaseVertex";
    private static final String VERTEX_JOIN = "Join BaseVertex";
    private static final String VERTEX_RIGHT_INPUT_ADAPTER = "Right Input Adapter BaseVertex";
    private static final String VERTEX_LEFT_INPUT_ADAPTER = "Left Input Adapter BaseVertex";
    private static final String VERTEX_ALPHA = "Alpha BaseVertex";
    private static final String VERTEX_OBJECT_TYPE = "Object Type BaseVertex";
    private static final String VERTEX_RETE = "Rete BaseVertex";
    private static final String VERTEX_ENTRY_POINT = "Entry Point BaseVertex";
    private static final String VERTEX_PROPAGATION_QUEUING = "Propagation Queuing BaseVertex";
    private static final String VERTEX_CONDITIONAL_BRANCH = "Conditional Branch BaseVertex";
    private static final String VERTEX_QUERY_ELEMENT = "Query Element BaseVertex";
    private static final String VERTEX_QUERY_RIA_FIXER = "Query Ria Fixer BaseVertex";
    private static final String VERTEX_TIMER = "Timer BaseVertex";
    private static final String VERTEX_WINDOW = "Window BaseVertex";

    private static final String CONSTRAINT_CAP = "Constraint";
    private static final String CONSTRAINT = "constraint";

    public String ID_ROOT = "vertex"; //$NON-NLS-1$
    public String ID_ID = "id"; //$NON-NLS-1$
    public String ID_HTML = "html"; //$NON-NLS-1$

    // Descriptors applicable to the wrapped vertex; filled by initProperties().
    private IPropertyDescriptor[] descriptors;

    private final IPropertyDescriptor PROP_NAME = new PropertyDescriptor( "name", "Name" );
    private final IPropertyDescriptor PROP_ID = new PropertyDescriptor( "id", "ID" );
    private final IPropertyDescriptor PROP_RULE = new PropertyDescriptor( "rule", "Rule" );
    private final IPropertyDescriptor PROP_QUERY = new PropertyDescriptor( "query", "Query" );

    // Alpha-specific
    private final IPropertyDescriptor PROP_ALPHA_FIELD_NAME = new PropertyDescriptor( "fieldName", "Field Name" );
    private final IPropertyDescriptor PROP_ALPHA_EVALUATOR = new PropertyDescriptor( "evaluator", "Evaluator" );
    private final IPropertyDescriptor PROP_ALPHA_VALUE = new PropertyDescriptor( "value", "Value" );

    // ObjectType specific
    private final IPropertyDescriptor PROP_OBJ_TYPE = new PropertyDescriptor( "objectType", "Object Type" );
    private final IPropertyDescriptor PROP_EXP_OFFSET = new PropertyDescriptor( "expirationOffset", "Expiration Offset" );

    // EntryPoint specific
    private final IPropertyDescriptor PROP_ENTRY_POINT_NAME = new PropertyDescriptor( "entryPointName", "Entry Point Name" );

    private final static String CAT_GENERAL = "General";
    private final static String CAT_OTHER = "Other";

    protected BaseVertex vertex = null;

    // Property values keyed by descriptor id.
    private Map<String, NodeValue> values = new HashMap<String, NodeValue>();

    /**
     * Constructor initializing properties from <code>vertex</code>.
     *
     * @param vertex source vertex for this property source
     */
    public VertexPropertySource(BaseVertex vertex) {
        this.vertex = vertex;
        initProperties( vertex );
    }

    /**
     * Populates the descriptor list and value map according to the runtime
     * type of the vertex. The order of the instanceof checks matters where
     * the vertex types are related by inheritance.
     */
    final private void initProperties(BaseVertex vertex) {
        List<IPropertyDescriptor> descriptorList = new ArrayList<IPropertyDescriptor>();
        if ( vertex instanceof ExistsNodeVertex ) {
            initExistsNodeProperties( (ExistsNodeVertex) vertex, descriptorList, values );
        } else if ( vertex instanceof FromNodeVertex ) {
            initFromNodeProperties( (FromNodeVertex) vertex, descriptorList, values );
        } else if ( vertex instanceof AccumulateNodeVertex ) {
            initAccumulateNodeProperties( (AccumulateNodeVertex) vertex, descriptorList, values );
        } else if ( vertex instanceof RuleTerminalNodeVertex ) {
            initTerminalNodeProperties( (RuleTerminalNodeVertex) vertex, descriptorList, values );
        } else if ( vertex instanceof QueryTerminalNodeVertex ) {
            initQueryTerminalNodeProperties( (QueryTerminalNodeVertex) vertex, descriptorList, values );
        } else if ( vertex instanceof EvalConditionNodeVertex ) {
            initEvalConditionNodeProperties( (EvalConditionNodeVertex) vertex, descriptorList, values );
        } else if ( vertex instanceof NotNodeVertex ) {
            initNotNodeProperties( (NotNodeVertex) vertex, descriptorList, values );
        } else if ( vertex instanceof JoinNodeVertex ) {
            initJoinNodeProperties( (JoinNodeVertex) vertex, descriptorList, values );
        } else if ( vertex instanceof RightInputAdapterNodeVertex ) {
            initRightInputAdapterNodeProperties( descriptorList, values );
        } else if ( vertex instanceof LeftInputAdapterNodeVertex ) {
            initLeftInputAdapterNodeProperties( (LeftInputAdapterNodeVertex) vertex, descriptorList, values );
        } else if ( vertex instanceof AlphaNodeVertex ) {
            initAlphaNodeProperties( (AlphaNodeVertex) vertex, descriptorList, values );
        } else if ( vertex instanceof ObjectTypeNodeVertex ) {
            initObjectTypeNodeProperties( (ObjectTypeNodeVertex) vertex, descriptorList, values );
        } else if ( vertex instanceof ReteVertex ) {
            initReteNodeProperties( (ReteVertex) vertex, descriptorList, values );
        } else if ( vertex instanceof PropagationQueuingNodeVertex ) {
            initPropagationQueuingNodeProperties( (PropagationQueuingNodeVertex) vertex, descriptorList, values );
        } else if ( vertex instanceof EntryPointNodeVertex ) {
            initEntryPointNodeProperties( (EntryPointNodeVertex) vertex, descriptorList, values );
        } else if ( vertex instanceof ConditionalBranchNodeVertex ) {
            initConditionalBranchNodeProperties( (ConditionalBranchNodeVertex) vertex, descriptorList, values );
        } else if ( vertex instanceof QueryElementNodeVertex ) {
            initQueryElementNodeProperties( (QueryElementNodeVertex) vertex, descriptorList, values );
        } else if ( vertex instanceof QueryRiaFixerNodeVertex ) {
            initQueryRiaFixerNodeProperties( (QueryRiaFixerNodeVertex) vertex, descriptorList, values );
        } else if ( vertex instanceof TimerNodeVertex ) {
            initTimerNodeProperties( (TimerNodeVertex) vertex, descriptorList, values );
        } else if ( vertex instanceof WindowNodeVertex ) {
            initWindowNodeProperties( (WindowNodeVertex) vertex, descriptorList, values );
        }
        descriptors = descriptorList.toArray( new IPropertyDescriptor[0] );
    }

    private void initExistsNodeProperties(ExistsNodeVertex vertex,
                                          List<IPropertyDescriptor> descriptorList,
                                          Map<String, NodeValue> valueMap) {
        addProperty( PROP_NAME, VERTEX_EXISTS, descriptorList, valueMap );
        addProperty( PROP_ID, Integer.toString( vertex.getId() ), descriptorList, valueMap );
    }

    private void initAccumulateNodeProperties(AccumulateNodeVertex vertex,
                                              List<IPropertyDescriptor> descriptorList,
                                              Map<String, NodeValue> valueMap) {
        addProperty( PROP_NAME, VERTEX_ACCUMULATE, descriptorList, valueMap );
        addProperty( PROP_ID, Integer.toString( vertex.getId() ), descriptorList, valueMap );
    }

    private void initFromNodeProperties(FromNodeVertex vertex,
                                        List<IPropertyDescriptor> descriptorList,
                                        Map<String, NodeValue> valueMap) {
        addProperty( PROP_NAME, VERTEX_FROM, descriptorList, valueMap );
        addProperty( PROP_ID, Integer.toString( vertex.getId() ), descriptorList, valueMap );
    }

    private void initReteNodeProperties(ReteVertex vertex,
                                        List<IPropertyDescriptor> descriptorList,
                                        Map<String, NodeValue> valueMap) {
        addProperty( PROP_NAME, VERTEX_RETE, descriptorList, valueMap );
        addProperty( PROP_ID, Integer.toString( vertex.getId() ), descriptorList, valueMap );
    }

    private void initObjectTypeNodeProperties(ObjectTypeNodeVertex vertex,
                                              List<IPropertyDescriptor> descriptorList,
                                              Map<String, NodeValue> valueMap) {
        addProperty( PROP_NAME, VERTEX_OBJECT_TYPE, descriptorList, valueMap );
        addProperty( PROP_OBJ_TYPE, vertex.getObjectType(), descriptorList, valueMap );
        addProperty( PROP_EXP_OFFSET, vertex.getExpirationOffset(), descriptorList, valueMap );
    }

    private void initAlphaNodeProperties(AlphaNodeVertex vertex,
                                         List<IPropertyDescriptor> descriptorList,
                                         Map<String, NodeValue> valueMap) {
        addProperty( PROP_NAME, VERTEX_ALPHA, descriptorList, valueMap );
        addProperty( PROP_ALPHA_FIELD_NAME, vertex.getFieldName(), descriptorList, valueMap );
        addProperty( PROP_ALPHA_EVALUATOR, vertex.getEvaluator(), descriptorList, valueMap );
        addProperty( PROP_ALPHA_VALUE, vertex.getValue(), descriptorList, valueMap );
        // the constraint property only exists when the node carries one
        Constraint constraint = vertex.getConstraint();
        if ( constraint == null ) {
            return;
        }
        IPropertyDescriptor prop = new PropertyDescriptor( CONSTRAINT, CONSTRAINT_CAP );
        addProperty( prop, constraint.toString(), descriptorList, valueMap );
    }

    private void initLeftInputAdapterNodeProperties(LeftInputAdapterNodeVertex vertex,
                                                    List<IPropertyDescriptor> descriptorList,
                                                    Map<String, NodeValue> valueMap) {
        addProperty( PROP_NAME, VERTEX_LEFT_INPUT_ADAPTER, descriptorList, valueMap );
    }

    private void initRightInputAdapterNodeProperties(List<IPropertyDescriptor> descriptorList,
                                                     Map<String, NodeValue> valueMap) {
        addProperty( PROP_NAME, VERTEX_RIGHT_INPUT_ADAPTER, descriptorList, valueMap );
    }

    private void initJoinNodeProperties(JoinNodeVertex vertex,
                                        List<IPropertyDescriptor> descriptorList,
                                        Map<String, NodeValue> valueMap) {
        addProperty( PROP_NAME, VERTEX_JOIN, descriptorList, valueMap );
        addProperty( PROP_ID, Integer.toString( vertex.getId() ), descriptorList, valueMap );
        // each join constraint becomes its own numbered property in "Other"
        Constraint[] constraints = vertex.getConstraints();
        if ( constraints == null ) {
            return;
        }
        for ( int i = 0, length = constraints.length; i < length; i++ ) {
            PropertyDescriptor prop = new PropertyDescriptor( CONSTRAINT + (i + 1),
                                                              CONSTRAINT_CAP + " " + (i + 1) );
            addOther( prop, constraints[i].toString(), descriptorList, valueMap );
        }
    }

    private void initNotNodeProperties(NotNodeVertex vertex,
                                       List<IPropertyDescriptor> descriptorList,
                                       Map<String, NodeValue> valueMap) {
        addProperty( PROP_NAME, VERTEX_NOT, descriptorList, valueMap );
        addProperty( PROP_ID, Integer.toString( vertex.getId() ), descriptorList, valueMap );
    }

    private void initEvalConditionNodeProperties(EvalConditionNodeVertex vertex,
                                                 List<IPropertyDescriptor> descriptorList,
                                                 Map<String, NodeValue> valueMap) {
        addProperty( PROP_NAME, VERTEX_EVAL_CONDITION, descriptorList, valueMap );
        addProperty( PROP_ID, Integer.toString( vertex.getId() ), descriptorList, valueMap );
    }

    private void initTerminalNodeProperties(RuleTerminalNodeVertex node,
                                            List<IPropertyDescriptor> descriptorList,
                                            Map<String, NodeValue> valueMap) {
        addProperty( PROP_NAME, VERTEX_TERMINAL, descriptorList, valueMap );
        addProperty( PROP_ID, Integer.toString( node.getId() ), descriptorList, valueMap );
        addProperty( PROP_RULE, node.getRuleName(), descriptorList, valueMap );
    }

    private void initQueryTerminalNodeProperties(QueryTerminalNodeVertex node,
                                                 List<IPropertyDescriptor> descriptorList,
                                                 Map<String, NodeValue> valueMap) {
        addProperty( PROP_NAME, VERTEX_QUERY_TERMINAL, descriptorList, valueMap );
        addProperty( PROP_ID, Integer.toString( node.getId() ), descriptorList, valueMap );
        addProperty( PROP_QUERY, node.getQueryName(), descriptorList, valueMap );
    }

    private void initPropagationQueuingNodeProperties(PropagationQueuingNodeVertex vertex,
                                                      List<IPropertyDescriptor> descriptorList,
                                                      Map<String, NodeValue> valueMap) {
        addProperty( PROP_NAME, VERTEX_PROPAGATION_QUEUING, descriptorList, valueMap );
        addProperty( PROP_ID, Integer.toString( vertex.getId() ), descriptorList, valueMap );
    }

    private void initEntryPointNodeProperties(EntryPointNodeVertex vertex,
                                              List<IPropertyDescriptor> descriptorList,
                                              Map<String, NodeValue> valueMap) {
        addProperty( PROP_NAME, VERTEX_ENTRY_POINT, descriptorList, valueMap );
        addProperty( PROP_ENTRY_POINT_NAME, vertex.getEntryPointName(), descriptorList, valueMap );
        addProperty( PROP_ID, Integer.toString( vertex.getId() ), descriptorList, valueMap );
    }

    private void initConditionalBranchNodeProperties(ConditionalBranchNodeVertex vertex,
                                                     List<IPropertyDescriptor> descriptorList,
                                                     Map<String, NodeValue> valueMap) {
        addProperty( PROP_NAME, VERTEX_CONDITIONAL_BRANCH, descriptorList, valueMap );
        addProperty( PROP_ID, Integer.toString( vertex.getId() ), descriptorList, valueMap );
    }

    private void initQueryElementNodeProperties(QueryElementNodeVertex vertex,
                                                List<IPropertyDescriptor> descriptorList,
                                                Map<String, NodeValue> valueMap) {
        addProperty( PROP_NAME, VERTEX_QUERY_ELEMENT, descriptorList, valueMap );
        addProperty( PROP_ID, Integer.toString( vertex.getId() ), descriptorList, valueMap );
    }

    private void initQueryRiaFixerNodeProperties(QueryRiaFixerNodeVertex vertex,
                                                 List<IPropertyDescriptor> descriptorList,
                                                 Map<String, NodeValue> valueMap) {
        addProperty( PROP_NAME, VERTEX_QUERY_RIA_FIXER, descriptorList, valueMap );
        addProperty( PROP_ID, Integer.toString( vertex.getId() ), descriptorList, valueMap );
    }

    private void initTimerNodeProperties(TimerNodeVertex vertex,
                                         List<IPropertyDescriptor> descriptorList,
                                         Map<String, NodeValue> valueMap) {
        addProperty( PROP_NAME, VERTEX_TIMER, descriptorList, valueMap );
        addProperty( PROP_ID, Integer.toString( vertex.getId() ), descriptorList, valueMap );
    }

    private void initWindowNodeProperties(WindowNodeVertex vertex,
                                          List<IPropertyDescriptor> descriptorList,
                                          Map<String, NodeValue> valueMap) {
        addProperty( PROP_NAME, VERTEX_WINDOW, descriptorList, valueMap );
        addProperty( PROP_ID, Integer.toString( vertex.getId() ), descriptorList, valueMap );
    }

    /** Registers a descriptor/value pair under the "General" category. */
    private void addProperty(IPropertyDescriptor field,
                             String value,
                             List<IPropertyDescriptor> descriptorList,
                             Map<String, NodeValue> valueMap) {
        descriptorList.add( field );
        valueMap.put( field.getId().toString(),
                      new NodeValue( CAT_GENERAL, value ) );
        if ( field instanceof PropertyDescriptor ) {
            ((PropertyDescriptor) field).setAlwaysIncompatible( true );
            ((PropertyDescriptor) field).setCategory( CAT_GENERAL );
        }
    }

    /** Registers a descriptor/value pair under the "Other" category. */
    private void addOther(IPropertyDescriptor field,
                          String value,
                          List<IPropertyDescriptor> descriptorList,
                          Map<String, NodeValue> valueMap) {
        descriptorList.add( field );
        valueMap.put( field.getId().toString(),
                      new NodeValue( CAT_OTHER, value ) );
        if ( field instanceof PropertyDescriptor ) {
            ((PropertyDescriptor) field).setAlwaysIncompatible( true );
            ((PropertyDescriptor) field).setCategory( CAT_OTHER );
        }
    }

    /* (non-Javadoc)
     * @see org.eclipse.ui.views.properties.IPropertySource#getEditableValue()
     */
    public Object getEditableValue() {
        return null;
    }

    /* (non-Javadoc)
     * @see org.eclipse.ui.views.properties.IPropertySource#getPropertyValue(java.lang.Object)
     */
    public Object getPropertyValue(Object propName) {
        return getPropertyValue( (String) propName );
    }

    /**
     * Returns the cached value for the given property id, or
     * <code>null</code> when the id is unknown (previously threw
     * NullPointerException for unknown ids).
     *
     * @param propName property id
     * @return property value, or null when not present
     */
    public Object getPropertyValue(String propName) {
        NodeValue nodeValue = values.get( propName );
        return nodeValue == null ? null : nodeValue.value;
    }

    /**
     * Does nothing: this property source is read-only.
     * <p>
     * FIX: this method previously called itself, which recursed without
     * bound and crashed with a StackOverflowError whenever the property
     * sheet attempted to set a value.
     */
    public void setPropertyValue(Object propName,
                                 Object value) {
        // intentionally empty - vertex properties cannot be edited
    }

    /* (non-Javadoc)
     * @see org.eclipse.ui.views.properties.IPropertySource#getPropertyDescriptors()
     */
    public IPropertyDescriptor[] getPropertyDescriptors() {
        return descriptors;
    }

    /**
     * Doing nothing as resetting properties from property sheet is not possible.
     */
    public void resetPropertyValue(Object propName) {
    }

    /* (non-Javadoc)
     * @see org.eclipse.ui.views.properties.IPropertySource#isPropertySet(java.lang.Object)
     */
    public boolean isPropertySet(Object propName) {
        return values.containsKey( propName );
    }

    /** Immutable category/value pair stored per property id. */
    private class NodeValue {
        final String category;
        final String value;

        NodeValue(String category,
                  String value) {
            this.category = category;
            this.value = value;
        }
    }
}
|
|
/*
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER.
*
* Copyright (c) 2008-2009, The KiWi Project (http://www.kiwi-project.eu)
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* - Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* - Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* - Neither the name of the KiWi Project nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*
* Contributor(s):
*
*
*/
package kiwi.test.service.importer;
import java.io.InputStream;
import javax.persistence.EntityManager;
import javax.persistence.Query;
import junit.framework.Assert;
import kiwi.api.content.ContentItemService;
import kiwi.api.entity.KiWiEntityManager;
import kiwi.api.importexport.importer.Importer;
import kiwi.api.triplestore.TripleStore;
import kiwi.api.user.UserService;
import kiwi.exception.UserExistsException;
import kiwi.model.content.ContentItem;
import kiwi.model.kbase.KiWiUriResource;
import kiwi.model.user.User;
import kiwi.test.base.KiWiTest;
import org.jboss.seam.Component;
import org.jboss.seam.log.Log;
import org.jboss.seam.log.Logging;
import org.jboss.seam.security.Identity;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
/**
* @author Sebastian Schaffert
*
*/
/**
 * Integration tests for the KiWi RSS/Atom feed importer: each test creates
 * an importing user, runs the importer on a bundled feed resource, and then
 * verifies the imported content items in separate Seam requests.
 *
 * @author Sebastian Schaffert
 */
public class RSSImporterTest extends KiWiTest {

    @BeforeMethod
    @Override
    public void begin() {
        super.begin();
        // each test starts from a fresh database seeded with the core KiWi ontology
        String[] ontologyFiles = { "ontology_kiwi.owl" };
        try {
            setupDatabase(ontologyFiles);
        } catch(Exception ex) {
            ex.printStackTrace();
        }
    }

    @AfterMethod
    @Override
    public void end() {
        try {
            clearDatabase();
        } catch(Exception ex) {
            ex.printStackTrace();
        }
        super.end();
    }

    @Test
    public void testImportRSS() throws Exception {
        // request 1: create the user who performs the import
        new FacesRequest() {
            @Override
            protected void invokeApplication() {
                Log logger = Logging.getLog(this.getClass());
                logger.info(">>>>>>>>>>> testImportRSS (1)");
                Identity.setSecurityEnabled(false);
                UserService userService = (UserService) Component.getInstance("userService");
                KiWiEntityManager kiwiEntityManager = (KiWiEntityManager) Component.getInstance("kiwiEntityManager");
                logger.info(">>>>>>>>>>> (2)");
                User importingUser = null;
                try {
                    importingUser = userService.createUser("wastl","Sebastian", "Schaffert", "WastlGotAPasswordToo");
                    kiwiEntityManager.persist(importingUser);
                    logger.info(">>>>>>>>>>> (2)");
                } catch(UserExistsException ex) {
                }
            }
        }.run();
        // request 2: run the importer on the bundled RSS feed
        new FacesRequest() {
            @Override
            protected void invokeApplication() throws Exception {
                Log logger = Logging.getLog(this.getClass());
                logger.info(">>>>>>>>>>> testImportRSS (3)");
                Identity.setSecurityEnabled(false);
                UserService userService = (UserService) Component.getInstance("userService");
                Importer rssImporter = (Importer) Component.getInstance("kiwi.service.importer.rss");
                logger.info(">>>>>>>>>>> testImportRSS (4)");
                User importingUser = userService.getUserByLogin("wastl");
                logger.info(">>>>>>>>>>> testImportRSS (5)");
                InputStream feedStream = this.getClass().getResourceAsStream("feed.rss");
                logger.info(">>>>>>>>>>> testImportRSS (6)");
                rssImporter.importData(feedStream, null, null, null, importingUser, null);
                logger.info(">>>>>>>>>>> testImportRSS (7)");
            }
        }.run();
        // request 3: verify the imported entries and tags
        new FacesRequest() {
            @Override
            protected void invokeApplication() {
                Log logger = Logging.getLog(this.getClass());
                logger.info(">>>>>>>>>>> testImportRSS (8)");
                Identity.setSecurityEnabled(false);
                KiWiEntityManager kiwiEntityManager = (KiWiEntityManager) Component.getInstance("kiwiEntityManager");
                EntityManager entityManager = (EntityManager) Component.getInstance("entityManager");
                TripleStore tripleStore = (TripleStore) Component.getInstance("tripleStore");
                UserService userService = (UserService) Component.getInstance("userService");
                ContentItemService contentItemService = (ContentItemService) Component.getInstance("contentItemService");
                logger.info(">>>>>>>>>>> testImportRSS (9)");
                User importingUser = userService.getUserByLogin("wastl");
                ContentItem campEntry = contentItemService.getContentItemByTitle("1st KiWi Programming Camp");
                logger.info(">>>>>>>>>>> testImportRSS (10)");
                // check whether an entry can be retrieved as content item
                Assert.assertNotNull(campEntry);
                Assert.assertEquals(importingUser, campEntry.getAuthor());
                logger.info(">>>>>>>>>>> testImportRSS (11)");
                // check that the kiwiknows tag has been created exactly once
                Query tagCountQuery = entityManager.createQuery("select count(ci) from ContentItem ci where ci.title='kiwiknows'");
                logger.info(">>>>>>>>>>> count ci title kiwiknows: #0 ", tagCountQuery.getSingleResult());
                Assert.assertEquals(1L,tagCountQuery.getSingleResult());
                logger.info(">>>>>>>>>>> testImportRSS (12)");
            }
        }.run();
    }

    @Test
    public void testImportReuters() throws Exception {
        // request 1: create the user who performs the import
        new FacesRequest() {
            @Override
            protected void invokeApplication() {
                Identity.setSecurityEnabled(false);
                UserService userService = (UserService) Component.getInstance("userService");
                KiWiEntityManager kiwiEntityManager = (KiWiEntityManager) Component.getInstance("kiwiEntityManager");
                User importingUser = null;
                try {
                    importingUser = userService.createUser("wastl","Sebastian", "Schaffert", "WastlGotAPasswordToo");
                    kiwiEntityManager.persist(importingUser);
                } catch(UserExistsException ex) {
                }
            }
        }.run();
        // request 2: run the importer on the bundled Atom feed
        new FacesRequest() {
            @Override
            protected void invokeApplication() throws Exception {
                Identity.setSecurityEnabled(false);
                UserService userService = (UserService) Component.getInstance("userService");
                Importer rssImporter = (Importer) Component.getInstance("kiwi.service.importer.rss");
                User importingUser = userService.getUserByLogin("wastl");
                InputStream feedStream = this.getClass().getResourceAsStream("reuters.atom");
                rssImporter.importData(feedStream, null, null, null, importingUser, null);
            }
        }.run();
        // request 3: verify the imported entry, its author and resource URI
        new FacesRequest() {
            @Override
            protected void invokeApplication() {
                Identity.setSecurityEnabled(false);
                KiWiEntityManager kiwiEntityManager = (KiWiEntityManager) Component.getInstance("kiwiEntityManager");
                EntityManager entityManager = (EntityManager) Component.getInstance("entityManager");
                TripleStore tripleStore = (TripleStore) Component.getInstance("tripleStore");
                UserService userService = (UserService) Component.getInstance("userService");
                ContentItemService contentItemService = (ContentItemService) Component.getInstance("contentItemService");
                User importingUser = userService.getUserByLogin("wastl");
                ContentItem newsEntry = contentItemService.getContentItemByTitle("Husband of Philippine massacre victim files candidacy");
                // check whether an entry can be retrieved as content item
                Assert.assertNotNull(newsEntry);
                Assert.assertEquals(importingUser, newsEntry.getAuthor());
                // check that the article URI is right
                Assert.assertEquals("http://feeds.reuters.com/~r/reuters/worldNews/~3/ZJD_5vxqjrc/idUSTRE5AO0MW20091127",
                                    ((KiWiUriResource)newsEntry.getResource()).getUri());
            }
        }.run();
    }
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* @author Nikolay A. Kuznetsov
* @version $Revision: 1.11.2.2 $
*/
package java.util.regex;
import java.util.BitSet;
/**
* User defined character classes ([abef]). See AbstractCharClass
* documentation for more details.
*
* @author Nikolay A. Kuznetsov
* @version $Revision: 1.11.2.2 $
*/
class CharClass extends AbstractCharClass {
    // Case-insensitivity flags: 'ci' adds ASCII upper/lower-case supplements,
    // 'uci' additionally applies Unicode case folding (see add(int)).
    boolean ci = false;
    boolean uci = false;
    // Flag indicates if there are unicode supplements
    boolean hasUCI = false;
    // When true, the corresponding set is stored in inverted (complemented)
    // form, so membership bits are cleared instead of set.
    boolean invertedSurrogates = false;
    boolean inverted = false;
    // Once true, 'bits' is no longer consulted directly; membership is
    // delegated to 'nonBitSet' (see add(AbstractCharClass)).
    boolean hideBits = false;
    // Membership bit set for characters added to this class.
    BitSet bits = new BitSet();
    // Functional fallback used when bit-set composition is no longer
    // possible; null until first needed.
    AbstractCharClass nonBitSet = null;
    /** Creates a case-sensitive, non-negated character class. */
    public CharClass() {
    }

    /**
     * Creates a character class with the given case-handling flags.
     *
     * @param ci  apply ASCII case-insensitive matching
     * @param uci additionally apply Unicode case folding
     */
    public CharClass(boolean ci, boolean uci) {
        this.ci = ci;
        this.uci = uci;
    }

    /**
     * Creates a character class with the given case-handling flags and
     * negation state (e.g. for "[^...]" constructions).
     *
     * @param negative whether the class is negated
     * @param ci       apply ASCII case-insensitive matching
     * @param uci      additionally apply Unicode case folding
     */
    public CharClass(boolean negative, boolean ci, boolean uci) {
        this(ci, uci);
        setNegative(negative);
    }
    /*
     * We can use this method safely even if nonBitSet != null
     * due to specific of range constructions in regular expressions.
     *
     * Adds a single code point to the class, honoring the case-insensitivity
     * flags and the inverted storage representation; returns this for chaining.
     */
    public CharClass add(int ch) {
        if (ci) {
            // ASCII letters additionally record their opposite-case
            // supplement so both cases match.
            if ((ch >= 'a' && ch <= 'z') || (ch >= 'A' && ch <= 'Z')) {
                if (!inverted) {
                    bits.set(Pattern.getSupplement((char) ch));
                } else {
                    bits.clear(Pattern.getSupplement((char) ch));
                }
            } else if (uci && ch > 128) {
                // Unicode case folding: normalize through an upper->lower
                // round trip before storing.
                hasUCI = true;
                ch = Character.toLowerCase(Character.toUpperCase(ch));
                // return this;
            }
        }
        // Surrogate code units are additionally tracked in a dedicated set,
        // offset by Character.MIN_SURROGATE.
        if (Lexer.isHighSurrogate(ch) || Lexer.isLowSurrogate(ch)) {
            if (!invertedSurrogates) {
                lowHighSurrogates.set(ch - Character.MIN_SURROGATE);
            } else {
                lowHighSurrogates.clear(ch - Character.MIN_SURROGATE);
            }
        }
        // In inverted representation membership is recorded by clearing bits.
        if (!inverted) {
            bits.set(ch);
        } else
            bits.clear(ch);
        if (!mayContainSupplCodepoints && Character.isSupplementaryCodePoint(ch)) {
            mayContainSupplCodepoints = true;
        }
        return this;
    }
    /*
     * The difference between add(AbstractCharClass) and union(AbstractCharClass)
     * is that add() is used for constructions like "[^abc\\d]"
     * (this pattern doesn't match "1")
     * while union is used for constructions like "[^abc[\\d]]"
     * (this pattern matches "1").
     *
     * The union is computed with boolean algebra over the (possibly
     * inverted) bit sets; the inline "A | B" style comments name the
     * identity applied on each branch. When the other class has no bit-set
     * representation (or this one has already switched), membership is
     * delegated to a composed 'nonBitSet' predicate instead.
     */
    public CharClass add(final AbstractCharClass cc) {
        if (!mayContainSupplCodepoints && cc.mayContainSupplCodepoints) {
            mayContainSupplCodepoints = true;
        }
        // Merge the surrogate tracking sets, keeping track of inversion.
        if (!invertedSurrogates) {
            //A | !B = ! ((A ^ B) & B)
            if (cc.altSurrogates) {
                lowHighSurrogates.xor(cc.getLowHighSurrogates());
                lowHighSurrogates.and(cc.getLowHighSurrogates());
                altSurrogates = !altSurrogates;
                invertedSurrogates = true;
                //A | B
            } else {
                lowHighSurrogates.or(cc.getLowHighSurrogates());
            }
        } else {
            //!A | !B = !(A & B)
            if (cc.altSurrogates) {
                lowHighSurrogates.and(cc.getLowHighSurrogates());
                //!A | B = !(A & !B)
            } else {
                lowHighSurrogates.andNot(cc.getLowHighSurrogates());
            }
        }
        // Merge the main bit sets while both sides still have one.
        if (!hideBits && cc.getBits() != null) {
            if (!inverted) {
                //A | !B = ! ((A ^ B) & B)
                if (cc.isNegative()) {
                    bits.xor(cc.getBits());
                    bits.and(cc.getBits());
                    alt = !alt;
                    inverted = true;
                    //A | B
                } else {
                    bits.or(cc.getBits());
                }
            } else {
                //!A | !B = !(A & B)
                if (cc.isNegative()) {
                    bits.and(cc.getBits());
                    //!A | B = !(A & !B)
                } else {
                    bits.andNot(cc.getBits());
                }
            }
        } else {
            // Bit-set composition is not possible: fall back to (or extend)
            // the functional 'nonBitSet' representation.
            final boolean curAlt = alt;
            if (nonBitSet == null) {
                if (curAlt && !inverted && bits.isEmpty()) {
                    // Nothing accumulated yet: delegate directly to cc.
                    nonBitSet = new AbstractCharClass() {
                        public boolean contains(int ch) {
                            return cc.contains(ch);
                        }
                    };
                    //alt = true;
                } else {
                    /*
                     * We keep the value of alt unchanged for
                     * constructions like [^[abc]fgb] by using
                     * the formula a ^ b == !a ^ !b.
                     */
                    if (curAlt) {
                        nonBitSet = new AbstractCharClass() {
                            public boolean contains(int ch) {
                                return !((curAlt ^ bits.get(ch))
                                        || ((curAlt ^ inverted) ^ cc.contains(ch)));
                            }
                        };
                        //alt = true
                    } else {
                        nonBitSet = new AbstractCharClass() {
                            public boolean contains(int ch) {
                                return (curAlt ^ bits.get(ch))
                                        || ((curAlt ^ inverted) ^ cc.contains(ch));
                            }
                        };
                        //alt = false
                    }
                }
                // From now on, membership queries must go through nonBitSet.
                hideBits = true;
            } else {
                // Chain onto the existing functional representation.
                final AbstractCharClass nb = nonBitSet;
                if (curAlt) {
                    nonBitSet = new AbstractCharClass() {
                        public boolean contains(int ch) {
                            return !(curAlt ^ (nb.contains(ch) || cc.contains(ch)));
                        }
                    };
                    //alt = true
                } else {
                    nonBitSet = new AbstractCharClass() {
                        public boolean contains(int ch) {
                            return curAlt ^ (nb.contains(ch) || cc.contains(ch));
                        }
                    };
                    //alt = false
                }
            }
        }
        return this;
    }
public CharClass add(int st, int end) {
if (st > end)
throw new IllegalArgumentException();
if (!ci
//no intersection with surrogate characters
&& (end < Character.MIN_SURROGATE
|| st > Character.MAX_SURROGATE)) {
if (!inverted) {
bits.set(st, end + 1);
} else {
bits.clear(st, end + 1);
}
} else {
for (int i = st; i < end + 1; i++) {
add(i);
}
}
return this;
}
/**
 * OR operation: merges {@code clazz} into this character class so the result
 * contains a symbol iff this class or {@code clazz} contained it.
 *
 * <p>Both the surrogate BitSet and the main BitSet may be stored negated
 * (flags {@code altSurrogates} / {@code alt}); the inline boolean-algebra
 * comments give the identity used for each negation combination. When the
 * other class is not BitSet-backed (getBits() == null) or this class already
 * switched representations ({@code hideBits}), the union is expressed as a
 * chained predicate stored in {@code nonBitSet}.
 *
 * @param clazz the character class to union into this one
 */
public void union(final AbstractCharClass clazz) {
    if (!mayContainSupplCodepoints
            && clazz.mayContainSupplCodepoints) {
        mayContainSupplCodepoints = true;
    }
    if (clazz.hasUCI())
        this.hasUCI = true;
    // Surrogate membership, under each combination of negation flags.
    if (altSurrogates ^ clazz.altSurrogates) {
        //!A | B = !(A & !B)
        if (altSurrogates) {
            lowHighSurrogates.andNot(clazz.getLowHighSurrogates());
        //A | !B = !((A ^ B) & B)
        } else {
            lowHighSurrogates.xor(clazz.getLowHighSurrogates());
            lowHighSurrogates.and(clazz.getLowHighSurrogates());
            altSurrogates = true;
        }
    } else {
        //!A | !B = !(A & B)
        if (altSurrogates) {
            lowHighSurrogates.and(clazz.getLowHighSurrogates());
        //A | B
        } else {
            lowHighSurrogates.or(clazz.getLowHighSurrogates());
        }
    }
    if (!hideBits && clazz.getBits() != null) {
        // Both classes still BitSet-backed: combine the bit sets directly,
        // tracking the resulting negation in 'alt'.
        if (alt ^ clazz.isNegative()) {
            //!A | B = !(A & !B)
            if (alt) {
                bits.andNot(clazz.getBits());
            //A | !B = !((A ^ B) & B)
            } else {
                bits.xor(clazz.getBits());
                bits.and(clazz.getBits());
                alt = true;
            }
        } else {
            //!A | !B = !(A & B)
            if (alt) {
                bits.and(clazz.getBits());
            //A | B
            } else {
                bits.or(clazz.getBits());
            }
        }
    } else {
        // Predicate-based fallback. 'curAlt' freezes the current negation
        // flag for the closures below (the field may change afterwards).
        final boolean curAlt = alt;
        if (nonBitSet == null) {
            if (!inverted && bits.isEmpty()) {
                // No bits accumulated yet: membership is decided by clazz alone.
                if (curAlt) {
                    nonBitSet = new AbstractCharClass() {
                        public boolean contains(int ch) {
                            return !clazz.contains(ch);
                        }
                    };
                    //alt = true
                } else {
                    nonBitSet = new AbstractCharClass() {
                        public boolean contains(int ch) {
                            return clazz.contains(ch);
                        }
                    };
                    //alt = false
                }
            } else {
                // Combine the (possibly negated) bits with clazz.
                if (curAlt) {
                    nonBitSet = new AbstractCharClass() {
                        public boolean contains(int ch) {
                            return !(clazz.contains(ch) || (curAlt ^ bits.get(ch)));
                        }
                    };
                    //alt = true
                } else {
                    nonBitSet = new AbstractCharClass() {
                        public boolean contains(int ch) {
                            return clazz.contains(ch) || (curAlt ^ bits.get(ch));
                        }
                    };
                    //alt = false
                }
            }
            // From now on getBits() returns null: the BitSet is stale.
            hideBits = true;
        } else {
            // Chain onto the existing predicate.
            final AbstractCharClass nb = nonBitSet;
            if (curAlt) {
                nonBitSet = new AbstractCharClass() {
                    public boolean contains(int ch) {
                        return !((curAlt ^ nb.contains(ch)) || clazz.contains(ch));
                    }
                };
                //alt = true
            } else {
                nonBitSet = new AbstractCharClass() {
                    public boolean contains(int ch) {
                        return (curAlt ^ nb.contains(ch)) || clazz.contains(ch);
                    }
                };
                //alt = false
            }
        }
    }
}
/**
 * AND operation: intersects {@code clazz} with this character class so the
 * result contains a symbol iff both classes contained it.
 *
 * <p>Mirrors {@link #union} structurally: BitSets may be stored negated
 * (flags {@code altSurrogates} / {@code alt}) and the inline comments give
 * the boolean identity applied per negation combination; when either side is
 * no longer BitSet-backed the intersection becomes a chained predicate in
 * {@code nonBitSet}.
 *
 * @param clazz the character class to intersect with this one
 */
public void intersection(final AbstractCharClass clazz) {
    if (!mayContainSupplCodepoints
            && clazz.mayContainSupplCodepoints) {
        mayContainSupplCodepoints = true;
    }
    if (clazz.hasUCI())
        this.hasUCI = true;
    // Surrogate membership, under each combination of negation flags.
    if (altSurrogates ^ clazz.altSurrogates) {
        //!A & B = ((A ^ B) & B)
        if (altSurrogates) {
            lowHighSurrogates.xor(clazz.getLowHighSurrogates());
            lowHighSurrogates.and(clazz.getLowHighSurrogates());
            altSurrogates = false;
        //A & !B
        } else {
            lowHighSurrogates.andNot(clazz.getLowHighSurrogates());
        }
    } else {
        //!A & !B = !(A | B)
        if (altSurrogates) {
            lowHighSurrogates.or(clazz.getLowHighSurrogates());
        //A & B
        } else {
            lowHighSurrogates.and(clazz.getLowHighSurrogates());
        }
    }
    if (!hideBits && clazz.getBits() != null) {
        // Both classes still BitSet-backed: combine bit sets directly,
        // tracking the resulting negation in 'alt'.
        if (alt ^ clazz.isNegative()) {
            //!A & B = ((A ^ B) & B)
            if (alt) {
                bits.xor(clazz.getBits());
                bits.and(clazz.getBits());
                alt = false;
            //A & !B
            } else {
                bits.andNot(clazz.getBits());
            }
        } else {
            //!A & !B = !(A | B)
            if (alt) {
                bits.or(clazz.getBits());
            //A & B
            } else {
                bits.and(clazz.getBits());
            }
        }
    } else {
        // Predicate-based fallback. 'curAlt' freezes the current negation
        // flag for the closures below (the field may change afterwards).
        final boolean curAlt = alt;
        if (nonBitSet == null) {
            if (!inverted && bits.isEmpty()) {
                // No bits accumulated yet: membership is decided by clazz alone.
                if (curAlt) {
                    nonBitSet = new AbstractCharClass() {
                        public boolean contains(int ch) {
                            return !clazz.contains(ch);
                        }
                    };
                    //alt = true
                } else {
                    nonBitSet = new AbstractCharClass() {
                        public boolean contains(int ch) {
                            return clazz.contains(ch);
                        }
                    };
                    //alt = false
                }
            } else {
                // Combine the (possibly negated) bits with clazz.
                if (curAlt) {
                    nonBitSet = new AbstractCharClass() {
                        public boolean contains(int ch) {
                            return !(clazz.contains(ch) && (curAlt ^ bits.get(ch)));
                        }
                    };
                    //alt = true
                } else {
                    nonBitSet = new AbstractCharClass() {
                        public boolean contains(int ch) {
                            return clazz.contains(ch) && (curAlt ^ bits.get(ch));
                        }
                    };
                    //alt = false
                }
            }
            // From now on getBits() returns null: the BitSet is stale.
            hideBits = true;
        } else {
            // Chain onto the existing predicate.
            final AbstractCharClass nb = nonBitSet;
            if (curAlt) {
                nonBitSet = new AbstractCharClass() {
                    public boolean contains(int ch) {
                        return !((curAlt ^ nb.contains(ch)) && clazz.contains(ch));
                    }
                };
                //alt = true
            } else {
                nonBitSet = new AbstractCharClass() {
                    public boolean contains(int ch) {
                        return (curAlt ^ nb.contains(ch)) && clazz.contains(ch);
                    }
                };
                //alt = false
            }
        }
    }
}
/**
 * Returns <code>true</code> if this character class contains the specified
 * symbol, <code>false</code> otherwise. Note: the #setNegative() method
 * changes the meaning of the contains method.
 *
 * @param ch the symbol to test
 * @return <code>true</code> if this character class contains the specified
 *         symbol
 *
 * TODO: the <code>character class</code> implementation is currently based
 * on a BitSet, but it may be changed to a combined BitSet (for the first 256
 * symbols) plus a red-black tree for the rest of UTF.
 */
/**
 * Tests whether this character class contains the given code point,
 * honouring the negation flag {@code alt}.
 *
 * @param ch the code point to test
 * @return {@code true} if the class contains {@code ch}
 */
public boolean contains(int ch) {
    // Membership comes from the BitSet while it is authoritative, otherwise
    // from the predicate representation; 'alt' flips the result when the
    // class is negated.
    boolean member = (nonBitSet == null) ? bits.get(ch) : nonBitSet.contains(ch);
    return alt ^ member;
}
/**
 * Returns the backing BitSet, or {@code null} once this class has switched
 * to the predicate-based ({@code nonBitSet}) representation and the BitSet
 * is no longer authoritative.
 */
protected BitSet getBits() {
    return hideBits ? null : bits;
}
/**
 * Returns the BitSet tracking surrogate-range membership for this class
 * (its interpretation depends on the {@code altSurrogates} flag).
 */
protected BitSet getLowHighSurrogates() {
    return this.lowHighSurrogates;
}
/**
 * Returns a compact, standalone snapshot of this character class.
 *
 * <p>While the class is still BitSet-backed the current bits are captured in
 * a lightweight anonymous class (detached from this instance's future
 * mutations); once the predicate representation is in use, this instance
 * itself is returned.
 */
public AbstractCharClass getInstance() {
    if (nonBitSet == null) {
        final BitSet bs = getBits();
        AbstractCharClass res = new AbstractCharClass() {
            public boolean contains(int ch) {
                // 'this.alt' is the anonymous instance's own negation flag,
                // initialised via setNegative(...) below — not the enclosing
                // class's field.
                return this.alt ^ bs.get(ch);
            }
            // Debug rendering: set code points separated by '|'.
            public String toString() {
                StringBuffer temp = new StringBuffer();
                for (int i = bs.nextSetBit(0); i >= 0; i = bs
                        .nextSetBit(i + 1)) {
                    temp.append(Character.toChars(i));
                    temp.append('|');
                }
                if (temp.length() > 0)
                    temp.deleteCharAt(temp.length() - 1);
                return temp.toString();
            }
        };
        return res.setNegative(isNegative());
    } else {
        return this;
    }
}
/**
 * Debug helper: renders every code point currently set in {@code bits},
 * separated by {@code '|'}. Note: ignores the predicate representation and
 * negation flags — for debugging purposes only.
 */
public String toString() {
    StringBuilder out = new StringBuilder();
    String sep = "";
    for (int i = bits.nextSetBit(0); i >= 0; i = bits.nextSetBit(i + 1)) {
        // Character.toChars expands supplementary code points to a
        // surrogate pair.
        out.append(sep).append(Character.toChars(i));
        sep = "|";
    }
    return out.toString();
}
/**
 * Returns whether this character class requires Unicode case-insensitive
 * handling (set while building the class — see union/intersection).
 */
public boolean hasUCI() {
    return this.hasUCI;
}
}
|
|
package org.nkjmlab.util.java.function;
import static org.assertj.core.api.Assertions.*;
import static org.junit.jupiter.api.Assertions.*;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.BiConsumer;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.Supplier;
import org.junit.jupiter.api.Test;
import org.nkjmlab.util.java.function.Try.ThrowableSupplier;
/**
 * Unit tests for {@code Try}: the {@code getOrElse*}/{@code runOrElse*}
 * helpers and the {@code create*} factories that adapt throwing lambdas to
 * the standard functional interfaces.
 *
 * <p>Fixes two copy-paste defects: {@code testCreateFunctionWithThrow1}
 * exercised {@code createConsumerWithThrow} instead of
 * {@code createFunctionWithThrow}, and {@code testCreateSupplierWithThrow1}
 * exercised {@code createRunnableWithThrow} instead of
 * {@code createSupplierWithThrow} for its null check.
 */
class TryTest {
  @Test
  void testGetOrDefault() {
    // The supplier throws, so the fallback value must be returned.
    String s = Try.getOrElse(() -> {
      throw new RuntimeException("error");
    }, "test");
    assertThat(s).isEqualTo("test");
  }
  @Test
  void testCreateRunnable() {
    // NOTE(review): the handler swallows the exception, so the catch branch
    // may never run; the assertion only applies if the runnable rethrows.
    try {
      Try.createRunnable(() -> {
        throw new RuntimeException("try");
      }, e -> {
      }).run();
    } catch (Exception e) {
      assertThat(e.getMessage()).contains("try");
    }
  }
  @Test
  void testCreateSupplier() {
    // NOTE(review): the handler returns "" instead of rethrowing, so the
    // catch branch may never run.
    try {
      Try.createSupplier(() -> {
        throw new RuntimeException("try");
      }, e -> "").get();
    } catch (Exception e) {
      assertThat(e.getMessage()).contains("try");
    }
  }
  @Test
  void testCreateSupplierWithThrow() {
    // Rethrow handler: the original exception must surface with its message.
    try {
      Try.createSupplierWithThrow(() -> {
        throw new RuntimeException("try");
      }, Try::rethrow).get();
    } catch (Exception e) {
      assertThat(e.getMessage()).contains("try");
    }
  }
  @Test
  void testCreateConsumer() {
    try {
      Try.createConsumer(con -> {
        throw new RuntimeException("try");
      }, e -> {
      }).accept("a");
    } catch (Exception e) {
      assertThat(e.getMessage()).contains("try");
    }
  }
  @Test
  void testCreateConsumerWithThrow() {
    try {
      Try.createConsumerWithThrow(con -> {
        throw new RuntimeException("try");
      }, Try::rethrow).accept("a");
    } catch (Exception e) {
      assertThat(e.getMessage()).contains("try");
    }
    // A non-throwing consumer must pass through untouched.
    Try.createConsumerWithThrow(con -> {
    }, Try::rethrow).accept("a");
  }
  @Test
  void testCreateFunction() {
    try {
      Try.createFunction(con -> {
        throw new RuntimeException("try");
      }, e -> "").apply("a");
    } catch (Exception e) {
      assertThat(e.getMessage()).contains("try");
    }
  }
  @Test
  void testCreateFunctionWithThrow() {
    try {
      Try.createFunctionWithThrow(con -> {
        throw new RuntimeException("try");
      }, Try::rethrow).apply("a");
    } catch (Exception e) {
      assertThat(e.getMessage()).contains("try");
    }
  }
  @Test
  void testGetOrNull() {
    // NOTE(review): result is unasserted; presumably null on failure —
    // consider asserting once Try's contract is confirmed.
    Try.getOrElseNull(() -> {
      throw new RuntimeException("try");
    });
  }
  @Test
  void testGetOrThrow() {
    try {
      Try.getOrElseThrow(() -> {
        throw new RuntimeException("try");
      }, Try::rethrow);
    } catch (Exception e) {
      assertThat(e.getMessage()).contains("try");
    }
    // The non-throwing path must complete normally.
    Try.getOrElseThrow(() -> {
      return null;
    }, Try::rethrow);
  }
  @Test
  void testRunOrThrow() {
    try {
      Try.runOrElseThrow(() -> {
        throw new RuntimeException("try");
      }, Try::rethrow);
    } catch (Exception e) {
      assertThat(e.getMessage()).contains("try");
    }
    Try.runOrElseThrow(() -> {
    }, Try::rethrow);
  }
  @Test
  void testCreateBiConsumer() {
    // A null delegate must be rejected eagerly.
    assertThrowsExactly(NullPointerException.class,
        () -> Try.createBiConsumer(null, e -> Try.rethrow(e)).accept(null, null));
    AtomicInteger i = new AtomicInteger(0);
    BiConsumer<Integer, Integer> func = Try
        .createBiConsumer((Integer a, Integer b) -> i.addAndGet(a + b), e -> System.err.println(e));
    func.accept(1, 2);
    assertThat(i.get()).isEqualTo(3);
  }
  @Test
  void testCreateBiConsumerWithThrow() {
    assertThrowsExactly(NullPointerException.class,
        () -> Try.createBiConsumerWithThrow(null, e -> Try.rethrow(e)).accept(null, null));
    AtomicInteger i = new AtomicInteger(0);
    BiConsumer<Integer, Integer> func = Try.createBiConsumerWithThrow(
        (Integer a, Integer b) -> i.addAndGet(a + b), e -> Try.rethrow(e));
    func.accept(1, 2);
    assertThat(i.get()).isEqualTo(3);
    // Errors thrown by the delegate must propagate through the rethrow handler.
    assertThrowsExactly(IllegalAccessError.class,
        () -> Try.createBiConsumerWithThrow((Integer a, Integer b) -> {
          throw new IllegalAccessError();
        }, e -> Try.rethrow(e)).accept(1, 2));
  }
  @Test
  void testCreateConsumer1() {
    assertThrowsExactly(NullPointerException.class,
        () -> Try.createConsumer(null, e -> Try.rethrow(e)).accept(null));
    AtomicInteger i = new AtomicInteger(0);
    Consumer<Integer> func =
        Try.createConsumer((Integer a) -> i.addAndGet(a), e -> System.err.println(e));
    func.accept(2);
    assertThat(i.get()).isEqualTo(2);
  }
  @Test
  void testCreateConsumerWithThrow1() {
    assertThrowsExactly(NullPointerException.class,
        () -> Try.createConsumerWithThrow(null, e -> Try.rethrow(e)).accept(null));
    AtomicInteger i = new AtomicInteger(0);
    Consumer<Integer> func = Try.createConsumerWithThrow((Integer a) -> {
      i.addAndGet(a);
    }, e -> Try.rethrow(e));
    func.accept(2);
    assertThat(i.get()).isEqualTo(2);
    assertThrowsExactly(IllegalAccessError.class, () -> Try.createConsumerWithThrow((Integer a) -> {
      throw new IllegalAccessError();
    }, e -> Try.rethrow(e)).accept(2));
  }
  @Test
  void testCreateFunction1() {
    assertThrowsExactly(NullPointerException.class,
        () -> Try.createFunction(null, e -> Try.rethrow(e)).apply(null));
    AtomicInteger i = new AtomicInteger(0);
    Function<Integer, Integer> func = Try.createFunction((Integer a) -> i.addAndGet(a), e -> -1);
    func.apply(2);
    assertThat(i.get()).isEqualTo(2);
  }
  @Test
  void testCreateFunctionWithThrow1() {
    assertThrowsExactly(NullPointerException.class,
        () -> Try.createFunctionWithThrow(null, e -> Try.rethrow(e)).apply(null));
    AtomicInteger i = new AtomicInteger(0);
    Function<Integer, Integer> func =
        Try.createFunctionWithThrow((Integer a) -> i.addAndGet(a), e -> Try.rethrow(e));
    func.apply(2);
    assertThat(i.get()).isEqualTo(2);
    // FIX: previously exercised createConsumerWithThrow (copy-paste); this
    // test must cover createFunctionWithThrow's error propagation.
    assertThrowsExactly(IllegalAccessError.class, () -> Try.createFunctionWithThrow((Integer a) -> {
      throw new IllegalAccessError();
    }, e -> Try.rethrow(e)).apply(2));
  }
  @Test
  void testCreateRunnable1() {
    assertThrowsExactly(NullPointerException.class,
        () -> Try.createRunnable(null, e -> Try.rethrow(e)).run());
    AtomicInteger i = new AtomicInteger(0);
    Runnable func = Try.createRunnable(() -> i.incrementAndGet(), e -> System.err.println(e));
    func.run();
    assertThat(i.get()).isEqualTo(1);
  }
  @Test
  void testCreateRunnableWithThrow() {
    assertThrowsExactly(NullPointerException.class,
        () -> Try.createRunnableWithThrow(null, e -> Try.rethrow(e)).run());
    AtomicInteger i = new AtomicInteger(0);
    Runnable func = Try.createRunnableWithThrow(() -> i.incrementAndGet(), e -> Try.rethrow(e));
    func.run();
    assertThat(i.get()).isEqualTo(1);
    assertThrowsExactly(IllegalAccessError.class, () -> Try.createRunnableWithThrow(() -> {
      throw new IllegalAccessError();
    }, e -> Try.rethrow(e)).run());
  }
  @Test
  void testCreateSupplier1() {
    assertThrowsExactly(NullPointerException.class,
        () -> Try.createSupplier(null, e -> Try.rethrow(e)).get());
    AtomicInteger i = new AtomicInteger(0);
    Supplier<Integer> func = Try.createSupplier(() -> i.incrementAndGet(), e -> -1);
    assertThat(func.get()).isEqualTo(1);
  }
  @Test
  void testCreateSupplierWithThrow1() {
    // FIX: previously exercised createRunnableWithThrow (copy-paste); the
    // null check must target createSupplierWithThrow itself.
    assertThrowsExactly(NullPointerException.class,
        () -> Try.createSupplierWithThrow(null, e -> Try.rethrow(e)).get());
    AtomicInteger i = new AtomicInteger(0);
    Supplier<Integer> func =
        Try.createSupplierWithThrow(() -> i.incrementAndGet(), e -> Try.rethrow(e));
    assertThat(func.get()).isEqualTo(1);
    assertThrowsExactly(IllegalAccessError.class, () -> Try.createSupplierWithThrow(() -> {
      throw new IllegalAccessError();
    }, e -> Try.rethrow(e)).get());
  }
  @Test
  void testGetOrElse() {
    assertThat(Try.getOrElse(null, -1)).isEqualTo(-1);
    assertThat(Try.getOrElse(() -> 2, -1)).isEqualTo(2);
  }
  @Test
  void testGetOrElseNull() {
    assertThat(Try.getOrElseNull((ThrowableSupplier<Integer>) null)).isEqualTo(null);
    assertThat(Try.getOrElseNull(() -> 2)).isEqualTo(2);
  }
  @Test
  void testGetOrElseThrow() {
    assertThrowsExactly(NullPointerException.class,
        () -> Try.getOrElseThrow(null, e -> Try.rethrow(e)));
    assertThat(Try.getOrElseThrow(() -> 2, e -> Try.rethrow(e))).isEqualTo(2);
  }
  @Test
  void testGetOrElseGet() {
    assertThrowsExactly(NullPointerException.class,
        () -> Try.getOrElseGet(null, e -> Try.rethrow(e)));
    assertThat(Try.getOrElseGet(() -> 2, e -> 3)).isEqualTo(2);
  }
  @Test
  void testRunOrElseDo() {
    assertThrowsExactly(NullPointerException.class,
        () -> Try.runOrElseDo(null, e -> Try.rethrow(e)));
    Try.runOrElseDo(() -> {
    }, e -> Try.rethrow(e));
  }
  @Test
  void testRunOrElseThrow() {
    assertThrowsExactly(NullPointerException.class,
        () -> Try.runOrElseThrow(null, e -> Try.rethrow(e)));
    Try.runOrElseThrow(() -> {
    }, e -> Try.rethrow(e));
  }
}
|
|
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.flowable.cmmn.test.async;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import static org.assertj.core.api.Assertions.extractProperty;
import static org.assertj.core.api.Assertions.tuple;
import java.time.Instant;
import java.time.temporal.ChronoField;
import java.time.temporal.ChronoUnit;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;
import org.flowable.cmmn.api.history.HistoricCaseInstance;
import org.flowable.cmmn.api.history.HistoricMilestoneInstance;
import org.flowable.cmmn.api.history.HistoricPlanItemInstance;
import org.flowable.cmmn.api.runtime.CaseInstance;
import org.flowable.cmmn.api.runtime.CaseInstanceState;
import org.flowable.cmmn.api.runtime.PlanItemDefinitionType;
import org.flowable.cmmn.api.runtime.PlanItemInstance;
import org.flowable.cmmn.api.runtime.PlanItemInstanceState;
import org.flowable.cmmn.api.runtime.UserEventListenerInstance;
import org.flowable.cmmn.engine.CmmnEngineConfiguration;
import org.flowable.cmmn.engine.impl.util.CommandContextUtil;
import org.flowable.cmmn.engine.test.CmmnDeployment;
import org.flowable.cmmn.test.impl.CustomCmmnConfigurationFlowableTestCase;
import org.flowable.common.engine.api.FlowableObjectNotFoundException;
import org.flowable.common.engine.api.scope.ScopeTypes;
import org.flowable.common.engine.impl.interceptor.Command;
import org.flowable.common.engine.impl.interceptor.CommandContext;
import org.flowable.common.engine.impl.interceptor.CommandExecutor;
import org.flowable.entitylink.api.EntityLinkType;
import org.flowable.entitylink.api.history.HistoricEntityLink;
import org.flowable.entitylink.api.history.HistoricEntityLinkService;
import org.flowable.identitylink.api.IdentityLink;
import org.flowable.identitylink.api.IdentityLinkType;
import org.flowable.identitylink.api.history.HistoricIdentityLink;
import org.flowable.job.api.HistoryJob;
import org.flowable.job.api.Job;
import org.flowable.job.service.JobServiceConfiguration;
import org.flowable.job.service.impl.persistence.entity.HistoryJobEntity;
import org.flowable.task.api.Task;
import org.flowable.task.api.history.HistoricTaskInstance;
import org.flowable.task.api.history.HistoricTaskLogEntry;
import org.flowable.task.api.history.HistoricTaskLogEntryBuilder;
import org.flowable.task.api.history.HistoricTaskLogEntryType;
import org.flowable.variable.api.history.HistoricVariableInstance;
import org.junit.Assert;
import org.junit.Test;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
/**
* @author Joram Barrez
* @author Filip Hrisafov
*/
public class AsyncCmmnHistoryTest extends CustomCmmnConfigurationFlowableTestCase {
/** Engine name used to isolate this test class's CMMN engine configuration. */
@Override
protected String getEngineName() {
    final String engineName = "AsyncCmmnHistoryTest";
    return engineName;
}
/**
 * Configures the engine for asynchronous history: history data is written by
 * history jobs rather than inline, and both executors stay inactive so each
 * test drives job processing explicitly via
 * {@code waitForAsyncHistoryExecutorToProcessAllJobs()}.
 */
@Override
protected void configureConfiguration(CmmnEngineConfiguration cmmnEngineConfiguration) {
    cmmnEngineConfiguration.setAsyncHistoryEnabled(true);
    // Executors off: tests control when (history) jobs are processed.
    cmmnEngineConfiguration.setAsyncExecutorActivate(false);
    cmmnEngineConfiguration.setAsyncHistoryExecutorActivate(false);
    // Group history JSON entries; threshold 1 presumably groups from the
    // first entry — TODO confirm grouping semantics against engine docs.
    cmmnEngineConfiguration.setAsyncHistoryJsonGroupingEnabled(true);
    cmmnEngineConfiguration.setAsyncHistoryJsonGroupingThreshold(1);
    // Short retry waits keep failing-job tests fast.
    cmmnEngineConfiguration.setAsyncFailedJobWaitTime(1);
    cmmnEngineConfiguration.setDefaultFailedJobWaitTime(1);
    cmmnEngineConfiguration.setAsyncHistoryExecutorNumberOfRetries(10);
    cmmnEngineConfiguration.setAsyncHistoryExecutorDefaultAsyncJobAcquireWaitTime(1000);
}
/**
 * Verifies that case start and end are reflected in history only after the
 * async history jobs have run, and that all start metadata (business key,
 * callback, reference, definition info) is propagated to the historic case
 * instance, including state/end-time updates on completion.
 */
@Test
@CmmnDeployment
public void testCaseInstanceStartAndEnd() {
    // Start with every optional attribute set so each historic field can be checked.
    CaseInstance caseInstance = cmmnRuntimeService.createCaseInstanceBuilder()
        .caseDefinitionKey("oneHumanTaskCase")
        .name("someName")
        .businessKey("someBusinessKey")
        .callbackId("someCallbackId")
        .callbackType("someCallbackType")
        .referenceId("someReferenceId")
        .referenceType("someReferenceType")
        .start();
    // History is async: nothing visible before the jobs are processed.
    assertThat(cmmnHistoryService.createHistoricCaseInstanceQuery().count()).isZero();
    waitForAsyncHistoryExecutorToProcessAllJobs();
    assertThat(cmmnHistoryService.createHistoricCaseInstanceQuery().count()).isEqualTo(1);
    HistoricCaseInstance historicCaseInstance = cmmnHistoryService.createHistoricCaseInstanceQuery().singleResult();
    assertThat(historicCaseInstance.getId()).isEqualTo(caseInstance.getId());
    assertThat(historicCaseInstance.getName()).isEqualTo("someName");
    assertThat(historicCaseInstance.getParentId()).isNull();
    assertThat(historicCaseInstance.getBusinessKey()).isEqualTo("someBusinessKey");
    assertThat(historicCaseInstance.getCaseDefinitionId()).isEqualTo(caseInstance.getCaseDefinitionId());
    assertThat(historicCaseInstance.getCaseDefinitionKey()).isEqualTo("oneHumanTaskCase");
    assertThat(historicCaseInstance.getCaseDefinitionName()).isEqualTo("oneHumanTaskCaseName");
    assertThat(historicCaseInstance.getCaseDefinitionVersion()).isEqualTo(1);
    assertThat(historicCaseInstance.getCaseDefinitionDeploymentId()).isEqualTo(caseInstance.getCaseDefinitionDeploymentId());
    assertThat(historicCaseInstance.getState()).isEqualTo(CaseInstanceState.ACTIVE);
    assertThat(historicCaseInstance.getCallbackId()).isEqualTo("someCallbackId");
    assertThat(historicCaseInstance.getCallbackType()).isEqualTo("someCallbackType");
    assertThat(historicCaseInstance.getReferenceId()).isEqualTo("someReferenceId");
    assertThat(historicCaseInstance.getReferenceType()).isEqualTo("someReferenceType");
    assertThat(historicCaseInstance.getStartTime()).isNotNull();
    assertThat(historicCaseInstance.getEndTime()).isNull();
    // Completing the single human task ends the case; history must follow.
    Task task = cmmnTaskService.createTaskQuery().caseInstanceId(caseInstance.getId()).singleResult();
    cmmnTaskService.complete(task.getId());
    waitForAsyncHistoryExecutorToProcessAllJobs();
    assertCaseInstanceEnded(caseInstance);
    historicCaseInstance = cmmnHistoryService.createHistoricCaseInstanceQuery().singleResult();
    assertThat(historicCaseInstance.getId()).isEqualTo(caseInstance.getId());
    assertThat(historicCaseInstance.getName()).isEqualTo("someName");
    assertThat(historicCaseInstance.getParentId()).isNull();
    assertThat(historicCaseInstance.getBusinessKey()).isEqualTo("someBusinessKey");
    assertThat(historicCaseInstance.getCaseDefinitionId()).isEqualTo(caseInstance.getCaseDefinitionId());
    assertThat(historicCaseInstance.getState()).isEqualTo(CaseInstanceState.COMPLETED);
    assertThat(historicCaseInstance.getCallbackId()).isEqualTo("someCallbackId");
    assertThat(historicCaseInstance.getCallbackType()).isEqualTo("someCallbackType");
    assertThat(historicCaseInstance.getReferenceId()).isEqualTo("someReferenceId");
    assertThat(historicCaseInstance.getReferenceType()).isEqualTo("someReferenceType");
    assertThat(historicCaseInstance.getStartTime()).isNotNull();
    assertThat(historicCaseInstance.getEndTime()).isNotNull();
}
/**
 * Verifies that deleting a historic case instance is itself an async history
 * operation: the historic instance disappears only after the history jobs
 * for the delete have been processed.
 */
@Test
@CmmnDeployment
public void testHistoricCaseInstanceDeleted() {
    CaseInstance caseInstance = cmmnRuntimeService.createCaseInstanceBuilder()
        .caseDefinitionKey("oneHumanTaskCase")
        .name("someName")
        .businessKey("someBusinessKey")
        .variable("test", "test")
        .start();
    // Complete the only task so the case ends and becomes purely historic.
    Task task = cmmnTaskService.createTaskQuery().caseInstanceId(caseInstance.getId()).singleResult();
    cmmnTaskService.complete(task.getId());
    waitForAsyncHistoryExecutorToProcessAllJobs();
    assertThat(cmmnRuntimeService.createPlanItemInstanceQuery().count()).isZero();
    assertThat(cmmnHistoryService.createHistoricCaseInstanceQuery().count()).isEqualTo(1);
    cmmnHistoryService.deleteHistoricCaseInstance(caseInstance.getId());
    // The delete is only effective once its history job has run.
    waitForAsyncHistoryExecutorToProcessAllJobs();
    assertThat(cmmnHistoryService.createHistoricCaseInstanceQuery().count()).isZero();
}
/**
 * Verifies that a standalone task (created outside any case) gets a historic
 * task instance only after the async history jobs run, and that a
 * caller-provided task id is preserved.
 */
@Test
public void testCreateTaskHistory() {
    Task task = cmmnTaskService.createTaskBuilder().id("task1").create();
    assertThat(cmmnHistoryService.createHistoricTaskInstanceQuery().taskId(task.getId()).singleResult()).isNull();
    waitForAsyncHistoryExecutorToProcessAllJobs();
    assertThat(cmmnHistoryService.createHistoricTaskInstanceQuery().taskId(task.getId()).singleResult()).isNotNull();
    assertThat(task.getId()).isEqualTo("task1");
    // Clean up the standalone task including its history.
    cmmnTaskService.deleteTask(task.getId(), true);
}
/**
 * Verifies that a reached milestone appears in runtime immediately but in
 * history only after the async history jobs run, with name, element id,
 * case linkage and timestamp populated.
 */
@Test
@CmmnDeployment
public void testMilestoneReached() {
    CaseInstance caseInstance = cmmnRuntimeService.createCaseInstanceBuilder().caseDefinitionKey("caseWithOneMilestone").start();
    // Runtime sees the milestone at once; history lags until jobs are processed.
    assertThat(cmmnRuntimeService.createMilestoneInstanceQuery().milestoneInstanceCaseInstanceId(caseInstance.getId()).count()).isEqualTo(1);
    assertThat(cmmnHistoryService.createHistoricMilestoneInstanceQuery().milestoneInstanceCaseInstanceId(caseInstance.getId()).count()).isZero();
    waitForAsyncHistoryExecutorToProcessAllJobs();
    assertThat(cmmnHistoryService.createHistoricMilestoneInstanceQuery().milestoneInstanceCaseInstanceId(caseInstance.getId()).count()).isEqualTo(1);
    HistoricMilestoneInstance historicMilestoneInstance = cmmnHistoryService.createHistoricMilestoneInstanceQuery()
        .milestoneInstanceCaseInstanceId(caseInstance.getId())
        .singleResult();
    assertThat(historicMilestoneInstance.getName()).isEqualTo("xyzMilestone");
    assertThat(historicMilestoneInstance.getElementId()).isEqualTo("milestonePlanItem1");
    assertThat(historicMilestoneInstance.getCaseInstanceId()).isEqualTo(caseInstance.getId());
    assertThat(historicMilestoneInstance.getCaseDefinitionId()).isEqualTo(caseInstance.getCaseDefinitionId());
    assertThat(historicMilestoneInstance.getTimeStamp()).isNotNull();
}
/**
 * Verifies that adding and removing a case-instance identity link is only
 * reflected in history after the corresponding async history jobs have run.
 */
@Test
@CmmnDeployment
public void testIdentityLinks() {
    CaseInstance caseInstance = cmmnRuntimeService.createCaseInstanceBuilder().caseDefinitionKey("caseWithOneMilestone").start();
    cmmnRuntimeService.addUserIdentityLink(caseInstance.getId(), "someUser", IdentityLinkType.PARTICIPANT);
    // Not yet in history before job processing.
    assertThat(cmmnHistoryService.getHistoricIdentityLinksForCaseInstance(caseInstance.getId())).isEmpty();
    waitForAsyncHistoryExecutorToProcessAllJobs();
    assertThat(cmmnHistoryService.getHistoricIdentityLinksForCaseInstance(caseInstance.getId())).hasSize(1);
    cmmnRuntimeService.deleteUserIdentityLink(caseInstance.getId(), "someUser", IdentityLinkType.PARTICIPANT);
    waitForAsyncHistoryExecutorToProcessAllJobs();
    assertThat(cmmnHistoryService.getHistoricIdentityLinksForCaseInstance(caseInstance.getId())).isEmpty();
}
/**
 * Verifies the full historic-variable lifecycle (create, update, delete)
 * under async history: each change becomes visible only after the history
 * jobs run, and an update produces a strictly later lastUpdatedTime.
 *
 * <p>Fix: the InterruptedException from the timing sleep was previously
 * swallowed with printStackTrace(); the interrupt status is now restored.
 */
@Test
@CmmnDeployment
public void testVariables() {
    CaseInstance caseInstance = cmmnRuntimeService.createCaseInstanceBuilder().caseDefinitionKey("oneHumanTaskCase").start();
    assertThat(cmmnHistoryService.createHistoricVariableInstanceQuery().caseInstanceId(caseInstance.getId()).count()).isZero();
    cmmnRuntimeService.setVariable(caseInstance.getId(), "test", "hello world");
    cmmnRuntimeService.setVariable(caseInstance.getId(), "test2", 2);
    // Create
    waitForAsyncHistoryExecutorToProcessAllJobs();
    assertThat(cmmnHistoryService.createHistoricVariableInstanceQuery().caseInstanceId(caseInstance.getId()).count()).isEqualTo(2);
    HistoricVariableInstance historicVariableInstance = cmmnHistoryService.createHistoricVariableInstanceQuery().caseInstanceId(caseInstance.getId())
        .variableName("test").singleResult();
    assertThat(historicVariableInstance.getVariableName()).isEqualTo("test");
    assertThat(historicVariableInstance.getScopeId()).isEqualTo(caseInstance.getId());
    assertThat(historicVariableInstance.getScopeType()).isEqualTo(ScopeTypes.CMMN);
    assertThat(historicVariableInstance.getValue()).isEqualTo("hello world");
    assertThat(historicVariableInstance.getCreateTime()).isNotNull();
    assertThat(historicVariableInstance.getLastUpdatedTime()).isNotNull();
    historicVariableInstance = cmmnHistoryService.createHistoricVariableInstanceQuery().caseInstanceId(caseInstance.getId()).variableName("test2")
        .singleResult();
    assertThat(historicVariableInstance.getVariableName()).isEqualTo("test2");
    assertThat(historicVariableInstance.getScopeId()).isEqualTo(caseInstance.getId());
    assertThat(historicVariableInstance.getSubScopeId()).isNull();
    assertThat(historicVariableInstance.getScopeType()).isEqualTo(ScopeTypes.CMMN);
    assertThat(historicVariableInstance.getValue()).isEqualTo(2);
    assertThat(historicVariableInstance.getCreateTime()).isNotNull();
    assertThat(historicVariableInstance.getLastUpdatedTime()).isNotNull();
    // Update
    try {
        // Short sleep so the update gets a measurably later lastUpdatedTime.
        Thread.sleep(16);
    } catch (InterruptedException e) {
        // Restore the interrupt status instead of swallowing the exception.
        Thread.currentThread().interrupt();
    }
    cmmnRuntimeService.setVariable(caseInstance.getId(), "test", "hello test");
    waitForAsyncHistoryExecutorToProcessAllJobs();
    HistoricVariableInstance updatedHistoricVariable = cmmnHistoryService.createHistoricVariableInstanceQuery().caseInstanceId(caseInstance.getId())
        .variableName("test").singleResult();
    assertThat(updatedHistoricVariable.getVariableName()).isEqualTo("test");
    assertThat(updatedHistoricVariable.getScopeId()).isEqualTo(caseInstance.getId());
    assertThat(updatedHistoricVariable.getSubScopeId()).isNull();
    assertThat(updatedHistoricVariable.getScopeType()).isEqualTo(ScopeTypes.CMMN);
    assertThat(updatedHistoricVariable.getValue()).isEqualTo("hello test");
    assertThat(updatedHistoricVariable.getCreateTime()).isNotNull();
    assertThat(updatedHistoricVariable.getLastUpdatedTime()).isNotNull();
    assertThat(historicVariableInstance.getLastUpdatedTime()).isNotEqualTo(updatedHistoricVariable.getLastUpdatedTime());
    // Delete
    cmmnRuntimeService.removeVariable(caseInstance.getId(), "test");
    waitForAsyncHistoryExecutorToProcessAllJobs();
    assertThat(cmmnHistoryService.createHistoricVariableInstanceQuery().caseInstanceId(caseInstance.getId()).variableName("test").singleResult()).isNull();
}
/**
 * Verifies the historic task lifecycle (create, assignee/priority updates,
 * completion) and the matching historic plan item instances, each becoming
 * visible only after the async history jobs are processed.
 */
@Test
@CmmnDeployment
public void testHumanTask() {
    CaseInstance caseInstance = cmmnRuntimeService.createCaseInstanceBuilder().caseDefinitionKey("oneHumanTaskCase").start();
    assertThat(cmmnTaskService.createTaskQuery().caseInstanceId(caseInstance.getId()).count()).isEqualTo(1);
    assertThat(cmmnHistoryService.createHistoricTaskInstanceQuery().caseInstanceId(caseInstance.getId()).count()).isZero();
    waitForAsyncHistoryExecutorToProcessAllJobs();
    assertThat(cmmnHistoryService.createHistoricTaskInstanceQuery().caseInstanceId(caseInstance.getId()).count()).isEqualTo(1);
    // Create
    HistoricTaskInstance historicTaskInstance = cmmnHistoryService.createHistoricTaskInstanceQuery().caseInstanceId(caseInstance.getId()).singleResult();
    assertThat(historicTaskInstance.getName()).isEqualTo("The Task");
    assertThat(historicTaskInstance.getAssignee()).isEqualTo("johnDoe");
    assertThat(historicTaskInstance.getScopeId()).isEqualTo(caseInstance.getId());
    assertThat(historicTaskInstance.getScopeDefinitionId()).isEqualTo(caseInstance.getCaseDefinitionId());
    assertThat(historicTaskInstance.getScopeType()).isEqualTo(ScopeTypes.CMMN);
    assertThat(historicTaskInstance.getCreateTime()).isNotNull();
    // Update: reassignment must be reflected after job processing.
    cmmnTaskService.setAssignee(historicTaskInstance.getId(), "janeDoe");
    waitForAsyncHistoryExecutorToProcessAllJobs();
    historicTaskInstance = cmmnHistoryService.createHistoricTaskInstanceQuery().caseInstanceId(caseInstance.getId()).singleResult();
    assertThat(historicTaskInstance.getName()).isEqualTo("The Task");
    assertThat(historicTaskInstance.getAssignee()).isEqualTo("janeDoe");
    cmmnTaskService.setPriority(historicTaskInstance.getId(), 99);
    waitForAsyncHistoryExecutorToProcessAllJobs();
    historicTaskInstance = cmmnHistoryService.createHistoricTaskInstanceQuery().caseInstanceId(caseInstance.getId()).singleResult();
    assertThat(historicTaskInstance.getPriority()).isEqualTo(99);
    assertThat(historicTaskInstance.getEndTime()).isNull();
    List<PlanItemInstance> planItemInstances = cmmnRuntimeService.createPlanItemInstanceQuery()
        .planItemDefinitionType(PlanItemDefinitionType.HUMAN_TASK)
        .orderByName().asc()
        .list();
    assertThat(planItemInstances).extracting(PlanItemInstance::getName).containsExactly("The Task");
    assertThat(planItemInstances).extracting(PlanItemInstance::getCreateTime).isNotNull();
    // Complete
    Task task = cmmnTaskService.createTaskQuery().caseInstanceId(caseInstance.getId()).singleResult();
    cmmnTaskService.complete(task.getId());
    assertThat(cmmnTaskService.createTaskQuery().caseInstanceId(caseInstance.getId()).count()).isZero();
    waitForAsyncHistoryExecutorToProcessAllJobs();
    historicTaskInstance = cmmnHistoryService.createHistoricTaskInstanceQuery().caseInstanceId(caseInstance.getId()).singleResult();
    assertThat(historicTaskInstance.getEndTime()).isNotNull();
    List<HistoricPlanItemInstance> historicPlanItemInstances = cmmnHistoryService.createHistoricPlanItemInstanceQuery()
        .planItemInstanceDefinitionType(PlanItemDefinitionType.HUMAN_TASK)
        .list();
    assertThat(historicPlanItemInstances).extracting(HistoricPlanItemInstance::getName).containsExactly("The Task");
    assertThat(historicPlanItemInstances).extracting(HistoricPlanItemInstance::getCreateTime).isNotNull();
}
/**
 * Verifies that a task's identity links (assignee plus candidate users and
 * groups) are copied to history: before job processing the historic task
 * does not even exist (lookup throws), afterwards the links match runtime.
 */
@Test
@CmmnDeployment
public void testHumanTaskWithCandidateUsersAndGroups() {
    CaseInstance caseInstance = cmmnRuntimeService.createCaseInstanceBuilder().caseDefinitionKey("oneHumanTaskCase").start();
    Task task = cmmnTaskService.createTaskQuery().caseInstanceId(caseInstance.getId()).singleResult();
    assertThat(task).isNotNull();
    assertThat(cmmnTaskService.getIdentityLinksForTask(task.getId()))
        .extracting(IdentityLink::getType, IdentityLink::getUserId, IdentityLink::getGroupId)
        .containsExactlyInAnyOrder(
            tuple("assignee", "johnDoe", null),
            tuple("candidate", "user1", null),
            tuple("candidate", null, "group1"),
            tuple("candidate", null, "group2")
        );
    // No historic task yet: the historic lookup must fail before job processing.
    assertThatThrownBy(() -> cmmnHistoryService.getHistoricIdentityLinksForTask(task.getId()))
        .isInstanceOf(FlowableObjectNotFoundException.class)
        .hasMessageContaining("No historic task exists");
    waitForAsyncHistoryExecutorToProcessAllJobs();
    assertThat(cmmnHistoryService.getHistoricIdentityLinksForTask(task.getId()))
        .extracting(HistoricIdentityLink::getType, HistoricIdentityLink::getUserId, HistoricIdentityLink::getGroupId)
        .containsExactlyInAnyOrder(
            tuple("assignee", "johnDoe", null),
            tuple("candidate", "user1", null),
            tuple("candidate", null, "group1"),
            tuple("candidate", null, "group2")
        );
}
@Test
@CmmnDeployment(resources = "org/flowable/cmmn/test/async/AsyncCmmnHistoryTest.testHumanTask.cmmn")
public void testHumanTaskWithNameDueDateAndDescription() {
    // Verifies that task name/description/dueDate changes (including setting them back to
    // null) only become visible in history once the async history jobs are processed.
    CaseInstance caseInstance = cmmnRuntimeService.createCaseInstanceBuilder().caseDefinitionKey("oneHumanTaskCase").start();
    Task task = cmmnTaskService.createTaskQuery().caseInstanceId(caseInstance.getId()).singleResult();
    assertThat(task).isNotNull();

    task.setName("Test name");
    task.setDescription("Test description");
    cmmnTaskService.saveTask(task);

    waitForAsyncHistoryExecutorToProcessAllJobs();

    // Create: historic task reflects the saved name/description; no due date was set yet.
    HistoricTaskInstance historicTaskInstance = cmmnHistoryService.createHistoricTaskInstanceQuery().caseInstanceId(caseInstance.getId()).singleResult();
    assertThat(historicTaskInstance).isNotNull();
    assertThat(historicTaskInstance.getName()).isEqualTo("Test name");
    assertThat(historicTaskInstance.getDescription()).isEqualTo("Test description");
    assertThat(historicTaskInstance.getDueDate()).isNull();

    // Set due date. Milliseconds are zeroed so the value round-trips exactly through
    // databases that truncate sub-second precision.
    Date dueDate = Date.from(Instant.now().with(ChronoField.MILLI_OF_SECOND, 0));
    cmmnTaskService.setDueDate(task.getId(), dueDate);

    waitForAsyncHistoryExecutorToProcessAllJobs();

    historicTaskInstance = cmmnHistoryService.createHistoricTaskInstanceQuery().caseInstanceId(caseInstance.getId()).singleResult();
    assertThat(historicTaskInstance.getDueDate()).isEqualTo(dueDate);

    // Update name and description to null
    task = cmmnTaskService.createTaskQuery().caseInstanceId(caseInstance.getId()).singleResult();
    task.setName(null);
    task.setDescription(null);
    cmmnTaskService.saveTask(task);

    // Before the history jobs it has the old data
    historicTaskInstance = cmmnHistoryService.createHistoricTaskInstanceQuery().caseInstanceId(caseInstance.getId()).singleResult();
    assertThat(historicTaskInstance.getName()).isEqualTo("Test name");
    assertThat(historicTaskInstance.getDescription()).isEqualTo("Test description");

    waitForAsyncHistoryExecutorToProcessAllJobs();

    // After the history jobs it has the new data
    historicTaskInstance = cmmnHistoryService.createHistoricTaskInstanceQuery().caseInstanceId(caseInstance.getId()).singleResult();
    assertThat(historicTaskInstance.getName()).isNull();
    assertThat(historicTaskInstance.getDescription()).isNull();

    // Update dueDate to null
    cmmnTaskService.setDueDate(task.getId(), null);

    // Before the history jobs it has the old data
    historicTaskInstance = cmmnHistoryService.createHistoricTaskInstanceQuery().caseInstanceId(caseInstance.getId()).singleResult();
    assertThat(historicTaskInstance.getDueDate()).isEqualTo(dueDate);

    waitForAsyncHistoryExecutorToProcessAllJobs();

    // After the history jobs it has the new data
    historicTaskInstance = cmmnHistoryService.createHistoricTaskInstanceQuery().caseInstanceId(caseInstance.getId()).singleResult();
    assertThat(historicTaskInstance.getDueDate()).isNull();
}
@Test
@CmmnDeployment
public void testCasePageTask() {
    // Verifies that a case page task produces a historic plan item instance carrying the
    // form key and identity links, and that completing the case's human task ends it.
    CaseInstance caseInstance = cmmnRuntimeService.createCaseInstanceBuilder().caseDefinitionKey("oneCasePageTask").start();
    assertThat(cmmnTaskService.createTaskQuery().caseInstanceId(caseInstance.getId()).count()).isEqualTo(1);
    // History is async: nothing is visible before the history jobs are processed.
    assertThat(cmmnHistoryService.createHistoricPlanItemInstanceQuery().planItemInstanceCaseInstanceId(caseInstance.getId()).count()).isZero();
    assertThat(cmmnHistoryService.createHistoricTaskInstanceQuery().caseInstanceId(caseInstance.getId()).count()).isZero();

    waitForAsyncHistoryExecutorToProcessAllJobs();

    assertThat(cmmnHistoryService.createHistoricPlanItemInstanceQuery().planItemInstanceCaseInstanceId(caseInstance.getId()).count()).isEqualTo(2);
    assertThat(cmmnHistoryService.createHistoricTaskInstanceQuery().caseInstanceId(caseInstance.getId()).count()).isEqualTo(1);

    HistoricPlanItemInstance historicPlanItemInstance = cmmnHistoryService.createHistoricPlanItemInstanceQuery()
            .planItemInstanceFormKey("testKey")
            .planItemInstanceCaseInstanceId(caseInstance.getId())
            .singleResult();
    assertThat(historicPlanItemInstance.getName()).isEqualTo("The Case Page Task");
    assertThat(historicPlanItemInstance.getFormKey()).isEqualTo("testKey");
    assertThat(historicPlanItemInstance.getExtraValue()).isEqualTo("testKey");
    assertThat(historicPlanItemInstance.getEndedTime()).isNull();

    // 5 links: assignee, owner, two candidate users and one candidate group.
    List<HistoricIdentityLink> historicIdentityLinks = cmmnHistoryService.getHistoricIdentityLinksForPlanItemInstance(historicPlanItemInstance.getId());
    assertThat(historicIdentityLinks).hasSize(5);

    List<HistoricIdentityLink> historicAssigneeLink = historicIdentityLinks.stream()
            .filter(identityLink -> identityLink.getType().equals(IdentityLinkType.ASSIGNEE)).collect(Collectors.toList());
    assertThat(historicAssigneeLink)
            .extracting(HistoricIdentityLink::getUserId)
            .containsExactly("johnDoe");

    List<HistoricIdentityLink> historicOwnerLink = historicIdentityLinks.stream()
            .filter(identityLink -> identityLink.getType().equals(IdentityLinkType.OWNER)).collect(Collectors.toList());
    assertThat(historicOwnerLink)
            .extracting(HistoricIdentityLink::getUserId)
            .containsExactly("janeDoe");

    List<HistoricIdentityLink> historicCandidateUserLinks = historicIdentityLinks.stream()
            .filter(identityLink -> identityLink.getType().equals(IdentityLinkType.CANDIDATE) &&
                    identityLink.getUserId() != null).collect(Collectors.toList());
    // Fixed: dropped an unused 'linkValues' accumulator loop that duplicated this extraction,
    // and aligned the assertion with the extracting(...) style used for the other links.
    assertThat(historicCandidateUserLinks)
            .extracting(HistoricIdentityLink::getUserId)
            .containsExactlyInAnyOrder("johnDoe", "janeDoe");

    List<HistoricIdentityLink> historicGroupLink = historicIdentityLinks.stream()
            .filter(identityLink -> identityLink.getType().equals(IdentityLinkType.CANDIDATE) &&
                    identityLink.getGroupId() != null).collect(Collectors.toList());
    assertThat(historicGroupLink)
            .extracting(HistoricIdentityLink::getGroupId)
            .containsExactly("sales");

    // Complete
    Task task = cmmnTaskService.createTaskQuery().caseInstanceId(caseInstance.getId()).singleResult();
    cmmnTaskService.complete(task.getId());
    assertThat(cmmnTaskService.createTaskQuery().caseInstanceId(caseInstance.getId()).count()).isZero();

    waitForAsyncHistoryExecutorToProcessAllJobs();

    HistoricTaskInstance historicTaskInstance = cmmnHistoryService.createHistoricTaskInstanceQuery().caseInstanceId(caseInstance.getId()).singleResult();
    assertThat(historicTaskInstance.getEndTime()).isNotNull();

    assertThat(cmmnHistoryService.createHistoricPlanItemInstanceQuery().planItemInstanceCaseInstanceId(caseInstance.getId()).count()).isEqualTo(2);
    historicPlanItemInstance = cmmnHistoryService.createHistoricPlanItemInstanceQuery()
            .planItemInstanceFormKey("testKey")
            .planItemInstanceCaseInstanceId(caseInstance.getId())
            .singleResult();
    assertThat(historicPlanItemInstance.getEndedTime()).isNotNull();
}
@Test
@CmmnDeployment
public void testPlanItemInstances() {
    // Walks a plan item through disable -> enable -> start -> trigger(complete) and checks,
    // after each async history flush, exactly which timestamp fields are set on the
    // historic plan item instance.
    CaseInstance caseInstance = cmmnRuntimeService.createCaseInstanceBuilder().caseDefinitionKey("testSimpleCaseFlow").start();
    List<PlanItemInstance> currentPlanItemInstances = cmmnRuntimeService.createPlanItemInstanceQuery().caseInstanceId(caseInstance.getId()).list();
    assertThat(currentPlanItemInstances).hasSize(3);
    // History is async: nothing visible before the history jobs run.
    assertThat(cmmnHistoryService.createHistoricPlanItemInstanceQuery().planItemInstanceCaseInstanceId(caseInstance.getId()).count()).isZero();

    waitForAsyncHistoryExecutorToProcessAllJobs();

    assertThat(cmmnHistoryService.createHistoricPlanItemInstanceQuery().planItemInstanceCaseInstanceId(caseInstance.getId()).count()).isEqualTo(3);
    List<HistoricPlanItemInstance> historicPlanItemInstances = cmmnHistoryService.createHistoricPlanItemInstanceQuery()
            .planItemInstanceCaseInstanceId(caseInstance.getId()).list();
    // The 3 plan items are a stage, a milestone and task 'planItemTaskA'.
    assertThat(historicPlanItemInstances.stream().map(HistoricPlanItemInstance::getPlanItemDefinitionType)
            .anyMatch(PlanItemDefinitionType.STAGE::equalsIgnoreCase)).isTrue();
    assertThat(historicPlanItemInstances.stream().map(HistoricPlanItemInstance::getPlanItemDefinitionType)
            .anyMatch(PlanItemDefinitionType.MILESTONE::equalsIgnoreCase)).isTrue();
    assertThat(historicPlanItemInstances.stream()
            .anyMatch(h -> "task".equalsIgnoreCase(h.getPlanItemDefinitionType()) && "planItemTaskA".equalsIgnoreCase(h.getElementId()))).isTrue();

    boolean showInOverviewMilestone = false;
    Date lastEnabledTimeTaskA = null;
    for (HistoricPlanItemInstance historicPlanItemInstance : historicPlanItemInstances) {
        assertThat(historicPlanItemInstance.getCaseInstanceId()).isEqualTo(caseInstance.getId());
        assertThat(historicPlanItemInstance.getCaseDefinitionId()).isEqualTo(caseInstance.getCaseDefinitionId());
        // Freshly created items: element id, create and available times are always set...
        assertThat(historicPlanItemInstance)
                .extracting(
                        HistoricPlanItemInstance::getElementId,
                        HistoricPlanItemInstance::getCreateTime,
                        HistoricPlanItemInstance::getLastAvailableTime)
                .doesNotContainNull();
        // ...while none of the terminal/suspension timestamps or criteria are set yet.
        assertThat(historicPlanItemInstance)
                .extracting(
                        HistoricPlanItemInstance::getEndedTime,
                        HistoricPlanItemInstance::getLastDisabledTime,
                        HistoricPlanItemInstance::getLastSuspendedTime,
                        HistoricPlanItemInstance::getExitTime,
                        HistoricPlanItemInstance::getTerminatedTime,
                        HistoricPlanItemInstance::getEntryCriterionId,
                        HistoricPlanItemInstance::getExitCriterionId)
                .containsOnlyNulls();

        if ("planItemTaskA".equals(historicPlanItemInstance.getElementId())) {
            lastEnabledTimeTaskA = historicPlanItemInstance.getLastEnabledTime();
        } else if ("planItemMilestoneOne".equals(historicPlanItemInstance.getElementId())) {
            showInOverviewMilestone = historicPlanItemInstance.isShowInOverview();
        } else {
            // Only the manually-activated task has been enabled.
            assertThat(historicPlanItemInstance.getLastEnabledTime()).isNull();
        }
    }
    assertThat(lastEnabledTimeTaskA).isNotNull();
    assertThat(showInOverviewMilestone).isTrue();

    // Disable task
    PlanItemInstance task = cmmnRuntimeService.createPlanItemInstanceQuery().planItemInstanceElementId("planItemTaskA").singleResult();
    assertThat(task).isNotNull();

    cmmnRuntimeService.disablePlanItemInstance(task.getId());

    waitForAsyncHistoryExecutorToProcessAllJobs();
    // All CMMN history jobs processed cleanly: no pending or dead-lettered jobs remain.
    assertThat(cmmnManagementService.createHistoryJobQuery().scopeType(ScopeTypes.CMMN).count()).isZero();
    assertThat(cmmnManagementService.createDeadLetterJobQuery().scopeType(ScopeTypes.CMMN).count()).isZero();

    HistoricPlanItemInstance historicPlanItemInstance = cmmnHistoryService.createHistoricPlanItemInstanceQuery().planItemInstanceId(task.getId())
            .singleResult();
    assertThat(historicPlanItemInstance.getState()).isEqualTo(PlanItemInstanceState.DISABLED);
    assertThat(historicPlanItemInstance)
            .extracting(
                    HistoricPlanItemInstance::getLastEnabledTime,
                    HistoricPlanItemInstance::getLastDisabledTime,
                    HistoricPlanItemInstance::getLastAvailableTime,
                    HistoricPlanItemInstance::getLastUpdatedTime)
            .doesNotContainNull();
    assertThat(historicPlanItemInstance)
            .extracting(
                    HistoricPlanItemInstance::getLastStartedTime,
                    HistoricPlanItemInstance::getEndedTime,
                    HistoricPlanItemInstance::getLastSuspendedTime,
                    HistoricPlanItemInstance::getExitTime,
                    HistoricPlanItemInstance::getTerminatedTime)
            .containsOnlyNulls();

    // Enable task
    cmmnRuntimeService.enablePlanItemInstance(task.getId());

    waitForAsyncHistoryExecutorToProcessAllJobs();
    historicPlanItemInstance = cmmnHistoryService.createHistoricPlanItemInstanceQuery().planItemInstanceId(task.getId()).singleResult();
    assertThat(historicPlanItemInstance.getState()).isEqualTo(PlanItemInstanceState.ENABLED);
    // Note: lastDisabledTime stays set from the earlier disable; only the state flips back.
    assertThat(historicPlanItemInstance)
            .extracting(
                    HistoricPlanItemInstance::getLastEnabledTime,
                    HistoricPlanItemInstance::getLastDisabledTime,
                    HistoricPlanItemInstance::getLastAvailableTime,
                    HistoricPlanItemInstance::getLastUpdatedTime)
            .doesNotContainNull();
    assertThat(historicPlanItemInstance)
            .extracting(
                    HistoricPlanItemInstance::getLastStartedTime,
                    HistoricPlanItemInstance::getEndedTime,
                    HistoricPlanItemInstance::getLastSuspendedTime,
                    HistoricPlanItemInstance::getExitTime,
                    HistoricPlanItemInstance::getTerminatedTime)
            .containsOnlyNulls();

    // Manually start the enabled task
    cmmnRuntimeService.startPlanItemInstance(task.getId());

    waitForAsyncHistoryExecutorToProcessAllJobs();
    historicPlanItemInstance = cmmnHistoryService.createHistoricPlanItemInstanceQuery().planItemInstanceId(task.getId()).singleResult();
    assertThat(historicPlanItemInstance.getLastStartedTime()).isNotNull();
    assertThat(historicPlanItemInstance.getEndedTime()).isNull();

    // Complete task. The clock is advanced an hour so lastUpdatedTime ordering is observable.
    Calendar clockCal = cmmnEngineConfiguration.getClock().getCurrentCalendar();
    clockCal.add(Calendar.HOUR, 1);
    setClockTo(clockCal.getTime());
    cmmnRuntimeService.triggerPlanItemInstance(task.getId());
    waitForAsyncHistoryExecutorToProcessAllJobs();

    HistoricPlanItemInstance completedHistoricPlanItemInstance = cmmnHistoryService.createHistoricPlanItemInstanceQuery().planItemInstanceId(task.getId())
            .singleResult();
    assertThat(completedHistoricPlanItemInstance)
            .extracting(
                    HistoricPlanItemInstance::getLastEnabledTime,
                    HistoricPlanItemInstance::getLastDisabledTime,
                    HistoricPlanItemInstance::getLastAvailableTime,
                    HistoricPlanItemInstance::getLastStartedTime,
                    HistoricPlanItemInstance::getLastUpdatedTime)
            .doesNotContainNull();
    // NOTE(review): this intentionally re-asserts the PRE-completion snapshot fetched above
    // ('historicPlanItemInstance'), not 'completedHistoricPlanItemInstance' — on the old
    // snapshot the end-related timestamps must still be null.
    assertThat(historicPlanItemInstance)
            .extracting(
                    HistoricPlanItemInstance::getEndedTime,
                    HistoricPlanItemInstance::getLastSuspendedTime,
                    HistoricPlanItemInstance::getExitTime,
                    HistoricPlanItemInstance::getTerminatedTime)
            .containsOnlyNulls();
    // The pre-completion snapshot was updated strictly before the completed one (clock moved +1h).
    assertThat(historicPlanItemInstance.getLastUpdatedTime()).isBefore(completedHistoricPlanItemInstance.getLastUpdatedTime());

    HistoricPlanItemInstance completedMilestoneInstance = cmmnHistoryService.createHistoricPlanItemInstanceQuery()
            .planItemInstanceElementId("planItemMilestoneOne").singleResult();
    assertThat(completedMilestoneInstance.getEndedTime()).isNotNull();
    assertThat(completedMilestoneInstance.isShowInOverview()).isTrue();

    cmmnEngineConfiguration.getClock().reset();
}
@Test
@CmmnDeployment
public void testCriterionStoredOnPlanItemInstance() {
    // Verifies that the satisfied entry/exit criterion ids are persisted on the
    // historic plan item instance once the async history jobs have run.
    CaseInstance caseInstance = cmmnRuntimeService.createCaseInstanceBuilder().caseDefinitionKey("testCriterions").start();

    // Completing task B satisfies entry criterion 'entryA2' of plan item C.
    Task taskB = cmmnTaskService.createTaskQuery().taskName("B").singleResult();
    cmmnTaskService.complete(taskB.getId());
    PlanItemInstance runtimeC = cmmnRuntimeService.createPlanItemInstanceQuery().planItemInstanceName("C").singleResult();
    assertThat(runtimeC.getEntryCriterionId()).isEqualTo("entryA2");

    waitForAsyncHistoryExecutorToProcessAllJobs();

    HistoricPlanItemInstance planItemInstanceC = cmmnHistoryService.createHistoricPlanItemInstanceQuery().planItemInstanceName("C").singleResult();
    assertThat(planItemInstanceC.getEntryCriterionId()).isEqualTo("entryA2");
    assertThat(planItemInstanceC.getExitCriterionId()).isNull();

    // Completing the user event listener satisfies exit criterion 'stop'.
    UserEventListenerInstance listener = cmmnRuntimeService.createUserEventListenerInstanceQuery().caseInstanceId(caseInstance.getId())
            .singleResult();
    cmmnRuntimeService.completeUserEventListenerInstance(listener.getId());

    waitForAsyncHistoryExecutorToProcessAllJobs();

    planItemInstanceC = cmmnHistoryService.createHistoricPlanItemInstanceQuery().planItemInstanceName("C").singleResult();
    assertThat(planItemInstanceC.getEntryCriterionId()).isEqualTo("entryA2");
    assertThat(planItemInstanceC.getExitCriterionId()).isEqualTo("stop");
}
@Test
public void createUserTaskLogEntity() {
    // Creates a task log entry via the fluent builder and verifies every field
    // round-trips through the async history store; the entry is deleted afterwards.
    Date todayDate = new Date();
    cmmnHistoryService.createHistoricTaskLogEntryBuilder()
            .taskId("1")
            .type("testType")
            .userId("testUserId")
            .data("testData")
            .scopeId("testScopeId")
            .scopeType("testScopeType")
            .scopeDefinitionId("testDefinitionId")
            .subScopeId("testSubScopeId")
            .timeStamp(todayDate)
            .tenantId("testTenant")
            .create();

    HistoricTaskLogEntry historicTaskLogEntry = null;
    try {
        // Nothing is visible until the async history jobs have been processed.
        assertThat(cmmnHistoryService.createHistoricTaskLogEntryQuery().taskId("1").count()).isZero();
        waitForAsyncHistoryExecutorToProcessAllJobs();
        assertThat(cmmnHistoryService.createHistoricTaskLogEntryQuery().taskId("1").count()).isEqualTo(1);

        historicTaskLogEntry = cmmnHistoryService.createHistoricTaskLogEntryQuery().taskId("1").singleResult();
        assertThat(historicTaskLogEntry.getLogNumber()).isPositive();
        assertThat(historicTaskLogEntry.getTaskId()).isEqualTo("1");
        assertThat(historicTaskLogEntry.getType()).isEqualTo("testType");
        assertThat(historicTaskLogEntry.getUserId()).isEqualTo("testUserId");
        assertThat(historicTaskLogEntry.getScopeId()).isEqualTo("testScopeId");
        assertThat(historicTaskLogEntry.getScopeType()).isEqualTo("testScopeType");
        assertThat(historicTaskLogEntry.getScopeDefinitionId()).isEqualTo("testDefinitionId");
        assertThat(historicTaskLogEntry.getSubScopeId()).isEqualTo("testSubScopeId");
        assertThat(historicTaskLogEntry.getData()).isEqualTo("testData");
        assertThat(historicTaskLogEntry.getLogNumber()).isPositive();
        assertThat(historicTaskLogEntry.getTimeStamp()).isNotNull();
        assertThat(historicTaskLogEntry.getTenantId()).isEqualTo("testTenant");
    } finally {
        // Clean up the manually created entry so it does not leak into other tests.
        if (historicTaskLogEntry != null) {
            cmmnHistoryService.deleteHistoricTaskLogEntry(historicTaskLogEntry.getLogNumber());
            waitForAsyncHistoryExecutorToProcessAllJobs();
        }
    }
}
@Test
public void createCmmnAsynchUserTaskLogEntries() {
    // Performs 10 task mutations and verifies that each produced exactly one async
    // history job, and that after processing, 11 log entries exist (the 10 mutations
    // plus the USER_TASK_CREATED entry from case start).
    CaseInstance caseInstance = deployAndStartOneHumanTaskCaseModel();
    Task task = cmmnTaskService.createTaskQuery().caseInstanceId(caseInstance.getId()).singleResult();

    task.setName("newName");
    task.setPriority(0);
    cmmnTaskService.saveTask(task);
    cmmnTaskService.setAssignee(task.getId(), "newAssignee");
    cmmnTaskService.setOwner(task.getId(), "newOwner");
    cmmnTaskService.setDueDate(task.getId(), new Date());
    cmmnTaskService.addUserIdentityLink(task.getId(), "testUser", IdentityLinkType.PARTICIPANT);
    cmmnTaskService.addGroupIdentityLink(task.getId(), "testGroup", IdentityLinkType.PARTICIPANT);
    cmmnTaskService.deleteUserIdentityLink(task.getId(), "testUser", IdentityLinkType.PARTICIPANT);
    cmmnTaskService.deleteGroupIdentityLink(task.getId(), "testGroup", IdentityLinkType.PARTICIPANT);
    cmmnTaskService.complete(task.getId());

    assertThat(cmmnHistoryService.createHistoricTaskLogEntryQuery().count()).isZero();
    // Fixed: uppercase 'L' long literals (lowercase 'l' is easily misread as '1').
    assertThat(cmmnManagementService.createHistoryJobQuery().count()).isEqualTo(10L);
    waitForAsyncHistoryExecutorToProcessAllJobs();

    assertThat(cmmnHistoryService.createHistoricTaskLogEntryQuery().taskId(task.getId()).count()).isEqualTo(11L);
    assertThat(countTaskLogEntriesOfType(task.getId(), HistoricTaskLogEntryType.USER_TASK_CREATED)).isEqualTo(1);
    assertThat(countTaskLogEntriesOfType(task.getId(), HistoricTaskLogEntryType.USER_TASK_NAME_CHANGED)).isEqualTo(1);
    assertThat(countTaskLogEntriesOfType(task.getId(), HistoricTaskLogEntryType.USER_TASK_PRIORITY_CHANGED)).isEqualTo(1);
    assertThat(countTaskLogEntriesOfType(task.getId(), HistoricTaskLogEntryType.USER_TASK_ASSIGNEE_CHANGED)).isEqualTo(1);
    assertThat(countTaskLogEntriesOfType(task.getId(), HistoricTaskLogEntryType.USER_TASK_OWNER_CHANGED)).isEqualTo(1);
    assertThat(countTaskLogEntriesOfType(task.getId(), HistoricTaskLogEntryType.USER_TASK_DUEDATE_CHANGED)).isEqualTo(1);
    assertThat(countTaskLogEntriesOfType(task.getId(), HistoricTaskLogEntryType.USER_TASK_IDENTITY_LINK_ADDED)).isEqualTo(2);
    assertThat(countTaskLogEntriesOfType(task.getId(), HistoricTaskLogEntryType.USER_TASK_IDENTITY_LINK_REMOVED)).isEqualTo(2);
    assertThat(countTaskLogEntriesOfType(task.getId(), HistoricTaskLogEntryType.USER_TASK_COMPLETED)).isEqualTo(1);
}

/** Counts the historic task log entries of the given type recorded for the given task. */
private long countTaskLogEntriesOfType(String taskId, HistoricTaskLogEntryType type) {
    return cmmnHistoryService.createHistoricTaskLogEntryQuery().taskId(taskId).type(type.name()).count();
}
@Test
public void deleteAsynchUserTaskLogEntries() {
    // Deleting a historic task log entry is itself an async operation: it creates
    // a history job and takes effect only after the jobs are processed.
    CaseInstance caseInstance = deployAndStartOneHumanTaskCaseModel();
    Task task = cmmnTaskService.createTaskQuery().caseInstanceId(caseInstance.getId()).singleResult();
    assertThat(cmmnHistoryService.createHistoricTaskLogEntryQuery().count()).isZero();
    assertThat(cmmnManagementService.createHistoryJobQuery().count()).isEqualTo(1);

    waitForAsyncHistoryExecutorToProcessAllJobs();

    List<HistoricTaskLogEntry> logEntries = cmmnHistoryService.createHistoricTaskLogEntryQuery().taskId(task.getId()).list();
    assertThat(logEntries).hasSize(1);
    long logNumber = logEntries.get(0).getLogNumber();

    cmmnHistoryService.deleteHistoricTaskLogEntry(logNumber);
    // The delete produced one new history job; the entry is gone once it is processed.
    assertThat(cmmnManagementService.createHistoryJobQuery().count()).isEqualTo(1);

    waitForAsyncHistoryExecutorToProcessAllJobs();

    assertThat(cmmnHistoryService.createHistoricTaskLogEntryQuery().taskId(task.getId()).count()).isZero();
}
@Test
@CmmnDeployment
public void createRootEntityLink() {
    // Verifies that a task created inside a case gets a historic entity link with
    // hierarchy type 'root' pointing back to the case, written asynchronously.
    CaseInstance caseInstance = cmmnRuntimeService.createCaseInstanceBuilder()
            .caseDefinitionKey("oneHumanTaskCase")
            .name("someName")
            .businessKey("someBusinessKey")
            .start();
    assertThat(cmmnHistoryService.createHistoricCaseInstanceQuery().count()).isZero();
    waitForAsyncHistoryExecutorToProcessAllJobs();
    assertThat(cmmnHistoryService.createHistoricCaseInstanceQuery().count()).isEqualTo(1);

    Task task = cmmnTaskService.createTaskQuery().caseInstanceId(caseInstance.getId()).singleResult();
    cmmnTaskService.complete(task.getId());

    waitForAsyncHistoryExecutorToProcessAllJobs();
    assertCaseInstanceEnded(caseInstance);

    // The historic entity link service is internal API, so it must be invoked inside
    // a command context via the engine's command executor.
    CommandExecutor commandExecutor = cmmnEngine.getCmmnEngineConfiguration().getCommandExecutor();
    List<HistoricEntityLink> entityLinksByScopeIdAndType = commandExecutor.execute(commandContext -> {
        HistoricEntityLinkService historicEntityLinkService = cmmnEngineConfiguration.getEntityLinkServiceConfiguration().getHistoricEntityLinkService();

        return historicEntityLinkService.findHistoricEntityLinksByReferenceScopeIdAndType(task.getId(), ScopeTypes.TASK, EntityLinkType.CHILD);
    });

    assertThat(entityLinksByScopeIdAndType)
            .extracting(HistoricEntityLink::getHierarchyType)
            .containsExactly("root");
}
@Test
@CmmnDeployment
public void testPlanItemInstancesStateChangesWithFixedTime() {
    // Pins the engine clock to a fixed instant and checks, for plan items ending up in
    // each lifecycle state, exactly which timestamp fields are set (to that instant)
    // and which remain null — first on the runtime instances, then on the historic
    // instances after the async history jobs have run.
    // We need to make sure the time ends on .000, .003 or .007 due to SQL Server rounding to that
    Date fixTime = Date.from(Instant.now().truncatedTo(ChronoUnit.SECONDS).plusMillis(823));
    cmmnEngineConfiguration.getClock().setCurrentTime(fixTime);

    CaseInstance caseInstance = cmmnRuntimeService.createCaseInstanceBuilder()
            .caseDefinitionKey("allStates")
            .start();

    List<PlanItemInstance> runtimePlanItemInstances = cmmnRuntimeService.createPlanItemInstanceQuery().caseInstanceId(caseInstance.getId()).list();
    assertThat(runtimePlanItemInstances)
            .extracting(PlanItemInstance::getPlanItemDefinitionId, PlanItemInstance::getState)
            .as("planItemDefinitionId, state")
            .containsExactlyInAnyOrder(
                    tuple("eventListenerAvailable", PlanItemInstanceState.AVAILABLE),
                    tuple("eventListenerUnavailable", PlanItemInstanceState.UNAVAILABLE),
                    tuple("serviceTaskAvailableEnabled", PlanItemInstanceState.ENABLED),
                    tuple("serviceTaskAvailableAsyncActive", PlanItemInstanceState.ASYNC_ACTIVE)
            );

    // Index the runtime instances by plan item definition id for per-state assertions.
    Map<String, PlanItemInstance> runtimePlanItemInstancesByDefinitionId = runtimePlanItemInstances.stream()
            .collect(Collectors.toMap(PlanItemInstance::getPlanItemDefinitionId, Function.identity()));

    // AVAILABLE: only create and lastAvailable times are set.
    PlanItemInstance eventListenerAvailable = runtimePlanItemInstancesByDefinitionId.get("eventListenerAvailable");
    assertThat(eventListenerAvailable).extracting(
            PlanItemInstance::getCompletedTime,
            PlanItemInstance::getEndedTime,
            PlanItemInstance::getOccurredTime,
            PlanItemInstance::getTerminatedTime,
            PlanItemInstance::getExitTime,
            PlanItemInstance::getLastEnabledTime,
            PlanItemInstance::getLastDisabledTime,
            PlanItemInstance::getLastStartedTime,
            PlanItemInstance::getLastSuspendedTime
    ).containsOnlyNulls();

    assertThat(eventListenerAvailable).extracting(
            PlanItemInstance::getCreateTime,
            PlanItemInstance::getLastAvailableTime
    ).containsOnly(fixTime);

    // UNAVAILABLE: only the create time is set.
    PlanItemInstance eventListenerUnavailable = runtimePlanItemInstancesByDefinitionId.get("eventListenerUnavailable");
    assertThat(eventListenerUnavailable).extracting(
            PlanItemInstance::getCompletedTime,
            PlanItemInstance::getEndedTime,
            PlanItemInstance::getOccurredTime,
            PlanItemInstance::getTerminatedTime,
            PlanItemInstance::getExitTime,
            PlanItemInstance::getLastEnabledTime,
            PlanItemInstance::getLastAvailableTime,
            PlanItemInstance::getLastDisabledTime,
            PlanItemInstance::getLastStartedTime,
            PlanItemInstance::getLastSuspendedTime
    ).containsOnlyNulls();

    assertThat(eventListenerUnavailable).extracting(
            PlanItemInstance::getCreateTime
    ).isEqualTo(fixTime);

    // ENABLED: create, lastEnabled and lastAvailable times are set.
    PlanItemInstance serviceTaskAvailableEnabled = runtimePlanItemInstancesByDefinitionId.get("serviceTaskAvailableEnabled");
    assertThat(serviceTaskAvailableEnabled).extracting(
            PlanItemInstance::getCompletedTime,
            PlanItemInstance::getEndedTime,
            PlanItemInstance::getOccurredTime,
            PlanItemInstance::getTerminatedTime,
            PlanItemInstance::getExitTime,
            PlanItemInstance::getLastDisabledTime,
            PlanItemInstance::getLastStartedTime,
            PlanItemInstance::getLastSuspendedTime
    ).containsOnlyNulls();

    assertThat(serviceTaskAvailableEnabled).extracting(
            PlanItemInstance::getCreateTime,
            PlanItemInstance::getLastEnabledTime,
            PlanItemInstance::getLastAvailableTime
    ).containsOnly(fixTime);

    // ASYNC_ACTIVE: create, lastAvailable and lastStarted times are set.
    PlanItemInstance serviceTaskAvailableAsyncActive = runtimePlanItemInstancesByDefinitionId.get("serviceTaskAvailableAsyncActive");
    assertThat(serviceTaskAvailableAsyncActive).extracting(
            PlanItemInstance::getCompletedTime,
            PlanItemInstance::getEndedTime,
            PlanItemInstance::getOccurredTime,
            PlanItemInstance::getTerminatedTime,
            PlanItemInstance::getExitTime,
            PlanItemInstance::getLastEnabledTime,
            PlanItemInstance::getLastDisabledTime,
            PlanItemInstance::getLastSuspendedTime
    ).containsOnlyNulls();

    assertThat(serviceTaskAvailableAsyncActive).extracting(
            PlanItemInstance::getCreateTime,
            PlanItemInstance::getLastAvailableTime,
            PlanItemInstance::getLastStartedTime
    ).containsOnly(fixTime);

    // History is async: nothing is visible before the history jobs run.
    assertThat(cmmnHistoryService.createHistoricPlanItemInstanceQuery().planItemInstanceCaseInstanceId(caseInstance.getId()).list())
            .extracting(HistoricPlanItemInstance::getPlanItemDefinitionId, HistoricPlanItemInstance::getState)
            .isEmpty();

    waitForAsyncHistoryExecutorToProcessAllJobs();

    // The historic view also includes the plan items that already ended (completed /
    // terminated) before the runtime query above could see them.
    List<HistoricPlanItemInstance> historicPlanItemInstances = cmmnHistoryService.createHistoricPlanItemInstanceQuery()
            .planItemInstanceCaseInstanceId(caseInstance.getId())
            .list();
    assertThat(historicPlanItemInstances)
            .extracting(HistoricPlanItemInstance::getPlanItemDefinitionId, HistoricPlanItemInstance::getState)
            .containsExactlyInAnyOrder(
                    tuple("serviceTaskAvailableActiveCompleted", PlanItemInstanceState.COMPLETED),
                    tuple("stageAvailableActiveTerminated", PlanItemInstanceState.TERMINATED),
                    tuple("humanTaskAvailableActiveTerminatedAndWaitingForRepetition", PlanItemInstanceState.TERMINATED),
                    tuple("eventListenerAvailable", PlanItemInstanceState.AVAILABLE),
                    tuple("eventListenerUnavailable", PlanItemInstanceState.UNAVAILABLE),
                    tuple("serviceTaskAvailableEnabled", PlanItemInstanceState.ENABLED),
                    tuple("serviceTaskAvailableAsyncActive", PlanItemInstanceState.ASYNC_ACTIVE)
            );

    Map<String, HistoricPlanItemInstance> historicPlanItemInstancesByDefinitionId = historicPlanItemInstances.stream()
            .collect(Collectors.toMap(HistoricPlanItemInstance::getPlanItemDefinitionId, Function.identity()));

    // The historic timestamps must mirror the runtime ones asserted above.
    HistoricPlanItemInstance historicEventListenerAvailable = historicPlanItemInstancesByDefinitionId.get("eventListenerAvailable");
    assertThat(historicEventListenerAvailable).extracting(
            HistoricPlanItemInstance::getCompletedTime,
            HistoricPlanItemInstance::getEndedTime,
            HistoricPlanItemInstance::getOccurredTime,
            HistoricPlanItemInstance::getTerminatedTime,
            HistoricPlanItemInstance::getExitTime,
            HistoricPlanItemInstance::getLastEnabledTime,
            HistoricPlanItemInstance::getLastDisabledTime,
            HistoricPlanItemInstance::getLastStartedTime,
            HistoricPlanItemInstance::getLastSuspendedTime
    ).containsOnlyNulls();

    assertThat(historicEventListenerAvailable).extracting(
            HistoricPlanItemInstance::getCreateTime,
            HistoricPlanItemInstance::getLastAvailableTime
    ).containsOnly(fixTime);

    HistoricPlanItemInstance historicEventListenerUnavailable = historicPlanItemInstancesByDefinitionId.get("eventListenerUnavailable");
    assertThat(historicEventListenerUnavailable).extracting(
            HistoricPlanItemInstance::getCompletedTime,
            HistoricPlanItemInstance::getEndedTime,
            HistoricPlanItemInstance::getOccurredTime,
            HistoricPlanItemInstance::getTerminatedTime,
            HistoricPlanItemInstance::getExitTime,
            HistoricPlanItemInstance::getLastEnabledTime,
            HistoricPlanItemInstance::getLastAvailableTime,
            HistoricPlanItemInstance::getLastDisabledTime,
            HistoricPlanItemInstance::getLastStartedTime,
            HistoricPlanItemInstance::getLastSuspendedTime
    ).containsOnlyNulls();

    assertThat(historicEventListenerUnavailable).extracting(
            HistoricPlanItemInstance::getCreateTime
    ).isEqualTo(fixTime);

    HistoricPlanItemInstance historicServiceTaskAvailableEnabled = historicPlanItemInstancesByDefinitionId.get("serviceTaskAvailableEnabled");
    assertThat(historicServiceTaskAvailableEnabled).extracting(
            HistoricPlanItemInstance::getCompletedTime,
            HistoricPlanItemInstance::getEndedTime,
            HistoricPlanItemInstance::getOccurredTime,
            HistoricPlanItemInstance::getTerminatedTime,
            HistoricPlanItemInstance::getExitTime,
            HistoricPlanItemInstance::getLastDisabledTime,
            HistoricPlanItemInstance::getLastStartedTime,
            HistoricPlanItemInstance::getLastSuspendedTime
    ).containsOnlyNulls();

    assertThat(historicServiceTaskAvailableEnabled).extracting(
            HistoricPlanItemInstance::getCreateTime,
            HistoricPlanItemInstance::getLastEnabledTime,
            HistoricPlanItemInstance::getLastAvailableTime
    ).containsOnly(fixTime);

    // COMPLETED: completed/ended times are set in addition to create/available/started.
    HistoricPlanItemInstance historicServiceTaskAvailableActiveCompleted = historicPlanItemInstancesByDefinitionId
            .get("serviceTaskAvailableActiveCompleted");
    assertThat(historicServiceTaskAvailableActiveCompleted).extracting(
            HistoricPlanItemInstance::getOccurredTime,
            HistoricPlanItemInstance::getTerminatedTime,
            HistoricPlanItemInstance::getExitTime,
            HistoricPlanItemInstance::getLastEnabledTime,
            HistoricPlanItemInstance::getLastDisabledTime,
            HistoricPlanItemInstance::getLastSuspendedTime
    ).containsOnlyNulls();

    assertThat(historicServiceTaskAvailableActiveCompleted).extracting(
            HistoricPlanItemInstance::getCreateTime,
            HistoricPlanItemInstance::getCompletedTime,
            HistoricPlanItemInstance::getEndedTime,
            HistoricPlanItemInstance::getLastAvailableTime,
            HistoricPlanItemInstance::getLastStartedTime
    ).containsOnly(fixTime);

    // TERMINATED via exit criterion: ended/exit times are set, terminatedTime is not.
    HistoricPlanItemInstance historicStageAvailableActiveTerminated = historicPlanItemInstancesByDefinitionId.get("stageAvailableActiveTerminated");
    assertThat(historicStageAvailableActiveTerminated).extracting(
            HistoricPlanItemInstance::getCompletedTime,
            HistoricPlanItemInstance::getOccurredTime,
            HistoricPlanItemInstance::getTerminatedTime,
            HistoricPlanItemInstance::getLastEnabledTime,
            HistoricPlanItemInstance::getLastDisabledTime,
            HistoricPlanItemInstance::getLastSuspendedTime
    ).containsOnlyNulls();

    assertThat(historicStageAvailableActiveTerminated).extracting(
            HistoricPlanItemInstance::getCreateTime,
            HistoricPlanItemInstance::getEndedTime,
            HistoricPlanItemInstance::getExitTime,
            HistoricPlanItemInstance::getLastAvailableTime,
            HistoricPlanItemInstance::getLastStartedTime
    ).containsOnly(fixTime);

    HistoricPlanItemInstance historicHumanTaskAvailableActiveTerminatedAndWaitingForRepetition = historicPlanItemInstancesByDefinitionId
            .get("humanTaskAvailableActiveTerminatedAndWaitingForRepetition");
    assertThat(historicHumanTaskAvailableActiveTerminatedAndWaitingForRepetition).extracting(
            HistoricPlanItemInstance::getCompletedTime,
            HistoricPlanItemInstance::getOccurredTime,
            HistoricPlanItemInstance::getTerminatedTime,
            HistoricPlanItemInstance::getLastEnabledTime,
            HistoricPlanItemInstance::getLastDisabledTime,
            HistoricPlanItemInstance::getLastSuspendedTime
    ).containsOnlyNulls();

    assertThat(historicHumanTaskAvailableActiveTerminatedAndWaitingForRepetition).extracting(
            HistoricPlanItemInstance::getCreateTime,
            HistoricPlanItemInstance::getEndedTime,
            HistoricPlanItemInstance::getExitTime,
            HistoricPlanItemInstance::getLastAvailableTime,
            HistoricPlanItemInstance::getLastStartedTime
    ).containsOnly(fixTime);
}
@Test
@CmmnDeployment
public void testBusinessKey() {
CaseInstance caseInstance = cmmnRuntimeService.createCaseInstanceBuilder()
.caseDefinitionKey("businessKeyCase")
.businessKey("someBusinessKey")
.start();
waitForAsyncHistoryExecutorToProcessAllJobs();
assertThat(cmmnHistoryService.createHistoricCaseInstanceQuery().caseInstanceId(caseInstance.getId()).singleResult().getBusinessKey())
.isEqualTo("someBusinessKey");
cmmnRuntimeService.updateBusinessKey(caseInstance.getId(), "newBusinessKey");
waitForAsyncHistoryExecutorToProcessAllJobs();
assertThat(cmmnHistoryService.createHistoricCaseInstanceQuery().caseInstanceId(caseInstance.getId()).singleResult().getBusinessKey())
.isEqualTo("newBusinessKey");
}
@Test
@CmmnDeployment
public void testHistoryJobFailure() {
CaseInstance caseInstance = cmmnRuntimeService.createCaseInstanceBuilder()
.caseDefinitionKey("oneHumanTaskCase")
.start();
// Fetch the first history job, and programmatically change the handler type, such that it will guaranteed fail.
HistoryJob historyJob = cmmnManagementService.createHistoryJobQuery().singleResult();
changeHistoryJsonToBeInvalid((HistoryJobEntity) historyJob);
assertThat(cmmnManagementService.createDeadLetterJobQuery().count()).isEqualTo(0);
waitForAsyncHistoryExecutorToProcessAllJobs();
assertThat(cmmnManagementService.createHistoryJobQuery().count()).isEqualTo(0);
Job deadLetterJob = cmmnManagementService.createDeadLetterJobQuery().singleResult();
assertThat(deadLetterJob.getJobType()).isEqualTo(HistoryJobEntity.HISTORY_JOB_TYPE);
assertThat(deadLetterJob.getExceptionMessage()).isNotEmpty();
String deadLetterJobExceptionStacktrace = cmmnManagementService.getDeadLetterJobExceptionStacktrace(deadLetterJob.getId());
assertThat(deadLetterJobExceptionStacktrace).isNotEmpty();
// The history jobs in the deadletter table have no link to the case instance, hence why a manual cleanup is needed.
cmmnRuntimeService.terminateCaseInstance(caseInstance.getId());
cmmnManagementService.createHistoryJobQuery().list().forEach(j -> cmmnManagementService.deleteHistoryJob(j.getId()));
cmmnManagementService.createDeadLetterJobQuery().list().forEach(j -> cmmnManagementService.deleteDeadLetterJob(j.getId()));
}
@Test
@CmmnDeployment(resources = "org/flowable/cmmn/test/async/AsyncCmmnHistoryTest.testHistoryJobFailure.cmmn")
public void testMoveDeadLetterJobBackToHistoryJob() {
CaseInstance caseInstance = cmmnRuntimeService.createCaseInstanceBuilder()
.caseDefinitionKey("oneHumanTaskCase")
.start();
// Fetch the first history job, and programmatically change the handler type, such that it will guaranteed fail.
HistoryJob historyJob = cmmnManagementService.createHistoryJobQuery().singleResult();
changeHistoryJsonToBeInvalid((HistoryJobEntity) historyJob);
String originalAdvancedConfiguration = getAdvancedJobHandlerConfiguration(historyJob.getId());
assertThat(originalAdvancedConfiguration).isNotEmpty();
waitForAsyncHistoryExecutorToProcessAllJobs();
assertThat(cmmnManagementService.createHistoryJobQuery().count()).isEqualTo(0);
Job deadLetterJob = cmmnManagementService.createDeadLetterJobQuery().singleResult();
cmmnManagementService.moveDeadLetterJobToHistoryJob(deadLetterJob.getId(), 3);
assertThat(cmmnManagementService.createHistoryJobQuery().count()).isEqualTo(1);
historyJob = cmmnManagementService.createHistoryJobQuery().singleResult();
assertThat(historyJob.getCreateTime()).isNotNull();
assertThat(historyJob.getRetries()).isEqualTo(3);
assertThat(historyJob.getExceptionMessage()).isNotNull(); // this is consistent with regular jobs
assertThat(historyJob.getJobHandlerConfiguration()).isNull(); // needs to have been reset
String newAdvancedConfiguration = getAdvancedJobHandlerConfiguration(historyJob.getId());
assertThat(originalAdvancedConfiguration).isEqualTo(newAdvancedConfiguration);
// The history jobs in the deadletter table have no link to the case instance, hence why a manual cleanup is needed.
cmmnRuntimeService.terminateCaseInstance(caseInstance.getId());
cmmnManagementService.createHistoryJobQuery().list().forEach(j -> cmmnManagementService.deleteHistoryJob(j.getId()));
cmmnManagementService.createDeadLetterJobQuery().list().forEach(j -> cmmnManagementService.deleteDeadLetterJob(j.getId()));
}
protected String getAdvancedJobHandlerConfiguration(String historyJobId) {
return cmmnEngineConfiguration.getCommandExecutor().execute(new Command<String>() {
@Override
public String execute(CommandContext commandContext) {
JobServiceConfiguration jobServiceConfiguration = CommandContextUtil.getCmmnEngineConfiguration(commandContext).getJobServiceConfiguration();
HistoryJobEntity job = jobServiceConfiguration.getHistoryJobEntityManager().findById(historyJobId);
return job.getAdvancedJobHandlerConfiguration();
}
});
}
protected void changeHistoryJsonToBeInvalid(HistoryJobEntity historyJob) {
cmmnEngineConfiguration.getCommandExecutor().execute(new Command<Void>() {
@Override
public Void execute(CommandContext commandContext) {
try {
HistoryJobEntity historyJobEntity = historyJob;
ObjectMapper objectMapper = cmmnEngineConfiguration.getObjectMapper();
JsonNode historyJsonNode = objectMapper.readTree(historyJobEntity.getAdvancedJobHandlerConfiguration());
for (JsonNode jsonNode : historyJsonNode) {
if (jsonNode.has("type") && jsonNode.get("type").asText().equals("cmmn-case-instance-start")) {
((ObjectNode) jsonNode).put("type", "invalidType");
}
}
historyJobEntity.setAdvancedJobHandlerConfiguration(objectMapper.writeValueAsString(historyJsonNode));
} catch (JsonProcessingException e) {
Assert.fail();
}
return null;
}
});
}
}
|
|
/*
* Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.ecs.model;
import java.io.Serializable;
import com.amazonaws.AmazonWebServiceRequest;
/**
* Container for the parameters to the {@link com.amazonaws.services.ecs.AmazonECS#submitContainerStateChange(SubmitContainerStateChangeRequest) SubmitContainerStateChange operation}.
* <p>
* <b>NOTE:</b> This action is only used by the Amazon EC2 Container
* Service agent, and it is not intended for use outside of the agent.
* </p>
* <p>
* Sent to acknowledge that a container changed states.
* </p>
*
* @see com.amazonaws.services.ecs.AmazonECS#submitContainerStateChange(SubmitContainerStateChangeRequest)
*/
public class SubmitContainerStateChangeRequest extends AmazonWebServiceRequest implements Serializable, Cloneable {

    // Generated AWS SDK request model: a mutable bean whose fluent "withXxx" methods
    // return "this" so request construction can be chained. Code generated by the SDK
    // tooling — keep edits to documentation only.

    /**
     * The short name or full Amazon Resource Name (ARN) of the cluster that
     * hosts the container.
     */
    private String cluster;

    /**
     * The task UUID or full Amazon Resource Name (ARN) of the task that
     * hosts the container.
     */
    private String task;

    /**
     * The name of the container.
     */
    private String containerName;

    /**
     * The status of the state change request.
     */
    private String status;

    /**
     * The exit code returned for the state change request.
     */
    private Integer exitCode;

    /**
     * The reason for the state change request.
     */
    private String reason;

    /**
     * The network bindings of the container.
     */
    private com.amazonaws.internal.ListWithAutoConstructFlag<NetworkBinding> networkBindings;

    /**
     * The short name or full Amazon Resource Name (ARN) of the cluster that
     * hosts the container.
     *
     * @return The short name or full Amazon Resource Name (ARN) of the cluster that
     *         hosts the container.
     */
    public String getCluster() {
        return cluster;
    }

    /**
     * The short name or full Amazon Resource Name (ARN) of the cluster that
     * hosts the container.
     *
     * @param cluster The short name or full Amazon Resource Name (ARN) of the cluster that
     *         hosts the container.
     */
    public void setCluster(String cluster) {
        this.cluster = cluster;
    }

    /**
     * The short name or full Amazon Resource Name (ARN) of the cluster that
     * hosts the container.
     * <p>
     * Returns a reference to this object so that method calls can be chained together.
     *
     * @param cluster The short name or full Amazon Resource Name (ARN) of the cluster that
     *         hosts the container.
     *
     * @return A reference to this updated object so that method calls can be chained
     *         together.
     */
    public SubmitContainerStateChangeRequest withCluster(String cluster) {
        this.cluster = cluster;
        return this;
    }

    /**
     * The task UUID or full Amazon Resource Name (ARN) of the task that
     * hosts the container.
     *
     * @return The task UUID or full Amazon Resource Name (ARN) of the task that
     *         hosts the container.
     */
    public String getTask() {
        return task;
    }

    /**
     * The task UUID or full Amazon Resource Name (ARN) of the task that
     * hosts the container.
     *
     * @param task The task UUID or full Amazon Resource Name (ARN) of the task that
     *         hosts the container.
     */
    public void setTask(String task) {
        this.task = task;
    }

    /**
     * The task UUID or full Amazon Resource Name (ARN) of the task that
     * hosts the container.
     * <p>
     * Returns a reference to this object so that method calls can be chained together.
     *
     * @param task The task UUID or full Amazon Resource Name (ARN) of the task that
     *         hosts the container.
     *
     * @return A reference to this updated object so that method calls can be chained
     *         together.
     */
    public SubmitContainerStateChangeRequest withTask(String task) {
        this.task = task;
        return this;
    }

    /**
     * The name of the container.
     *
     * @return The name of the container.
     */
    public String getContainerName() {
        return containerName;
    }

    /**
     * The name of the container.
     *
     * @param containerName The name of the container.
     */
    public void setContainerName(String containerName) {
        this.containerName = containerName;
    }

    /**
     * The name of the container.
     * <p>
     * Returns a reference to this object so that method calls can be chained together.
     *
     * @param containerName The name of the container.
     *
     * @return A reference to this updated object so that method calls can be chained
     *         together.
     */
    public SubmitContainerStateChangeRequest withContainerName(String containerName) {
        this.containerName = containerName;
        return this;
    }

    /**
     * The status of the state change request.
     *
     * @return The status of the state change request.
     */
    public String getStatus() {
        return status;
    }

    /**
     * The status of the state change request.
     *
     * @param status The status of the state change request.
     */
    public void setStatus(String status) {
        this.status = status;
    }

    /**
     * The status of the state change request.
     * <p>
     * Returns a reference to this object so that method calls can be chained together.
     *
     * @param status The status of the state change request.
     *
     * @return A reference to this updated object so that method calls can be chained
     *         together.
     */
    public SubmitContainerStateChangeRequest withStatus(String status) {
        this.status = status;
        return this;
    }

    /**
     * The exit code returned for the state change request.
     *
     * @return The exit code returned for the state change request.
     */
    public Integer getExitCode() {
        return exitCode;
    }

    /**
     * The exit code returned for the state change request.
     *
     * @param exitCode The exit code returned for the state change request.
     */
    public void setExitCode(Integer exitCode) {
        this.exitCode = exitCode;
    }

    /**
     * The exit code returned for the state change request.
     * <p>
     * Returns a reference to this object so that method calls can be chained together.
     *
     * @param exitCode The exit code returned for the state change request.
     *
     * @return A reference to this updated object so that method calls can be chained
     *         together.
     */
    public SubmitContainerStateChangeRequest withExitCode(Integer exitCode) {
        this.exitCode = exitCode;
        return this;
    }

    /**
     * The reason for the state change request.
     *
     * @return The reason for the state change request.
     */
    public String getReason() {
        return reason;
    }

    /**
     * The reason for the state change request.
     *
     * @param reason The reason for the state change request.
     */
    public void setReason(String reason) {
        this.reason = reason;
    }

    /**
     * The reason for the state change request.
     * <p>
     * Returns a reference to this object so that method calls can be chained together.
     *
     * @param reason The reason for the state change request.
     *
     * @return A reference to this updated object so that method calls can be chained
     *         together.
     */
    public SubmitContainerStateChangeRequest withReason(String reason) {
        this.reason = reason;
        return this;
    }

    /**
     * The network bindings of the container.
     *
     * @return The network bindings of the container.
     */
    public java.util.List<NetworkBinding> getNetworkBindings() {
        // Lazily auto-construct an empty list so this getter never returns null;
        // the auto-construct flag marks the list as SDK-created rather than user-supplied.
        if (networkBindings == null) {
              networkBindings = new com.amazonaws.internal.ListWithAutoConstructFlag<NetworkBinding>();
              networkBindings.setAutoConstruct(true);
        }
        return networkBindings;
    }

    /**
     * The network bindings of the container.
     *
     * @param networkBindings The network bindings of the container.
     */
    public void setNetworkBindings(java.util.Collection<NetworkBinding> networkBindings) {
        if (networkBindings == null) {
            this.networkBindings = null;
            return;
        }
        // Defensive copy: never alias the caller's collection.
        com.amazonaws.internal.ListWithAutoConstructFlag<NetworkBinding> networkBindingsCopy = new com.amazonaws.internal.ListWithAutoConstructFlag<NetworkBinding>(networkBindings.size());
        networkBindingsCopy.addAll(networkBindings);
        this.networkBindings = networkBindingsCopy;
    }

    /**
     * The network bindings of the container.
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if
     * any). Use {@link #setNetworkBindings(java.util.Collection)} or {@link
     * #withNetworkBindings(java.util.Collection)} if you want to override
     * the existing values.
     * <p>
     * Returns a reference to this object so that method calls can be chained together.
     *
     * @param networkBindings The network bindings of the container.
     *
     * @return A reference to this updated object so that method calls can be chained
     *         together.
     */
    public SubmitContainerStateChangeRequest withNetworkBindings(NetworkBinding... networkBindings) {
        // NOTE(review): getNetworkBindings() never returns null (it auto-constructs),
        // so this guard is always false; kept as-is because this is generated code.
        if (getNetworkBindings() == null) setNetworkBindings(new java.util.ArrayList<NetworkBinding>(networkBindings.length));
        for (NetworkBinding value : networkBindings) {
            getNetworkBindings().add(value);
        }
        return this;
    }

    /**
     * The network bindings of the container.
     * <p>
     * Returns a reference to this object so that method calls can be chained together.
     *
     * @param networkBindings The network bindings of the container.
     *
     * @return A reference to this updated object so that method calls can be chained
     *         together.
     */
    public SubmitContainerStateChangeRequest withNetworkBindings(java.util.Collection<NetworkBinding> networkBindings) {
        if (networkBindings == null) {
            this.networkBindings = null;
        } else {
            com.amazonaws.internal.ListWithAutoConstructFlag<NetworkBinding> networkBindingsCopy = new com.amazonaws.internal.ListWithAutoConstructFlag<NetworkBinding>(networkBindings.size());
            networkBindingsCopy.addAll(networkBindings);
            this.networkBindings = networkBindingsCopy;
        }
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getCluster() != null) sb.append("Cluster: " + getCluster() + ",");
        if (getTask() != null) sb.append("Task: " + getTask() + ",");
        if (getContainerName() != null) sb.append("ContainerName: " + getContainerName() + ",");
        if (getStatus() != null) sb.append("Status: " + getStatus() + ",");
        if (getExitCode() != null) sb.append("ExitCode: " + getExitCode() + ",");
        if (getReason() != null) sb.append("Reason: " + getReason() + ",");
        if (getNetworkBindings() != null) sb.append("NetworkBindings: " + getNetworkBindings() );
        sb.append("}");
        return sb.toString();
    }

    @Override
    public int hashCode() {
        // Standard prime-accumulation hash over all fields, consistent with equals().
        final int prime = 31;
        int hashCode = 1;
        hashCode = prime * hashCode + ((getCluster() == null) ? 0 : getCluster().hashCode());
        hashCode = prime * hashCode + ((getTask() == null) ? 0 : getTask().hashCode());
        hashCode = prime * hashCode + ((getContainerName() == null) ? 0 : getContainerName().hashCode());
        hashCode = prime * hashCode + ((getStatus() == null) ? 0 : getStatus().hashCode());
        hashCode = prime * hashCode + ((getExitCode() == null) ? 0 : getExitCode().hashCode());
        hashCode = prime * hashCode + ((getReason() == null) ? 0 : getReason().hashCode());
        hashCode = prime * hashCode + ((getNetworkBindings() == null) ? 0 : getNetworkBindings().hashCode());
        return hashCode;
    }

    @Override
    public boolean equals(Object obj) {
        // Per-field pattern: the XOR detects "exactly one side null" (unequal); the
        // second check compares the non-null values.
        if (this == obj) return true;
        if (obj == null) return false;
        if (obj instanceof SubmitContainerStateChangeRequest == false) return false;
        SubmitContainerStateChangeRequest other = (SubmitContainerStateChangeRequest)obj;
        if (other.getCluster() == null ^ this.getCluster() == null) return false;
        if (other.getCluster() != null && other.getCluster().equals(this.getCluster()) == false) return false;
        if (other.getTask() == null ^ this.getTask() == null) return false;
        if (other.getTask() != null && other.getTask().equals(this.getTask()) == false) return false;
        if (other.getContainerName() == null ^ this.getContainerName() == null) return false;
        if (other.getContainerName() != null && other.getContainerName().equals(this.getContainerName()) == false) return false;
        if (other.getStatus() == null ^ this.getStatus() == null) return false;
        if (other.getStatus() != null && other.getStatus().equals(this.getStatus()) == false) return false;
        if (other.getExitCode() == null ^ this.getExitCode() == null) return false;
        if (other.getExitCode() != null && other.getExitCode().equals(this.getExitCode()) == false) return false;
        if (other.getReason() == null ^ this.getReason() == null) return false;
        if (other.getReason() != null && other.getReason().equals(this.getReason()) == false) return false;
        if (other.getNetworkBindings() == null ^ this.getNetworkBindings() == null) return false;
        if (other.getNetworkBindings() != null && other.getNetworkBindings().equals(this.getNetworkBindings()) == false) return false;
        return true;
    }

    @Override
    public SubmitContainerStateChangeRequest clone() {
        return (SubmitContainerStateChangeRequest) super.clone();
    }
}
|
|
package org.jivesoftware.openfire.admin.setup;
import javax.servlet.*;
import javax.servlet.http.*;
import javax.servlet.jsp.*;
import org.jivesoftware.util.LocaleUtils;
import java.util.Map;
import org.jivesoftware.openfire.clearspace.ClearspaceManager;
import java.net.UnknownHostException;
import javax.net.ssl.SSLException;
import org.jivesoftware.openfire.clearspace.ConnectionException;
public final class setup_002dclearspace_002dintegration_005ftest_jsp extends org.apache.jasper.runtime.HttpJspBase
    implements org.apache.jasper.runtime.JspSourceDependent {

  // Servlet generated by Apache Jasper from the Clearspace-integration setup-test JSP
  // (the mangled class name encodes the JSP file name). Do not edit by hand —
  // regenerate from the JSP page instead.

  private static java.util.List _jspx_dependants;

  // Pool of <fmt:message> tag handlers (key attribute, no body) reused across requests.
  private org.apache.jasper.runtime.TagHandlerPool _jspx_tagPool_fmt_message_key_nobody;

  public Object getDependants() {
    return _jspx_dependants;
  }

  public void _jspInit() {
    _jspx_tagPool_fmt_message_key_nobody = org.apache.jasper.runtime.TagHandlerPool.getTagHandlerPool(getServletConfig());
  }

  public void _jspDestroy() {
    _jspx_tagPool_fmt_message_key_nobody.release();
  }

  /**
   * Renders the Clearspace connection-test panel: runs a connection test against the
   * settings stored in the session and writes a success or error panel as HTML.
   */
  public void _jspService(HttpServletRequest request, HttpServletResponse response)
        throws java.io.IOException, ServletException {

    JspFactory _jspxFactory = null;
    PageContext pageContext = null;
    HttpSession session = null;
    ServletContext application = null;
    ServletConfig config = null;
    JspWriter out = null;
    Object page = this;
    JspWriter _jspx_out = null;
    PageContext _jspx_page_context = null;

    try {
      _jspxFactory = JspFactory.getDefaultFactory();
      response.setContentType("text/html");
      pageContext = _jspxFactory.getPageContext(this, request, response,
      			null, true, 8192, true);
      _jspx_page_context = pageContext;
      application = pageContext.getServletContext();
      config = pageContext.getServletConfig();
      session = pageContext.getSession();
      out = pageContext.getOut();
      _jspx_out = out;

      out.write("\n\n\n\n\n\n\n\n\n\n");

      // Outcome of the connection test plus human-readable error details for the panel.
      boolean success = false;
      Throwable exception = null;
      String errorDetail = "";
      String exceptionDetail = "";
      // Settings are read from the session; presumably stored there by an earlier
      // setup page in the flow — confirm against the setup wizard if changing this.
      Map<String, String> settings = (Map<String, String>) session.getAttribute("clearspaceSettings");
      if (settings != null) {
          ClearspaceManager manager = new ClearspaceManager(settings);
          exception = manager.testConnection();
          if (exception == null) {
              success = true;
          }
          else {
              if (exception instanceof ConnectionException) {
                  ConnectionException connException = (ConnectionException) exception;
                  // Map each connection error type to a localized message.
                  // NOTE(review): "SERVICE_NOT_AVAIBLE" and the "...serviceNotAvaitble" key
                  // are misspelled but must match the enum constant and resource bundle;
                  // do not "fix" them here without changing both.
                  switch (connException.getErrorType()) {
                      case AUTHENTICATION:
                          errorDetail = LocaleUtils.getLocalizedString("setup.clearspace.service.test.error-authentication");
                          break;
                      case PAGE_NOT_FOUND:
                          errorDetail = LocaleUtils.getLocalizedString("setup.clearspace.service.test.error-pageNotFound");
                          break;
                      case SERVICE_NOT_AVAIBLE:
                          errorDetail = LocaleUtils.getLocalizedString("setup.clearspace.service.test.error-serviceNotAvaitble");
                          break;
                      case UPDATE_STATE:
                          errorDetail = LocaleUtils.getLocalizedString("setup.clearspace.service.test.error-updateState");
                          break;
                      case UNKNOWN_HOST:
                          errorDetail = LocaleUtils.getLocalizedString("setup.clearspace.service.test.error-unknownHost");
                          break;
                      case OTHER:
                          errorDetail = LocaleUtils.getLocalizedString("setup.clearspace.service.test.error-connection");
                          break;
                  }
              } else {
                  errorDetail = LocaleUtils.getLocalizedString("setup.clearspace.service.test.error-connection");
              }
              // Prefer the root cause's message; fall back to the exception's own message.
              if (exception.getCause() != null) {
                  if (exception.getCause() instanceof UnknownHostException) {
                      exceptionDetail = exception.toString();
                  }
                  else {
                      exceptionDetail = exception.getCause().getMessage();
                  }
              } else {
                  exceptionDetail = exception.getMessage();
              }
          }
      }

      out.write("\n <!-- BEGIN connection settings test panel -->\n\t<div class=\"jive-testPanel\">\n\t\t<div class=\"jive-testPanel-content\">\n\n\t\t\t<div align=\"right\" class=\"jive-testPanel-close\">\n\t\t\t\t<a href=\"#\" class=\"lbAction\" rel=\"deactivate\">");
      if (_jspx_meth_fmt_message_0(_jspx_page_context))
        return;
      out.write("</a>\n\t\t\t</div>\n\n\n\t\t\t<h2>");
      if (_jspx_meth_fmt_message_1(_jspx_page_context))
        return;
      out.write(": <span>");
      if (_jspx_meth_fmt_message_2(_jspx_page_context))
        return;
      out.write("</span></h2>\n ");

      if (success) {

      out.write("\n <h4 class=\"jive-testSuccess\">");
      if (_jspx_meth_fmt_message_3(_jspx_page_context))
        return;
      out.write("</h4>\n\n\t\t\t<p>");
      if (_jspx_meth_fmt_message_4(_jspx_page_context))
        return;
      out.write("</p>\n ");

      } else {

      out.write("\n <h4 class=\"jive-testError\">");
      if (_jspx_meth_fmt_message_5(_jspx_page_context))
        return;
      out.write("</h4>\n <p>");
      out.print( errorDetail );
      out.write("</p>\n <p><b>");
      if (_jspx_meth_fmt_message_6(_jspx_page_context))
        return;
      out.write("</b></p>\n <p>");
      out.print( exceptionDetail );
      out.write("</p>\n ");

      }

      out.write("\n\n </div>\n\t</div>\n\t<!-- END connection settings test panel -->");
    } catch (Throwable t) {
      // Standard Jasper error handling: delegate to the page context's error
      // mechanism unless the page was deliberately skipped.
      if (!(t instanceof SkipPageException)){
        out = _jspx_out;
        if (out != null && out.getBufferSize() != 0)
          out.clearBuffer();
        if (_jspx_page_context != null) _jspx_page_context.handlePageException(t);
      }
    } finally {
      if (_jspxFactory != null) _jspxFactory.releasePageContext(_jspx_page_context);
    }
  }

  // Renders <fmt:message key="setup.clearspace.service.test.close"/> using a pooled
  // tag handler; returns true if the tag requested the page be skipped.
  private boolean _jspx_meth_fmt_message_0(PageContext _jspx_page_context)
          throws Throwable {
    PageContext pageContext = _jspx_page_context;
    JspWriter out = _jspx_page_context.getOut();
    //  fmt:message
    org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_message_0 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _jspx_tagPool_fmt_message_key_nobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
    _jspx_th_fmt_message_0.setPageContext(_jspx_page_context);
    _jspx_th_fmt_message_0.setParent(null);
    _jspx_th_fmt_message_0.setKey("setup.clearspace.service.test.close");
    int _jspx_eval_fmt_message_0 = _jspx_th_fmt_message_0.doStartTag();
    if (_jspx_th_fmt_message_0.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
      _jspx_tagPool_fmt_message_key_nobody.reuse(_jspx_th_fmt_message_0);
      return true;
    }
    _jspx_tagPool_fmt_message_key_nobody.reuse(_jspx_th_fmt_message_0);
    return false;
  }

  // Renders <fmt:message key="setup.clearspace.service.test.title"/>.
  private boolean _jspx_meth_fmt_message_1(PageContext _jspx_page_context)
          throws Throwable {
    PageContext pageContext = _jspx_page_context;
    JspWriter out = _jspx_page_context.getOut();
    //  fmt:message
    org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_message_1 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _jspx_tagPool_fmt_message_key_nobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
    _jspx_th_fmt_message_1.setPageContext(_jspx_page_context);
    _jspx_th_fmt_message_1.setParent(null);
    _jspx_th_fmt_message_1.setKey("setup.clearspace.service.test.title");
    int _jspx_eval_fmt_message_1 = _jspx_th_fmt_message_1.doStartTag();
    if (_jspx_th_fmt_message_1.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
      _jspx_tagPool_fmt_message_key_nobody.reuse(_jspx_th_fmt_message_1);
      return true;
    }
    _jspx_tagPool_fmt_message_key_nobody.reuse(_jspx_th_fmt_message_1);
    return false;
  }

  // Renders <fmt:message key="setup.clearspace.service.test.title-desc"/>.
  private boolean _jspx_meth_fmt_message_2(PageContext _jspx_page_context)
          throws Throwable {
    PageContext pageContext = _jspx_page_context;
    JspWriter out = _jspx_page_context.getOut();
    //  fmt:message
    org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_message_2 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _jspx_tagPool_fmt_message_key_nobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
    _jspx_th_fmt_message_2.setPageContext(_jspx_page_context);
    _jspx_th_fmt_message_2.setParent(null);
    _jspx_th_fmt_message_2.setKey("setup.clearspace.service.test.title-desc");
    int _jspx_eval_fmt_message_2 = _jspx_th_fmt_message_2.doStartTag();
    if (_jspx_th_fmt_message_2.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
      _jspx_tagPool_fmt_message_key_nobody.reuse(_jspx_th_fmt_message_2);
      return true;
    }
    _jspx_tagPool_fmt_message_key_nobody.reuse(_jspx_th_fmt_message_2);
    return false;
  }

  // Renders <fmt:message key="setup.clearspace.service.test.status-success"/>.
  private boolean _jspx_meth_fmt_message_3(PageContext _jspx_page_context)
          throws Throwable {
    PageContext pageContext = _jspx_page_context;
    JspWriter out = _jspx_page_context.getOut();
    //  fmt:message
    org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_message_3 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _jspx_tagPool_fmt_message_key_nobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
    _jspx_th_fmt_message_3.setPageContext(_jspx_page_context);
    _jspx_th_fmt_message_3.setParent(null);
    _jspx_th_fmt_message_3.setKey("setup.clearspace.service.test.status-success");
    int _jspx_eval_fmt_message_3 = _jspx_th_fmt_message_3.doStartTag();
    if (_jspx_th_fmt_message_3.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
      _jspx_tagPool_fmt_message_key_nobody.reuse(_jspx_th_fmt_message_3);
      return true;
    }
    _jspx_tagPool_fmt_message_key_nobody.reuse(_jspx_th_fmt_message_3);
    return false;
  }

  // Renders <fmt:message key="setup.clearspace.service.test.status-success.detail"/>.
  private boolean _jspx_meth_fmt_message_4(PageContext _jspx_page_context)
          throws Throwable {
    PageContext pageContext = _jspx_page_context;
    JspWriter out = _jspx_page_context.getOut();
    //  fmt:message
    org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_message_4 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _jspx_tagPool_fmt_message_key_nobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
    _jspx_th_fmt_message_4.setPageContext(_jspx_page_context);
    _jspx_th_fmt_message_4.setParent(null);
    _jspx_th_fmt_message_4.setKey("setup.clearspace.service.test.status-success.detail");
    int _jspx_eval_fmt_message_4 = _jspx_th_fmt_message_4.doStartTag();
    if (_jspx_th_fmt_message_4.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
      _jspx_tagPool_fmt_message_key_nobody.reuse(_jspx_th_fmt_message_4);
      return true;
    }
    _jspx_tagPool_fmt_message_key_nobody.reuse(_jspx_th_fmt_message_4);
    return false;
  }

  // Renders <fmt:message key="setup.clearspace.service.test.status-error"/>.
  private boolean _jspx_meth_fmt_message_5(PageContext _jspx_page_context)
          throws Throwable {
    PageContext pageContext = _jspx_page_context;
    JspWriter out = _jspx_page_context.getOut();
    //  fmt:message
    org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_message_5 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _jspx_tagPool_fmt_message_key_nobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
    _jspx_th_fmt_message_5.setPageContext(_jspx_page_context);
    _jspx_th_fmt_message_5.setParent(null);
    _jspx_th_fmt_message_5.setKey("setup.clearspace.service.test.status-error");
    int _jspx_eval_fmt_message_5 = _jspx_th_fmt_message_5.doStartTag();
    if (_jspx_th_fmt_message_5.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
      _jspx_tagPool_fmt_message_key_nobody.reuse(_jspx_th_fmt_message_5);
      return true;
    }
    _jspx_tagPool_fmt_message_key_nobody.reuse(_jspx_th_fmt_message_5);
    return false;
  }

  // Renders <fmt:message key="setup.clearspace.service.test.error-detail"/>.
  private boolean _jspx_meth_fmt_message_6(PageContext _jspx_page_context)
          throws Throwable {
    PageContext pageContext = _jspx_page_context;
    JspWriter out = _jspx_page_context.getOut();
    //  fmt:message
    org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_message_6 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _jspx_tagPool_fmt_message_key_nobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
    _jspx_th_fmt_message_6.setPageContext(_jspx_page_context);
    _jspx_th_fmt_message_6.setParent(null);
    _jspx_th_fmt_message_6.setKey("setup.clearspace.service.test.error-detail");
    int _jspx_eval_fmt_message_6 = _jspx_th_fmt_message_6.doStartTag();
    if (_jspx_th_fmt_message_6.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
      _jspx_tagPool_fmt_message_key_nobody.reuse(_jspx_th_fmt_message_6);
      return true;
    }
    _jspx_tagPool_fmt_message_key_nobody.reuse(_jspx_th_fmt_message_6);
    return false;
  }
}
|
|
package de.danoeh.antennapod.core.service.download;
import android.annotation.SuppressLint;
import android.app.Notification;
import android.app.NotificationManager;
import android.app.Service;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.media.MediaMetadataRetriever;
import android.os.Binder;
import android.os.Handler;
import android.os.IBinder;
import android.support.v4.app.NotificationCompat;
import android.support.v4.util.Pair;
import android.util.Log;
import android.webkit.URLUtil;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.Validate;
import org.apache.http.HttpStatus;
import org.xml.sax.SAXException;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.Callable;
import java.util.concurrent.CompletionService;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorCompletionService;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.LinkedBlockingDeque;
import java.util.concurrent.RejectedExecutionHandler;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import javax.xml.parsers.ParserConfigurationException;
import de.danoeh.antennapod.core.ClientConfig;
import de.danoeh.antennapod.core.R;
import de.danoeh.antennapod.core.feed.EventDistributor;
import de.danoeh.antennapod.core.feed.Feed;
import de.danoeh.antennapod.core.feed.FeedImage;
import de.danoeh.antennapod.core.feed.FeedItem;
import de.danoeh.antennapod.core.feed.FeedMedia;
import de.danoeh.antennapod.core.feed.FeedPreferences;
import de.danoeh.antennapod.core.gpoddernet.model.GpodnetEpisodeAction;
import de.danoeh.antennapod.core.gpoddernet.model.GpodnetEpisodeAction.Action;
import de.danoeh.antennapod.core.preferences.GpodnetPreferences;
import de.danoeh.antennapod.core.preferences.UserPreferences;
import de.danoeh.antennapod.core.storage.DBReader;
import de.danoeh.antennapod.core.storage.DBTasks;
import de.danoeh.antennapod.core.storage.DBWriter;
import de.danoeh.antennapod.core.storage.DownloadRequestException;
import de.danoeh.antennapod.core.storage.DownloadRequester;
import de.danoeh.antennapod.core.syndication.handler.FeedHandler;
import de.danoeh.antennapod.core.syndication.handler.FeedHandlerResult;
import de.danoeh.antennapod.core.syndication.handler.UnsupportedFeedtypeException;
import de.danoeh.antennapod.core.util.ChapterUtils;
import de.danoeh.antennapod.core.util.DownloadError;
import de.danoeh.antennapod.core.util.InvalidFeedException;
/**
 * Manages the download of feedfiles in the app. Downloads can be enqueued via the startService intent.
* The argument of the intent is an instance of DownloadRequest in the EXTRA_REQUEST field of
* the intent.
* After the downloads have finished, the downloaded object will be passed on to a specific handler, depending on the
* type of the feedfile.
*/
public class DownloadService extends Service {
private static final String TAG = "DownloadService";
/**
* Cancels one download. The intent MUST have an EXTRA_DOWNLOAD_URL extra that contains the download URL of the
* object whose download should be cancelled.
*/
public static final String ACTION_CANCEL_DOWNLOAD = "action.de.danoeh.antennapod.core.service.cancelDownload";
/**
* Cancels all running downloads.
*/
public static final String ACTION_CANCEL_ALL_DOWNLOADS = "action.de.danoeh.antennapod.core.service.cancelAllDownloads";
/**
* Extra for ACTION_CANCEL_DOWNLOAD
*/
public static final String EXTRA_DOWNLOAD_URL = "downloadUrl";
/**
* Sent by the DownloadService when the content of the downloads list
* changes.
*/
public static final String ACTION_DOWNLOADS_CONTENT_CHANGED = "action.de.danoeh.antennapod.core.service.downloadsContentChanged";
/**
* Extra for ACTION_ENQUEUE_DOWNLOAD intent.
*/
public static final String EXTRA_REQUEST = "request";
/**
* Stores new media files that will be queued for auto-download if possible.
*/
private List<Long> newMediaFiles;
/**
* Contains all completed downloads that have not been included in the report yet.
*/
private List<DownloadStatus> reportQueue;
private ExecutorService syncExecutor;
private CompletionService<Downloader> downloadExecutor;
private FeedSyncThread feedSyncThread;
/**
* Number of threads of downloadExecutor.
*/
private static final int NUM_PARALLEL_DOWNLOADS = 6;
private DownloadRequester requester;
private NotificationCompat.Builder notificationCompatBuilder;
private Notification.BigTextStyle notificationBuilder;
private int NOTIFICATION_ID = 2;
private int REPORT_ID = 3;
/**
* Currently running downloads.
*/
private List<Downloader> downloads;
/**
* Number of running downloads.
*/
private AtomicInteger numberOfDownloads;
/**
* True if service is running.
*/
public static boolean isRunning = false;
private Handler handler;
private NotificationUpdater notificationUpdater;
private ScheduledFuture notificationUpdaterFuture;
private static final int SCHED_EX_POOL_SIZE = 1;
private ScheduledThreadPoolExecutor schedExecutor;
private final IBinder mBinder = new LocalBinder();
    /** Binder handed out by {@link #onBind} that gives bound clients direct access to this service instance. */
    public class LocalBinder extends Binder {
        public DownloadService getService() {
            return DownloadService.this;
        }
    }
private Thread downloadCompletionThread = new Thread() {
private static final String TAG = "downloadCompletionThread";
@Override
public void run() {
Log.d(TAG, "downloadCompletionThread was started");
while (!isInterrupted()) {
try {
Downloader downloader = downloadExecutor.take().get();
Log.d(TAG, "Received 'Download Complete' - message.");
removeDownload(downloader);
DownloadStatus status = downloader.getResult();
boolean successful = status.isSuccessful();
final int type = status.getFeedfileType();
if (successful) {
if (type == Feed.FEEDFILETYPE_FEED) {
handleCompletedFeedDownload(downloader
.getDownloadRequest());
} else if (type == FeedImage.FEEDFILETYPE_FEEDIMAGE) {
handleCompletedImageDownload(status, downloader.getDownloadRequest());
} else if (type == FeedMedia.FEEDFILETYPE_FEEDMEDIA) {
handleCompletedFeedMediaDownload(status, downloader.getDownloadRequest());
}
} else {
numberOfDownloads.decrementAndGet();
if (!status.isCancelled()) {
if (status.getReason() == DownloadError.ERROR_UNAUTHORIZED) {
postAuthenticationNotification(downloader.getDownloadRequest());
} else if (status.getReason() == DownloadError.ERROR_HTTP_DATA_ERROR
&& Integer.valueOf(status.getReasonDetailed()) == HttpStatus.SC_REQUESTED_RANGE_NOT_SATISFIABLE) {
Log.d(TAG, "Requested invalid range, restarting download from the beginning");
FileUtils.deleteQuietly(new File(downloader.getDownloadRequest().getDestination()));
DownloadRequester.getInstance().download(DownloadService.this, downloader.getDownloadRequest());
} else {
Log.e(TAG, "Download failed");
saveDownloadStatus(status);
handleFailedDownload(status, downloader.getDownloadRequest());
}
}
sendDownloadHandledIntent();
queryDownloadsAsync();
}
} catch (InterruptedException e) {
Log.d(TAG, "DownloadCompletionThread was interrupted");
} catch (ExecutionException e) {
e.printStackTrace();
numberOfDownloads.decrementAndGet();
}
}
Log.d(TAG, "End of downloadCompletionThread");
}
};
@Override
public int onStartCommand(Intent intent, int flags, int startId) {
if (intent.getParcelableExtra(EXTRA_REQUEST) != null) {
onDownloadQueued(intent);
} else if (numberOfDownloads.get() == 0) {
stopSelf();
}
return Service.START_NOT_STICKY;
}
@SuppressLint("NewApi")
@Override
public void onCreate() {
Log.d(TAG, "Service started");
isRunning = true;
handler = new Handler();
newMediaFiles = Collections.synchronizedList(new ArrayList<Long>());
reportQueue = Collections.synchronizedList(new ArrayList<DownloadStatus>());
downloads = new ArrayList<Downloader>();
numberOfDownloads = new AtomicInteger(0);
IntentFilter cancelDownloadReceiverFilter = new IntentFilter();
cancelDownloadReceiverFilter.addAction(ACTION_CANCEL_ALL_DOWNLOADS);
cancelDownloadReceiverFilter.addAction(ACTION_CANCEL_DOWNLOAD);
registerReceiver(cancelDownloadReceiver, cancelDownloadReceiverFilter);
syncExecutor = Executors.newSingleThreadExecutor(new ThreadFactory() {
@Override
public Thread newThread(Runnable r) {
Thread t = new Thread(r);
t.setPriority(Thread.MIN_PRIORITY);
return t;
}
});
Log.d(TAG, "parallel downloads: " + UserPreferences.getParallelDownloads());
downloadExecutor = new ExecutorCompletionService<Downloader>(
Executors.newFixedThreadPool(UserPreferences.getParallelDownloads(),
new ThreadFactory() {
@Override
public Thread newThread(Runnable r) {
Thread t = new Thread(r);
t.setPriority(Thread.MIN_PRIORITY);
return t;
}
}
)
);
schedExecutor = new ScheduledThreadPoolExecutor(SCHED_EX_POOL_SIZE,
new ThreadFactory() {
@Override
public Thread newThread(Runnable r) {
Thread t = new Thread(r);
t.setPriority(Thread.MIN_PRIORITY);
return t;
}
}, new RejectedExecutionHandler() {
@Override
public void rejectedExecution(Runnable r,
ThreadPoolExecutor executor) {
Log.w(TAG, "SchedEx rejected submission of new task");
}
}
);
downloadCompletionThread.start();
feedSyncThread = new FeedSyncThread();
feedSyncThread.start();
setupNotificationBuilders();
requester = DownloadRequester.getInstance();
}
    /** Returns the local binder so in-process clients can query running downloads. */
    @Override
    public IBinder onBind(Intent intent) {
        return mBinder;
    }
    @Override
    public void onDestroy() {
        Log.d(TAG, "Service shutting down");
        isRunning = false;
        // Post the completion report before tearing anything down.
        if (ClientConfig.downloadServiceCallbacks.shouldCreateReport()) {
            updateReport();
        }
        stopForeground(true);
        NotificationManager nm = (NotificationManager) getSystemService(NOTIFICATION_SERVICE);
        nm.cancel(NOTIFICATION_ID);
        // Stop worker threads and executors; order mirrors their creation in onCreate.
        downloadCompletionThread.interrupt();
        syncExecutor.shutdown();
        schedExecutor.shutdown();
        feedSyncThread.shutdown();
        cancelNotificationUpdater();
        unregisterReceiver(cancelDownloadReceiver);
        // Hand newly discovered media files to the auto-download logic.
        if (!newMediaFiles.isEmpty()) {
            DBTasks.autodownloadUndownloadedItems(getApplicationContext(),
                    ArrayUtils.toPrimitive(newMediaFiles.toArray(new Long[newMediaFiles.size()])));
        }
    }
    /**
     * Prepares the progress-notification builder. On API 16+ a BigTextStyle
     * builder is used (one line per download); older devices get a plain
     * NotificationCompat builder. Content is filled in by updateNotifications().
     */
    @SuppressLint("NewApi")
    private void setupNotificationBuilders() {
        Bitmap icon = BitmapFactory.decodeResource(getResources(),
                R.drawable.stat_notify_sync);
        if (android.os.Build.VERSION.SDK_INT >= 16) {
            notificationBuilder = new Notification.BigTextStyle(
                    new Notification.Builder(this).setOngoing(true)
                            .setContentIntent(ClientConfig.downloadServiceCallbacks.getNotificationContentIntent(this)).setLargeIcon(icon)
                            .setSmallIcon(R.drawable.stat_notify_sync)
            );
        } else {
            notificationCompatBuilder = new NotificationCompat.Builder(this)
                    .setOngoing(true).setContentIntent(ClientConfig.downloadServiceCallbacks.getNotificationContentIntent(this))
                    .setLargeIcon(icon)
                    .setSmallIcon(R.drawable.stat_notify_sync);
        }
        Log.d(TAG, "Notification set up");
    }
/**
* Updates the contents of the service's notifications. Should be called
* before setupNotificationBuilders.
*/
@SuppressLint("NewApi")
private Notification updateNotifications() {
String contentTitle = getString(R.string.download_notification_title);
int numDownloads = requester.getNumberOfDownloads();
String downloadsLeft;
if (numDownloads > 0) {
downloadsLeft = requester.getNumberOfDownloads()
+ getString(R.string.downloads_left);
} else {
downloadsLeft = getString(R.string.downloads_processing);
}
if (android.os.Build.VERSION.SDK_INT >= 16) {
if (notificationBuilder != null) {
StringBuilder bigText = new StringBuilder("");
for (int i = 0; i < downloads.size(); i++) {
Downloader downloader = downloads.get(i);
final DownloadRequest request = downloader
.getDownloadRequest();
if (request.getFeedfileType() == Feed.FEEDFILETYPE_FEED) {
if (request.getTitle() != null) {
if (i > 0) {
bigText.append("\n");
}
bigText.append("\u2022 " + request.getTitle());
}
} else if (request.getFeedfileType() == FeedMedia.FEEDFILETYPE_FEEDMEDIA) {
if (request.getTitle() != null) {
if (i > 0) {
bigText.append("\n");
}
bigText.append("\u2022 " + request.getTitle()
+ " (" + request.getProgressPercent()
+ "%)");
}
}
}
notificationBuilder.setSummaryText(downloadsLeft);
notificationBuilder.setBigContentTitle(contentTitle);
if (bigText != null) {
notificationBuilder.bigText(bigText.toString());
}
return notificationBuilder.build();
}
} else {
if (notificationCompatBuilder != null) {
notificationCompatBuilder.setContentTitle(contentTitle);
notificationCompatBuilder.setContentText(downloadsLeft);
return notificationCompatBuilder.build();
}
}
return null;
}
private Downloader getDownloader(String downloadUrl) {
for (Downloader downloader : downloads) {
if (downloader.getDownloadRequest().getSource().equals(downloadUrl)) {
return downloader;
}
}
return null;
}
private BroadcastReceiver cancelDownloadReceiver = new BroadcastReceiver() {
@Override
public void onReceive(Context context, Intent intent) {
if (StringUtils.equals(intent.getAction(), ACTION_CANCEL_DOWNLOAD)) {
String url = intent.getStringExtra(EXTRA_DOWNLOAD_URL);
Validate.notNull(url, "ACTION_CANCEL_DOWNLOAD intent needs download url extra");
Log.d(TAG, "Cancelling download with url " + url);
Downloader d = getDownloader(url);
if (d != null) {
d.cancel();
} else {
Log.e(TAG, "Could not cancel download with url " + url);
}
} else if (StringUtils.equals(intent.getAction(), ACTION_CANCEL_ALL_DOWNLOADS)) {
for (Downloader d : downloads) {
d.cancel();
Log.d(TAG, "Cancelled all downloads");
}
sendBroadcast(new Intent(ACTION_DOWNLOADS_CONTENT_CHANGED));
}
queryDownloads();
}
};
private void onDownloadQueued(Intent intent) {
Log.d(TAG, "Received enqueue request");
DownloadRequest request = intent.getParcelableExtra(EXTRA_REQUEST);
if (request == null) {
throw new IllegalArgumentException(
"ACTION_ENQUEUE_DOWNLOAD intent needs request extra");
}
Downloader downloader = getDownloader(request);
if (downloader != null) {
numberOfDownloads.incrementAndGet();
// smaller rss feeds before bigger media files
if(request.getFeedfileId() == Feed.FEEDFILETYPE_FEED) {
downloads.add(0, downloader);
} else {
downloads.add(downloader);
}
downloadExecutor.submit(downloader);
sendBroadcast(new Intent(ACTION_DOWNLOADS_CONTENT_CHANGED));
}
queryDownloads();
}
private Downloader getDownloader(DownloadRequest request) {
if (URLUtil.isHttpUrl(request.getSource())
|| URLUtil.isHttpsUrl(request.getSource())) {
return new HttpDownloader(request);
}
Log.e(TAG,
"Could not find appropriate downloader for "
+ request.getSource()
);
return null;
}
    /**
     * Remove download from the DownloadRequester list and from the
     * DownloadService list.
     */
    private void removeDownload(final Downloader d) {
        // Posted to the main thread because `downloads` is also read/written
        // there (e.g. by updateNotifications and cancelDownloadReceiver).
        handler.post(new Runnable() {
            @Override
            public void run() {
                Log.d(TAG, "Removing downloader: "
                        + d.getDownloadRequest().getSource());
                boolean rc = downloads.remove(d);
                Log.d(TAG, "Result of downloads.remove: " + rc);
                DownloadRequester.getInstance().removeDownload(d.getDownloadRequest());
                sendBroadcast(new Intent(ACTION_DOWNLOADS_CONTENT_CHANGED));
            }
        });
    }
    /**
     * Adds a new DownloadStatus object to the list of completed downloads and
     * saves it in the database
     *
     * @param status the download that is going to be saved
     */
    private void saveDownloadStatus(DownloadStatus status) {
        // reportQueue is consumed by updateReport() during onDestroy.
        reportQueue.add(status);
        DBWriter.addDownloadStatus(this, status);
    }
    /** Notifies EventDistributor listeners that a download result has been processed. */
    private void sendDownloadHandledIntent() {
        EventDistributor.getInstance().sendDownloadHandledBroadcast();
    }
    /**
     * Creates a notification at the end of the service lifecycle to notify the
     * user about the number of completed downloads. A report will only be
     * created if there is at least one failed download excluding images
     */
    private void updateReport() {
        // check if report should be created
        boolean createReport = false;
        int successfulDownloads = 0;
        int failedDownloads = 0;
        // a download report is created if at least one download has failed
        // (excluding failed image downloads)
        for (DownloadStatus status : reportQueue) {
            if (status.isSuccessful()) {
                successfulDownloads++;
            } else if (!status.isCancelled()) {
                // Cancelled downloads count as neither success nor failure.
                if (status.getFeedfileType() != FeedImage.FEEDFILETYPE_FEEDIMAGE) {
                    createReport = true;
                }
                failedDownloads++;
            }
        }
        if (createReport) {
            Log.d(TAG, "Creating report");
            // create notification object
            Notification notification = new NotificationCompat.Builder(this)
                    .setTicker(
                            getString(R.string.download_report_title))
                    .setContentTitle(
                            getString(R.string.download_report_content_title))
                    .setContentText(
                            String.format(
                                    getString(R.string.download_report_content),
                                    successfulDownloads, failedDownloads)
                    )
                    .setSmallIcon(R.drawable.stat_notify_sync_error)
                    .setLargeIcon(
                            BitmapFactory.decodeResource(getResources(),
                                    R.drawable.stat_notify_sync_error)
                    )
                    .setContentIntent(
                            ClientConfig.downloadServiceCallbacks.getReportNotificationContentIntent(this)
                    )
                    .setAutoCancel(true).build();
            NotificationManager nm = (NotificationManager) getSystemService(Context.NOTIFICATION_SERVICE);
            nm.notify(REPORT_ID, notification);
        } else {
            Log.d(TAG, "No report is created");
        }
        // Each status is reported at most once.
        reportQueue.clear();
    }
/**
* Calls query downloads on the services main thread. This method should be used instead of queryDownloads if it is
* used from a thread other than the main thread.
*/
void queryDownloadsAsync() {
handler.post(new Runnable() {
public void run() {
queryDownloads();
;
}
});
}
    /**
     * Check if there's something else to download, otherwise stop
     */
    void queryDownloads() {
        Log.d(TAG, numberOfDownloads.get() + " downloads left");
        if (numberOfDownloads.get() <= 0 && DownloadRequester.getInstance().hasNoDownloads()) {
            Log.d(TAG, "Number of downloads is " + numberOfDownloads.get() + ", attempting shutdown");
            stopSelf();
        } else {
            // Work remains: keep the service in the foreground with a
            // periodically refreshed progress notification.
            setupNotificationUpdater();
            startForeground(NOTIFICATION_ID, updateNotifications());
        }
    }
    /**
     * Posts a notification asking the user to supply credentials for a download
     * that failed with ERROR_UNAUTHORIZED. Posted on the main thread; the
     * notification id is derived from the source URL's hash so one notification
     * exists per resource.
     */
    private void postAuthenticationNotification(final DownloadRequest downloadRequest) {
        handler.post(new Runnable() {
            @Override
            public void run() {
                final String resourceTitle = (downloadRequest.getTitle() != null)
                        ? downloadRequest.getTitle() : downloadRequest.getSource();
                NotificationCompat.Builder builder = new NotificationCompat.Builder(DownloadService.this);
                builder.setTicker(getText(R.string.authentication_notification_title))
                        .setContentTitle(getText(R.string.authentication_notification_title))
                        .setContentText(getText(R.string.authentication_notification_msg))
                        .setStyle(new NotificationCompat.BigTextStyle().bigText(getText(R.string.authentication_notification_msg)
                                + ": " + resourceTitle))
                        .setSmallIcon(R.drawable.ic_stat_authentication)
                        .setLargeIcon(BitmapFactory.decodeResource(getResources(), R.drawable.ic_stat_authentication))
                        .setAutoCancel(true)
                        .setContentIntent(ClientConfig.downloadServiceCallbacks.getAuthentificationNotificationContentIntent(DownloadService.this, downloadRequest));
                Notification n = builder.build();
                NotificationManager nm = (NotificationManager) getSystemService(Context.NOTIFICATION_SERVICE);
                nm.notify(downloadRequest.getSource().hashCode(), n);
            }
        });
    }
    /**
     * Is called whenever a Feed is downloaded
     */
    private void handleCompletedFeedDownload(DownloadRequest request) {
        Log.d(TAG, "Handling completed Feed Download");
        // Parsing and DB updates happen on the FeedSyncThread.
        feedSyncThread.submitCompletedDownload(request);
    }
    /**
     * Is called whenever a Feed-Image is downloaded
     */
    private void handleCompletedImageDownload(DownloadStatus status, DownloadRequest request) {
        Log.d(TAG, "Handling completed Image Download");
        // DB update runs on the single-threaded syncExecutor.
        syncExecutor.execute(new ImageHandlerThread(status, request));
    }
    /**
     * Is called whenever a FeedMedia is downloaded.
     */
    private void handleCompletedFeedMediaDownload(DownloadStatus status, DownloadRequest request) {
        Log.d(TAG, "Handling completed FeedMedia Download");
        // Metadata extraction and DB update run on the syncExecutor.
        syncExecutor.execute(new MediaHandlerThread(status, request));
    }
    /** Dispatches a failed (non-cancelled) download to FailedDownloadHandler on the syncExecutor. */
    private void handleFailedDownload(DownloadStatus status, DownloadRequest request) {
        Log.d(TAG, "Handling failed download");
        syncExecutor.execute(new FailedDownloadHandler(status, request));
    }
/**
* Takes a single Feed, parses the corresponding file and refreshes
* information in the manager
*/
class FeedSyncThread extends Thread {
private static final String TAG = "FeedSyncThread";
private BlockingQueue<DownloadRequest> completedRequests = new LinkedBlockingDeque<DownloadRequest>();
private CompletionService<Pair<DownloadRequest, FeedHandlerResult>> parserService = new ExecutorCompletionService<Pair<DownloadRequest, FeedHandlerResult>>(Executors.newSingleThreadExecutor());
private ExecutorService dbService = Executors.newSingleThreadExecutor();
private Future<?> dbUpdateFuture;
private volatile boolean isActive = true;
private volatile boolean isCollectingRequests = false;
private final long WAIT_TIMEOUT = 3000;
        /**
         * Waits for completed requests. Once the first request has been taken, the method will wait WAIT_TIMEOUT ms longer to
         * collect more completed requests.
         *
         * @return Collected feeds or null if the method has been interrupted during the first waiting period.
         */
        private List<Pair<DownloadRequest, FeedHandlerResult>> collectCompletedRequests() {
            List<Pair<DownloadRequest, FeedHandlerResult>> results = new LinkedList<Pair<DownloadRequest, FeedHandlerResult>>();
            DownloadRequester requester = DownloadRequester.getInstance();
            int tasks = 0;
            try {
                // Blocks until at least one feed download has completed.
                DownloadRequest request = completedRequests.take();
                parserService.submit(new FeedParserTask(request));
                tasks++;
            } catch (InterruptedException e) {
                return null;
            }
            tasks += pollCompletedDownloads();
            // While isCollectingRequests is true, submitCompletedDownload()
            // interrupts this thread to wake it from sleep() below.
            isCollectingRequests = true;
            if (requester.isDownloadingFeeds()) {
                // wait for completion of more downloads
                long startTime = System.currentTimeMillis();
                long currentTime = startTime;
                while (requester.isDownloadingFeeds() && (currentTime - startTime) < WAIT_TIMEOUT) {
                    try {
                        Log.d(TAG, "Waiting for " + (startTime + WAIT_TIMEOUT - currentTime) + " ms");
                        sleep(startTime + WAIT_TIMEOUT - currentTime);
                    } catch (InterruptedException e) {
                        // Woken early: a new request arrived — pick it up.
                        Log.d(TAG, "interrupted while waiting for more downloads");
                        tasks += pollCompletedDownloads();
                    } finally {
                        currentTime = System.currentTimeMillis();
                    }
                }
                tasks += pollCompletedDownloads();
            }
            isCollectingRequests = false;
            // Gather one parser result per submitted task; failed parses
            // yield null and are skipped.
            for (int i = 0; i < tasks; i++) {
                try {
                    Pair<DownloadRequest, FeedHandlerResult> result = parserService.take().get();
                    if (result != null) {
                        results.add(result);
                    }
                } catch (InterruptedException e) {
                    e.printStackTrace();
                } catch (ExecutionException e) {
                    e.printStackTrace();
                }
            }
            return results;
        }
private int pollCompletedDownloads() {
int tasks = 0;
for (int i = 0; i < completedRequests.size(); i++) {
parserService.submit(new FeedParserTask(completedRequests.poll()));
tasks++;
}
return tasks;
}
        @Override
        public void run() {
            while (isActive) {
                // Blocks until at least one completed feed download arrives.
                final List<Pair<DownloadRequest, FeedHandlerResult>> results = collectCompletedRequests();
                if (results == null) {
                    continue;
                }
                Log.d(TAG, "Bundling " + results.size() + " feeds");
                for (Pair<DownloadRequest, FeedHandlerResult> result : results) {
                    removeDuplicateImages(result.second.feed); // duplicate images have to removed because the DownloadRequester does not accept two downloads with the same download URL yet.
                }
                // Save information of feed in DB
                // Wait for the previous batch's DB update to finish so batches
                // are persisted strictly in order.
                if (dbUpdateFuture != null) {
                    try {
                        dbUpdateFuture.get();
                    } catch (InterruptedException e) {
                        e.printStackTrace();
                    } catch (ExecutionException e) {
                        e.printStackTrace();
                    }
                }
                dbUpdateFuture = dbService.submit(new Runnable() {
                    @Override
                    public void run() {
                        // NOTE(review): savedFeeds is assumed to be parallel to
                        // `results` (same order and length), since index i is
                        // used for both below — confirm against DBTasks.updateFeed.
                        Feed[] savedFeeds = DBTasks.updateFeed(DownloadService.this, getFeeds(results));
                        for (int i = 0; i < savedFeeds.length; i++) {
                            Feed savedFeed = savedFeeds[i];
                            // Download Feed Image if provided and not downloaded
                            if (savedFeed.getImage() != null
                                    && savedFeed.getImage().isDownloaded() == false) {
                                Log.d(TAG, "Feed has image; Downloading....");
                                savedFeed.getImage().setOwner(savedFeed);
                                final Feed savedFeedRef = savedFeed;
                                try {
                                    requester.downloadImage(DownloadService.this,
                                            savedFeedRef.getImage());
                                } catch (DownloadRequestException e) {
                                    e.printStackTrace();
                                    DBWriter.addDownloadStatus(
                                            DownloadService.this,
                                            new DownloadStatus(
                                                    savedFeedRef.getImage(),
                                                    savedFeedRef
                                                            .getImage()
                                                            .getHumanReadableIdentifier(),
                                                    DownloadError.ERROR_REQUEST_ERROR,
                                                    false, e.getMessage()
                                            )
                                    );
                                }
                            }
                            // queue new media files for automatic download
                            for (FeedItem item : savedFeed.getItems()) {
                                if(item.getPubDate() == null) {
                                    Log.d(TAG, item.toString());
                                }
                                if(item.getImage() != null && item.getImage().isDownloaded() == false) {
                                    item.getImage().setOwner(item);
                                    try {
                                        requester.downloadImage(DownloadService.this,
                                                item.getImage());
                                    } catch (DownloadRequestException e) {
                                        e.printStackTrace();
                                    }
                                }
                                if (!item.isRead() && item.hasMedia() && !item.getMedia().isDownloaded()) {
                                    // Consumed by onDestroy for auto-download.
                                    newMediaFiles.add(item.getMedia().getId());
                                }
                            }
                            // If loadAllPages=true, check if another page is available and queue it for download
                            final boolean loadAllPages = results.get(i).first.getArguments().getBoolean(DownloadRequester.REQUEST_ARG_LOAD_ALL_PAGES);
                            final Feed feed = results.get(i).second.feed;
                            if (loadAllPages && feed.getNextPageLink() != null) {
                                try {
                                    feed.setId(savedFeed.getId());
                                    DBTasks.loadNextPageOfFeed(DownloadService.this, savedFeed, true);
                                } catch (DownloadRequestException e) {
                                    Log.e(TAG, "Error trying to load next page", e);
                                }
                            }
                            ClientConfig.downloadServiceCallbacks.onFeedParsed(DownloadService.this,
                                    savedFeed);
                            numberOfDownloads.decrementAndGet();
                        }
                        sendDownloadHandledIntent();
                        queryDownloadsAsync();
                    }
                });
            }
            // isActive became false: drain the last pending DB update before exiting.
            if (dbUpdateFuture != null) {
                try {
                    dbUpdateFuture.get();
                } catch (InterruptedException e) {
                } catch (ExecutionException e) {
                    e.printStackTrace();
                }
            }
            Log.d(TAG, "Shutting down");
        }
/**
* Helper method
*/
private Feed[] getFeeds(List<Pair<DownloadRequest, FeedHandlerResult>> results) {
Feed[] feeds = new Feed[results.size()];
for (int i = 0; i < results.size(); i++) {
feeds[i] = results.get(i).second.feed;
}
return feeds;
}
        /** Callable wrapper that parses one downloaded feed file via parseFeed(). */
        private class FeedParserTask implements Callable<Pair<DownloadRequest, FeedHandlerResult>> {
            // Request describing the downloaded feed file to parse.
            private DownloadRequest request;
            private FeedParserTask(DownloadRequest request) {
                this.request = request;
            }
            @Override
            public Pair<DownloadRequest, FeedHandlerResult> call() throws Exception {
                return parseFeed(request);
            }
        }
        /**
         * Parses the downloaded feed file described by the request.
         *
         * On success, returns the (request, parse result) pair; additionally a
         * 'successful' download log entry is written if the feed's previous
         * refresh had failed. On any parse/validation failure, a failure status
         * is saved, the download counter is decremented and null is returned.
         */
        private Pair<DownloadRequest, FeedHandlerResult> parseFeed(DownloadRequest request) {
            Feed feed = new Feed(request.getSource(), new Date());
            feed.setFile_url(request.getDestination());
            feed.setId(request.getFeedfileId());
            feed.setDownloaded(true);
            feed.setPreferences(new FeedPreferences(0, true,
                    request.getUsername(), request.getPassword()));
            feed.setPageNr(request.getArguments().getInt(DownloadRequester.REQUEST_ARG_PAGE_NR, 0));
            DownloadError reason = null;
            String reasonDetailed = null;
            boolean successful = true;
            FeedHandler feedHandler = new FeedHandler();
            FeedHandlerResult result = null;
            try {
                result = feedHandler.parseFeed(feed);
                Log.d(TAG, feed.getTitle() + " parsed");
                if (checkFeedData(feed) == false) {
                    throw new InvalidFeedException();
                }
            } catch (SAXException e) {
                successful = false;
                e.printStackTrace();
                reason = DownloadError.ERROR_PARSER_EXCEPTION;
                reasonDetailed = e.getMessage();
            } catch (IOException e) {
                successful = false;
                e.printStackTrace();
                reason = DownloadError.ERROR_PARSER_EXCEPTION;
                reasonDetailed = e.getMessage();
            } catch (ParserConfigurationException e) {
                successful = false;
                e.printStackTrace();
                reason = DownloadError.ERROR_PARSER_EXCEPTION;
                reasonDetailed = e.getMessage();
            } catch (UnsupportedFeedtypeException e) {
                e.printStackTrace();
                successful = false;
                reason = DownloadError.ERROR_UNSUPPORTED_TYPE;
                reasonDetailed = e.getMessage();
            } catch (InvalidFeedException e) {
                e.printStackTrace();
                successful = false;
                reason = DownloadError.ERROR_PARSER_EXCEPTION;
                reasonDetailed = e.getMessage();
            }
            // NOTE(review): cleanup of the cached feed file is disabled here,
            // so the temp file is left on disk — confirm whether intentional.
            // cleanup();
            if (successful) {
                // we create a 'successful' download log if the feed's last refresh failed
                List<DownloadStatus> log = DBReader.getFeedDownloadLog(DownloadService.this, feed);
                if(log.size() > 0 && log.get(0).isSuccessful() == false) {
                    saveDownloadStatus(new DownloadStatus(feed,
                            feed.getHumanReadableIdentifier(), DownloadError.SUCCESS, successful,
                            reasonDetailed));
                }
                return Pair.create(request, result);
            } else {
                numberOfDownloads.decrementAndGet();
                saveDownloadStatus(new DownloadStatus(feed,
                        feed.getHumanReadableIdentifier(), reason, successful,
                        reasonDetailed));
                return null;
            }
        }
/**
* Checks if the feed was parsed correctly.
*/
private boolean checkFeedData(Feed feed) {
if (feed.getTitle() == null) {
Log.e(TAG, "Feed has no title.");
return false;
}
if (!hasValidFeedItems(feed)) {
Log.e(TAG, "Feed has invalid items");
return false;
}
return true;
}
/**
* Checks if the FeedItems of this feed have images that point
* to the same URL. If two FeedItems have an image that points to
* the same URL, the reference of the second item is removed, so that every image
* reference is unique.
*/
private void removeDuplicateImages(Feed feed) {
for (int x = 0; x < feed.getItems().size(); x++) {
for (int y = x + 1; y < feed.getItems().size(); y++) {
FeedItem item1 = feed.getItems().get(x);
FeedItem item2 = feed.getItems().get(y);
if (item1.hasItemImage() && item2.hasItemImage()) {
if (StringUtils.equals(item1.getImage().getDownload_url(), item2.getImage().getDownload_url())) {
item2.setImage(null);
}
}
}
}
}
private boolean hasValidFeedItems(Feed feed) {
for (FeedItem item : feed.getItems()) {
if (item.getTitle() == null) {
Log.e(TAG, "Item has no title");
return false;
}
if (item.getPubDate() == null) {
Log.e(TAG,
"Item has no pubDate. Using current time as pubDate");
if (item.getTitle() != null) {
Log.e(TAG, "Title of invalid item: " + item.getTitle());
}
item.setPubDate(new Date());
}
}
return true;
}
/**
* Delete files that aren't needed anymore
*/
private void cleanup(Feed feed) {
if (feed.getFile_url() != null) {
if (new File(feed.getFile_url()).delete()) {
Log.d(TAG, "Successfully deleted cache file.");
} else {
Log.e(TAG, "Failed to delete cache file.");
}
feed.setFile_url(null);
} else {
Log.d(TAG, "Didn't delete cache file: File url is not set.");
}
}
        /** Stops the sync loop; interrupts the thread if it is waiting for more requests. */
        public void shutdown() {
            isActive = false;
            if (isCollectingRequests) {
                interrupt();
            }
        }
        /** Enqueues a completed feed download; wakes the thread if it is waiting to collect more. */
        public void submitCompletedDownload(DownloadRequest request) {
            completedRequests.offer(request);
            if (isCollectingRequests) {
                interrupt();
            }
        }
}
    /**
     * Handles failed downloads.
     * <p/>
     * If the file has been partially downloaded, this handler will set the file_url of the FeedFile to the location
     * of the downloaded file.
     * <p/>
     * Currently, this handler only handles FeedMedia objects, because Feeds and FeedImages are deleted if the download fails.
     */
    class FailedDownloadHandler implements Runnable {
        private DownloadRequest request;
        private DownloadStatus status;
        FailedDownloadHandler(DownloadStatus status, DownloadRequest request) {
            this.request = request;
            this.status = status;
        }
        @Override
        public void run() {
            if(request.getFeedfileType() == Feed.FEEDFILETYPE_FEED) {
                // Mark the feed so the UI can show the failed refresh.
                DBWriter.setFeedLastUpdateFailed(DownloadService.this, request.getFeedfileId(), true);
            } else if (request.isDeleteOnFailure()) {
                Log.d(TAG, "Ignoring failed download, deleteOnFailure=true");
            } else {
                File dest = new File(request.getDestination());
                if (dest.exists() && request.getFeedfileType() == FeedMedia.FEEDFILETYPE_FEEDMEDIA) {
                    // Keep the partial file so the download can be resumed.
                    Log.d(TAG, "File has been partially downloaded. Writing file url");
                    FeedMedia media = DBReader.getFeedMedia(DownloadService.this, request.getFeedfileId());
                    media.setFile_url(request.getDestination());
                    try {
                        // Block until the DB write completes.
                        DBWriter.setFeedMedia(DownloadService.this, media).get();
                    } catch (InterruptedException e) {
                        e.printStackTrace();
                    } catch (ExecutionException e) {
                        e.printStackTrace();
                    }
                }
            }
        }
    }
    /**
     * Handles a completed image download.
     */
    class ImageHandlerThread implements Runnable {
        private DownloadRequest request;
        private DownloadStatus status;
        public ImageHandlerThread(DownloadStatus status, DownloadRequest request) {
            Validate.notNull(status);
            Validate.notNull(request);
            this.status = status;
            this.request = request;
        }
        @Override
        public void run() {
            FeedImage image = DBReader.getFeedImage(DownloadService.this, request.getFeedfileId());
            if (image == null) {
                throw new IllegalStateException("Could not find downloaded image in database");
            }
            // Record the local file location and persist the image.
            image.setFile_url(request.getDestination());
            image.setDownloaded(true);
            saveDownloadStatus(status);
            sendDownloadHandledIntent();
            DBWriter.setFeedImage(DownloadService.this, image);
            numberOfDownloads.decrementAndGet();
            queryDownloadsAsync();
        }
    }
    /**
     * Handles a completed media download.
     */
    class MediaHandlerThread implements Runnable {
        private DownloadRequest request;
        private DownloadStatus status;
        public MediaHandlerThread(DownloadStatus status, DownloadRequest request) {
            Validate.notNull(status);
            Validate.notNull(request);
            this.status = status;
            this.request = request;
        }
        @Override
        public void run() {
            FeedMedia media = DBReader.getFeedMedia(DownloadService.this,
                    request.getFeedfileId());
            if (media == null) {
                throw new IllegalStateException(
                        "Could not find downloaded media object in database");
            }
            boolean chaptersRead = false;
            media.setDownloaded(true);
            media.setFile_url(request.getDestination());
            // Get duration
            MediaMetadataRetriever mmr = null;
            try {
                mmr = new MediaMetadataRetriever();
                mmr.setDataSource(media.getFile_url());
                String durationStr = mmr.extractMetadata(MediaMetadataRetriever.METADATA_KEY_DURATION);
                media.setDuration(Integer.parseInt(durationStr));
                Log.d(TAG, "Duration of file is " + media.getDuration());
            } catch (NumberFormatException e) {
                // Duration metadata missing or non-numeric; leave as-is.
                e.printStackTrace();
            } catch (RuntimeException e) {
                e.printStackTrace();
            } finally {
                if (mmr != null) {
                    mmr.release();
                }
            }
            // Try to read embedded chapters from the downloaded file.
            if (media.getItem().getChapters() == null) {
                ChapterUtils.loadChaptersFromFileUrl(media);
                if (media.getItem().getChapters() != null) {
                    chaptersRead = true;
                }
            }
            try {
                if (chaptersRead) {
                    DBWriter.setFeedItem(DownloadService.this, media.getItem()).get();
                }
                DBWriter.setFeedMedia(DownloadService.this, media).get();
                if (!DBTasks.isInQueue(DownloadService.this, media.getItem().getId())) {
                    DBWriter.addQueueItem(DownloadService.this, media.getItem().getId()).get();
                }
            } catch (ExecutionException e) {
                // DB write failed: replace the success status with a failure one.
                e.printStackTrace();
                status = new DownloadStatus(media, media.getEpisodeTitle(), DownloadError.ERROR_DB_ACCESS_ERROR, false, e.getMessage());
            } catch (InterruptedException e) {
                e.printStackTrace();
                status = new DownloadStatus(media, media.getEpisodeTitle(), DownloadError.ERROR_DB_ACCESS_ERROR, false, e.getMessage());
            }
            saveDownloadStatus(status);
            sendDownloadHandledIntent();
            // Report the download to gpodder.net if the user is logged in.
            if(GpodnetPreferences.loggedIn()) {
                FeedItem item = media.getItem();
                GpodnetEpisodeAction action = new GpodnetEpisodeAction.Builder(item, Action.DOWNLOAD)
                        .currentDeviceId()
                        .currentTimestamp()
                        .build();
                GpodnetPreferences.enqueueEpisodeAction(action);
            }
            numberOfDownloads.decrementAndGet();
            queryDownloadsAsync();
        }
    }
/**
* Schedules the notification updater task if it hasn't been scheduled yet.
*/
/**
 * Schedules the periodic notification updater task unless one is already scheduled.
 * Runs every 5 seconds on the shared scheduled executor.
 */
private void setupNotificationUpdater() {
    Log.d(TAG, "Setting up notification updater");
    if (notificationUpdater != null) {
        // Already scheduled; nothing to do.
        return;
    }
    notificationUpdater = new NotificationUpdater();
    notificationUpdaterFuture =
            schedExecutor.scheduleAtFixedRate(notificationUpdater, 5L, 5L, TimeUnit.SECONDS);
}
/**
 * Cancels the scheduled notification updater task (if any) and clears the
 * references so a later {@code setupNotificationUpdater()} can reschedule it.
 */
private void cancelNotificationUpdater() {
    // Short-circuit: false when no future exists, otherwise the cancel outcome.
    final boolean cancelled = notificationUpdaterFuture != null && notificationUpdaterFuture.cancel(true);
    notificationUpdater = null;
    notificationUpdaterFuture = null;
    Log.d(TAG, "NotificationUpdater cancelled. Result: " + cancelled);
}
/**
 * Periodic task that rebuilds the download notification on the main thread
 * (via {@code handler}) and posts it if an update is available.
 */
private class NotificationUpdater implements Runnable {
    @Override
    public void run() {
        handler.post(new Runnable() {
            @Override
            public void run() {
                final Notification notification = updateNotifications();
                if (notification == null) {
                    // Nothing to show right now.
                    return;
                }
                NotificationManager manager =
                        (NotificationManager) getSystemService(Context.NOTIFICATION_SERVICE);
                manager.notify(NOTIFICATION_ID, notification);
            }
        });
    }
}
/**
 * Returns the currently active downloaders.
 * NOTE(review): this exposes the internal list itself, not a copy — verify
 * callers do not mutate it.
 */
public List<Downloader> getDownloads() {
    return downloads;
}
}
|
|
/*******************************************************************************
*
* Copyright FUJITSU LIMITED 2017
*
* Creation Date: 27.05.2013
*
*******************************************************************************/
package org.oscm.saml2.api;
import static org.junit.Assert.assertEquals;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.doNothing;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import java.io.FileInputStream;
import java.util.Calendar;
import javax.xml.bind.DatatypeConverter;
import javax.xml.xpath.XPathExpressionException;
import org.junit.Before;
import org.junit.Test;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.oscm.converter.XMLConverter;
import org.oscm.internal.types.exception.AssertionValidationException;
/**
* @author kulle
*
*/
public class AssertionContentVerifierTest {

    // Test fixtures: an unsigned SAML assertion, and one lacking SubjectConfirmationData.
    private static final String FILE_UNSIGNED_ASSERTION = "javares/unsignedAssertion.xml";
    private static final String FILE_MISSING_CONFIRMATION_DATA = "javares/unsignedAssertion_noConfirmationData_noUserid.xml";

    private AssertionContentVerifier verifier;
    private Document assertion;

    @Before
    public void setup() throws Exception {
        // Spy so individual verification steps can be stubbed and verified per test.
        verifier = spy(new AssertionContentVerifier(new VerifierConfiguration(
                "identifier_1", "", "", Calendar.getInstance(), "8f96dede")));
        assertion = loadDocument(FILE_UNSIGNED_ASSERTION);
    }

    /**
     * Parses the given XML file into a DOM {@link Document}.
     *
     * @param file path of the XML file to load
     * @return the parsed document
     * @throws Exception if the file cannot be read or parsed
     */
    private Document loadDocument(String file) throws Exception {
        // try-with-resources replaces the original manual close-in-finally idiom.
        try (FileInputStream inputStream = new FileInputStream(file)) {
            return XMLConverter.convertToDocument(inputStream);
        }
    }

    @Test(expected = AssertionValidationException.class)
    public void verifyConfirmationData_error() throws Exception {
        // given
        assertion = loadDocument(FILE_UNSIGNED_ASSERTION);
        doThrow(new XPathExpressionException("")).when(verifier)
                .loadConfirmationData(any(Node.class));

        // when
        verifier.verifyConfirmationData(assertion);

        // then exception expected
    }

    @Test
    public void verifyConfirmationData() throws Exception {
        // given
        assertion = loadDocument(FILE_UNSIGNED_ASSERTION);
        doNothing().when(verifier).verifyRecipient(any(Node.class),
                any(Node.class));
        doNothing().when(verifier).verifyAssertionExpirationDate(
                any(Node.class), any(Node.class));
        doNothing().when(verifier).verifyInResponseTo(any(Node.class),
                any(Node.class));

        // when
        verifier.verifyConfirmationData(assertion);

        // then each verification step runs exactly once
        verify(verifier, times(1)).verifyRecipient(any(Node.class),
                any(Node.class));
        verify(verifier, times(1)).verifyAssertionExpirationDate(
                any(Node.class), any(Node.class));
        verify(verifier, times(1)).verifyInResponseTo(any(Node.class),
                any(Node.class));
    }

    @Test
    public void verifyAssertionContent_delegate() throws Exception {
        // given
        Node node = null;
        doNothing().when(verifier).verifyConfirmationData(any(Node.class));

        // when
        verifier.verifyAssertionContent(node);

        // then
        verify(verifier, times(1)).verifyConfirmationData(any(Node.class));
    }

    @Test
    public void loadConfirmationData_missingConfirmationData() throws Exception {
        // given
        assertion = loadDocument(FILE_MISSING_CONFIRMATION_DATA);
        Node nodeAssertion = getActualFirstChild(assertion);

        // when
        NodeList confirmationData = verifier
                .loadConfirmationData(nodeAssertion);

        // then
        assertEquals(0, confirmationData.getLength());
    }

    @Test
    public void loadConfirmationData() throws Exception {
        // given
        Node nodeAssertion = getActualFirstChild(assertion);

        // when
        NodeList confirmationData = verifier
                .loadConfirmationData(nodeAssertion);

        // then
        assertEquals(1, confirmationData.getLength());
    }

    @Test
    public void verifyRecipient() throws Exception {
        // given: acsUrl taken from the assertion itself, so it must match
        Node nodeAssertion = getActualFirstChild(assertion);
        Node nodeConfirmationData = XMLConverter.getNodeByXPath(nodeAssertion,
                "//saml2:SubjectConfirmationData");
        verifier.acsUrl = XMLConverter.getStringAttValue(nodeConfirmationData,
                SamlXmlTags.ATTRIBUTE_RECIPIENT);

        // when
        verifier.verifyRecipient(nodeAssertion, nodeConfirmationData);

        // then no exception expected
    }

    @Test(expected = AssertionValidationException.class)
    public void verifyRecipient_error() throws Exception {
        // given
        Node nodeAssertion = getActualFirstChild(assertion);
        Node nodeConfirmationData = XMLConverter.getNodeByXPath(nodeAssertion,
                "//saml2:SubjectConfirmationData");
        verifier.acsUrl = "wrong url";

        // when
        verifier.verifyRecipient(nodeAssertion, nodeConfirmationData);
    }

    @Test
    public void verifyAssertionExpirationDate() throws Exception {
        // given: "now" is set to year 2000, well before the expiration date
        Node nodeAssertion = getActualFirstChild(assertion);
        Node nodeConfirmationData = XMLConverter.getNodeByXPath(nodeAssertion,
                "//saml2:SubjectConfirmationData");
        Calendar now = Calendar.getInstance();
        now.set(Calendar.YEAR, 2000);
        verifier.now = now;

        // when
        verifier.verifyAssertionExpirationDate(nodeAssertion,
                nodeConfirmationData);

        // then no exception expected
    }

    @Test(expected = AssertionValidationException.class)
    public void verifyAssertionExpirationDate_equals() throws Exception {
        // given: "now" equals NotOnOrAfter, which must already be rejected
        Node nodeAssertion = getActualFirstChild(assertion);
        Node nodeConfirmationData = XMLConverter.getNodeByXPath(nodeAssertion,
                "//saml2:SubjectConfirmationData");
        verifier.now = readExpirationDateFromXml(nodeConfirmationData);

        // when
        verifier.verifyAssertionExpirationDate(nodeAssertion,
                nodeConfirmationData);
    }

    /**
     * Reads the NotOnOrAfter attribute of the confirmation data as a Calendar.
     */
    private Calendar readExpirationDateFromXml(Node nodeConfirmationData) {
        return DatatypeConverter.parseDateTime(XMLConverter.getStringAttValue(
                nodeConfirmationData, SamlXmlTags.ATTRIBUTE_NOT_ON_OR_AFTER));
    }

    @Test(expected = AssertionValidationException.class)
    public void verifyAssertionExpirationDate_error() throws Exception {
        // given: verifier.now keeps its setup() value (current time, past the fixture's date)
        Node nodeAssertion = getActualFirstChild(assertion);
        Node nodeConfirmationData = XMLConverter.getNodeByXPath(nodeAssertion,
                "//saml2:SubjectConfirmationData");

        // when
        verifier.verifyAssertionExpirationDate(nodeAssertion,
                nodeConfirmationData);
    }

    @Test
    public void verifyInResponseTo() throws Exception {
        // given
        Node nodeAssertion = getActualFirstChild(assertion);
        Node nodeConfirmationData = XMLConverter.getNodeByXPath(nodeAssertion,
                "//saml2:SubjectConfirmationData");

        // when
        verifier.verifyInResponseTo(nodeAssertion, nodeConfirmationData);

        // then no exception is expected
    }

    @Test(expected = AssertionValidationException.class)
    public void verifyInResponseTo_error() throws Exception {
        // given
        Node nodeAssertion = getActualFirstChild(assertion);
        Node nodeConfirmationData = XMLConverter.getNodeByXPath(nodeAssertion,
                "//saml2:SubjectConfirmationData");
        verifier.requestId = "wrong request id";

        // when
        verifier.verifyInResponseTo(nodeAssertion, nodeConfirmationData);
    }

    /**
     * Returns the document's first element, skipping a leading XML comment node
     * (license headers in the fixture files show up as "#comment").
     */
    private Node getActualFirstChild(Document doc) {
        Node firstNode = doc.getFirstChild();
        if (firstNode.getNodeName().equals("#comment")) {
            firstNode = firstNode.getNextSibling();
        }
        return firstNode;
    }
}
|
|
package org.ovirt.engine.ui.userportal.client.components;
import com.google.gwt.core.client.GWT;
import org.ovirt.engine.core.compat.EventArgs;
import org.ovirt.engine.core.compat.PropertyChangedEventArgs;
import org.ovirt.engine.ui.uicommon.models.EntityModel;
import org.ovirt.engine.ui.uicommon.models.ListModel;
import org.ovirt.engine.ui.uicommon.models.Model;
import org.ovirt.engine.core.compat.Event;
import org.ovirt.engine.core.compat.IEventListener;
import org.ovirt.engine.ui.userportal.client.binders.ModelBindedComponent;
import org.ovirt.engine.ui.userportal.client.parsers.UPParser;
import com.smartgwt.client.types.Alignment;
import com.smartgwt.client.widgets.form.fields.SelectItem;
import com.smartgwt.client.widgets.form.fields.TextItem;
import com.smartgwt.client.widgets.form.fields.events.BlurEvent;
import com.smartgwt.client.widgets.form.fields.events.BlurHandler;
import com.smartgwt.client.widgets.form.fields.events.KeyUpEvent;
import com.smartgwt.client.widgets.form.fields.events.KeyUpHandler;
/**
 * A {@link UPTextItem} two-way bound to a uicommon {@link EntityModel}: user
 * edits are pushed into the model (key-up), and model changes (entity value,
 * IsChangable, IsAvailable, IsValid) are reflected back into the widget.
 */
public class TextItemEntityModelBinded extends UPTextItem implements ModelBindedComponent {

    EntityModel entityModel;
    UPParser parser;

    public TextItemEntityModelBinded(String title, EntityModel model) {
        this(true, title, model, false);
    }

    public TextItemEntityModelBinded(String title, EntityModel model, boolean isIntField) {
        this(true, title, model, isIntField, null);
    }

    public TextItemEntityModelBinded(String title, EntityModel model, boolean isIntField, UPParser parser) {
        this(true, title, model, isIntField, parser);
    }

    public TextItemEntityModelBinded(boolean async, String title, EntityModel model, boolean isIntField) {
        this(async, title, model, isIntField, null);
    }

    /**
     * @param async      when {@code false}, the entity-changed event is raised
     *                   immediately so the widget is synchronized on creation
     * @param title      field title (wrapped in {@code <nobr>} to prevent wrapping)
     * @param model      the entity model to bind to
     * @param isIntField when {@code true}, input is parsed as an int before
     *                   being pushed into the model
     * @param parser     optional formatter/parser between display and model value
     */
    public TextItemEntityModelBinded(boolean async, String title, EntityModel model, boolean isIntField, UPParser parser) {
        this.parser = parser;
        entityModel = model;
        setDisabled(!model.getIsChangable());
        setTitle("<nobr>" + title + "</nobr>");
        setTitleAlign(Alignment.LEFT);
        // Seed the widget with the model's current value (String or Integer).
        String value = "";
        if (model.getEntity() instanceof String) {
            value = (String) model.getEntity();
        }
        if (model.getEntity() instanceof Integer) {
            value = ((Integer) model.getEntity()).toString();
        }
        if (parser != null) {
            value = parser.format(value);
        }
        setValue(value);
        addKeyUpHandler(new NewVmTextItemKeyUpHandler(isIntField));
        addBlurHandler(new NewVmTextItemBlurHandler(isIntField));
        model.getEntityChangedEvent().addListener(new TextItemChangedEventListener(this));
        model.getPropertyChangedEvent().addListener(new TextItemPropertyChangedEventListener(this));
        if (!async) {
            entityModel.getEntityChangedEvent().raise(model, EventArgs.Empty);
        }
    }

    /**
     * Pushes the widget's current text into the model on every key-up,
     * optionally parsing and/or converting it to an int first.
     */
    public class NewVmTextItemKeyUpHandler implements KeyUpHandler {

        boolean convertToInt = false;

        public NewVmTextItemKeyUpHandler(boolean convertToInt) {
            this.convertToInt = convertToInt;
        }

        @Override
        public void onKeyUp(KeyUpEvent event) {
            TextItemEntityModelBinded source = (TextItemEntityModelBinded) event.getSource();
            String title = source.getTitle();
            Object inputValue = source.getValue();
            Object currentValue = entityModel.getEntity();
            GWT.log("Getting:" + currentValue);
            // parse
            if (parser != null) {
                inputValue = parser.parse((String) inputValue);
            }
            // convert int; non-numeric input is ignored rather than pushed to the model
            if (convertToInt) {
                try {
                    inputValue = Integer.parseInt((String) inputValue);
                } catch (NumberFormatException e) {
                    return;
                }
            }
            // do not set if value has not changed
            if (currentValue != null && currentValue.equals(inputValue)) {
                return;
            }
            // set
            GWT.log("Setting " + title + " to: " + inputValue);
            entityModel.setEntity(inputValue);
        }
    }

    /**
     * Re-formats the displayed text via the parser when the field loses focus.
     */
    public class NewVmTextItemBlurHandler implements BlurHandler {

        boolean convertToInt = false;

        public NewVmTextItemBlurHandler(boolean convertToInt) {
            this.convertToInt = convertToInt;
        }

        @Override
        public void onBlur(BlurEvent event) {
            TextItemEntityModelBinded source = (TextItemEntityModelBinded) event.getSource();
            String inputValue = (String) source.getValue();
            // display formatted value if a parser exists
            // (redundant (String) cast removed: inputValue is already a String)
            if (parser != null) {
                String displayValue = parser.format(inputValue);
                source.setValue(displayValue);
            }
        }
    }

    /**
     * Mirrors model entity changes into the widget, formatting via the parser
     * when one is configured.
     */
    public class TextItemChangedEventListener implements IEventListener {

        private TextItemEntityModelBinded textItem;

        public TextItemChangedEventListener(TextItemEntityModelBinded textItem) {
            this.textItem = textItem;
        }

        @Override
        public void eventRaised(Event ev, Object sender, EventArgs args) {
            GWT.log("Text item changed: " + textItem.getTitle() + " Changed to: " + ((EntityModel) sender).getEntity());
            if (((EntityModel) sender).getEntity() != null) {
                String newValue = ((EntityModel) sender).getEntity().toString();
                // if the value is same as the current one, do nothing
                if (newValue.equals(textItem.getValue())) {
                    return;
                }
                if (parser == null) {
                    textItem.setValue(newValue);
                }
                else {
                    textItem.setValue(parser.format(newValue));
                }
                textItem.redraw();
            }
            else {
                textItem.setValue("");
            }
        }
    }

    /**
     * Reacts to model property changes: enables/disables, shows/hides, and
     * styles the widget according to IsChangable / IsAvailable / IsValid.
     */
    public class TextItemPropertyChangedEventListener implements IEventListener {

        private TextItemEntityModelBinded textItem;

        public TextItemPropertyChangedEventListener(TextItemEntityModelBinded textItem) {
            this.textItem = textItem;
        }

        @Override
        public void eventRaised(Event ev, Object sender, EventArgs args) {
            String property = ((PropertyChangedEventArgs) args).PropertyName;
            if (property.equals("IsChangable")) {
                textItem.setDisabled(!((Model) sender).getIsChangable());
            }
            else if (property.equals("IsAvailable")) {
                if (!entityModel.getIsAvailable()) {
                    hide();
                }
                else {
                    show();
                }
            }
            else if (property.equals("IsValid")) {
                if (!entityModel.getIsValid()) {
                    // Invalid: highlight the field and show the first invalidity reason as tooltip.
                    setTextBoxStyle("textBoxInvalid");
                    if (entityModel.getInvalidityReasons() != null && entityModel.getInvalidityReasons().size() > 0) {
                        setTooltip("<nobr>" + entityModel.getInvalidityReasons().iterator().next() + "</nobr>");
                        setHoverStyle("gridToolTipStyle");
                        setHoverWidth(1);
                        setHoverDelay(100);
                    }
                }
                else {
                    // Valid again: restore default style and clear the tooltip.
                    setTextBoxStyle("engineTextItem");
                    setTooltip("");
                }
            }
        }
    }

    public Model getModel() {
        return entityModel;
    }
}
|
|
/*
* Javolution - Java(tm) Solution for Real-Time and Embedded Systems
* Copyright (c) 2012, Javolution (http://javolution.org/)
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.intellij.util.text;
import com.intellij.openapi.util.text.CharSequenceWithStringHash;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.util.ArrayUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
/**
* A pruned and optimized version of javolution.text.Text
*
* <p> This class represents an immutable character sequence with
* fast {@link #concat concatenation}, {@link #insert insertion} and
* {@link #delete deletion} capabilities (O[Log(n)]) instead of
* O[n] for StringBuffer/StringBuilder).</p>
*
* <p><i> Implementation Note: To avoid expensive copy operations ,
* {@link ImmutableText} instances are broken down into smaller immutable
* sequences, they form a minimal-depth binary tree.
* The tree is maintained balanced automatically through <a
* href="http://en.wikipedia.org/wiki/Tree_rotation">tree rotations</a>.
* Insertion/deletions are performed in {@code O[Log(n)]}
* instead of {@code O[n]} for
* {@code StringBuffer/StringBuilder}.</i></p>
*
* @author <a href="mailto:jean-marie@dautelle.com">Jean-Marie Dautelle</a>
* @author Wilfried Middleton
* @version 5.3, January 10, 2007
*/
@SuppressWarnings({"AssignmentToForLoopParameter","UnnecessaryThis"})
final class ImmutableText extends ImmutableCharSequence implements CharArrayExternalizable, CharSequenceWithStringHash {

  /**
   * Holds the default size for primitive blocks of characters.
   */
  private static final int BLOCK_SIZE = 1 << 6;

  /**
   * Holds the mask used to ensure a block boundary cesures.
   */
  private static final int BLOCK_MASK = ~(BLOCK_SIZE - 1);

  // Root of the (leaf or composite) binary tree of character chunks backing this text.
  private final Node myNode;

  private ImmutableText(Node node) {
    myNode = node;
  }

  /**
   * Returns the text representing the specified object.
   *
   * @param obj the object to represent as text.
   * @return the textual representation of the specified object.
   */
  static ImmutableText valueOf(@NotNull Object obj) {
    if (obj instanceof ImmutableText) return (ImmutableText)obj;
    if (obj instanceof CharSequence) return ((CharSequence)obj).length() == 0 ? EMPTY : valueOf((CharSequence)obj);
    return valueOf(String.valueOf(obj));
  }

  private static ImmutableText valueOf(@NotNull CharSequence str) {
    return new ImmutableText(createLeafNode(str));
  }

  // Prefers a compact byte-per-char leaf when every char fits in 8 bits,
  // falling back to a char[] leaf otherwise.
  private static LeafNode createLeafNode(@NotNull CharSequence str) {
    byte[] bytes = toBytesIfPossible(str);
    if (bytes != null) {
      return new Leaf8BitNode(bytes);
    }
    char[] chars = new char[str.length()];
    CharArrayUtil.getChars(str, chars, 0, 0, str.length());
    return new WideLeafNode(chars);
  }

  /**
   * Returns a byte-per-char copy of {@code seq}, or {@code null} if any
   * character is outside the 0..0xFF range.
   */
  @Nullable
  private static byte[] toBytesIfPossible(CharSequence seq) {
    if (seq instanceof ByteArrayCharSequence) {
      return ((ByteArrayCharSequence)seq).getBytes();
    }
    byte[] bytes = new byte[seq.length()];
    // Use the backing char[] directly when available to avoid per-char virtual calls.
    char[] chars = CharArrayUtil.fromSequenceWithoutCopying(seq);
    if (chars == null) {
      for (int i = 0; i < bytes.length; i++) {
        char c = seq.charAt(i);
        if ((c & 0xff00) != 0) {
          return null;
        }
        bytes[i] = (byte)c;
      }
    }
    else {
      for (int i = 0; i < bytes.length; i++) {
        char c = chars[i];
        if ((c & 0xff00) != 0) {
          return null;
        }
        bytes[i] = (byte)c;
      }
    }
    return bytes;
  }

  /**
   * When first loaded, ImmutableText contents are stored as a single large array. This saves memory but isn't
   * modification-friendly as it disallows slightly changed texts to retain most of the internal structure of the
   * original document. Whoever retains old non-chunked version will use more memory than really needed.
   *
   * @return a copy of this text better prepared for small modifications to fully enable structure-sharing capabilities
   */
  private ImmutableText ensureChunked() {
    if (length() > BLOCK_SIZE && myNode instanceof LeafNode) {
      return new ImmutableText(nodeOf((LeafNode)myNode, 0, length()));
    }
    return this;
  }

  // Recursively splits a large leaf into a balanced tree of <= BLOCK_SIZE leaves.
  private static Node nodeOf(@NotNull LeafNode node, int offset, int length) {
    if (length <= BLOCK_SIZE) {
      return node.subNode(offset, offset+length);
    }
    // Splits on a block boundary.
    int half = ((length + BLOCK_SIZE) >> 1) & BLOCK_MASK;
    return new CompositeNode(nodeOf(node, offset, half), nodeOf(node, offset + half, length - half));
  }

  private static final LeafNode EMPTY_NODE = new Leaf8BitNode(ArrayUtil.EMPTY_BYTE_ARRAY);
  private static final ImmutableText EMPTY = new ImmutableText(EMPTY_NODE);

  /**
   * Returns the length of this text.
   *
   * @return the number of characters (16-bits Unicode) composing this text.
   */
  @Override
  public int length() {
    return myNode.length();
  }

  /**
   * Concatenates the specified text to the end of this text.
   * This method is very fast (faster even than
   * {@code StringBuffer.append(String)}) and still returns
   * a text instance with an internal binary tree of minimal depth!
   *
   * @param that the text that is concatenated.
   * @return {@code this + that}
   */
  private ImmutableText concat(ImmutableText that) {
    return that.length() == 0 ? this : length() == 0 ? that : new ImmutableText(concatNodes(ensureChunked().myNode, that.ensureChunked().myNode));
  }

  @Override
  public ImmutableText concat(@NotNull CharSequence sequence) {
    return concat(valueOf(sequence));
  }

  /**
   * Returns a portion of this text.
   *
   * @param start the index of the first character inclusive.
   * @return the sub-text starting at the specified position.
   * @throws IndexOutOfBoundsException if {@code (start < 0) ||
   *          (start > this.length())}
   */
  private ImmutableText subtext(int start) {
    return subtext(start, length());
  }

  @Override
  public ImmutableText insert(int index, @NotNull CharSequence seq) {
    if (seq.length() == 0) return this;
    return subtext(0, index).concat(valueOf(seq)).concat(subtext(index));
  }

  /**
   * Returns the text without the characters between the specified indexes.
   *
   * @param start the beginning index, inclusive.
   * @param end the ending index, exclusive.
   * @return {@code subtext(0, start).concat(subtext(end))}
   * @throws IndexOutOfBoundsException if {@code (start < 0) || (end < 0) ||
   *         (start > end) || (end > this.length()}
   */
  @Override
  public ImmutableText delete(int start, int end) {
    if (start == end) return this;
    if (start > end) {
      throw new IndexOutOfBoundsException();
    }
    return ensureChunked().subtext(0, start).concat(subtext(end));
  }

  @Override
  public CharSequence subSequence(final int start, final int end) {
    if (start == 0 && end == length()) return this;
    return new CharSequenceSubSequence(this, start, end);
  }

  @Override
  public boolean equals(Object obj) {
    if (this == obj) {
      return true;
    }
    if (!(obj instanceof ImmutableText)) {
      return false;
    }
    // Content comparison; length mismatch is handled inside regionMatches.
    return CharArrayUtil.regionMatches(this, 0, (ImmutableText)obj);
  }

  // Lazily computed hash; 0 means "not computed yet".
  private transient int hash;

  /**
   * Returns the hash code for this text.
   *
   * @return the hash code value.
   */
  @Override
  public int hashCode() {
    int h = hash;
    if (h == 0) {
      hash = h = StringUtil.stringHashCode(this, 0, length());
    }
    return h;
  }

  @Override
  public char charAt(int index) {
    // Fast path: reuse the last accessed leaf when the index falls inside it.
    InnerLeaf leaf = myLastLeaf;
    if (leaf == null || index < leaf.offset || index >= leaf.end) {
      myLastLeaf = leaf = findLeaf(index);
    }
    return leaf.leafNode.charAt(index - leaf.offset);
  }

  // Single-element leaf cache. NOTE(review): plain non-volatile field — the cached
  // InnerLeaf is immutable, but concurrent-access guarantees are not established
  // by this class alone; confirm the caller's threading model.
  private InnerLeaf myLastLeaf;

  // Descends the tree to the leaf containing {@code index}, accumulating the
  // leaf's absolute start offset along the way.
  private InnerLeaf findLeaf(int index) {
    Node node = myNode;
    if (index < 0 || index >= node.length()) throw new IndexOutOfBoundsException("Index out of range: " + index);

    int offset = 0;
    while (true) {
      if (index >= node.length()) {
        throw new IndexOutOfBoundsException();
      }
      if (node instanceof LeafNode) {
        return new InnerLeaf((LeafNode)node, offset);
      }
      CompositeNode composite = (CompositeNode)node;
      if (index < composite.head.length()) {
        node = composite.head;
      }
      else {
        offset += composite.head.length();
        index -= composite.head.length();
        node = composite.tail;
      }
    }
  }

  // A leaf together with its absolute [offset, end) range within the whole text.
  private static class InnerLeaf {
    final LeafNode leafNode;
    final int offset;
    final int end;

    private InnerLeaf(@NotNull LeafNode leafNode, int offset) {
      this.leafNode = leafNode;
      this.offset = offset;
      this.end = offset + leafNode.length();
    }
  }

  /**
   * Returns a portion of this text.
   *
   * @param  start the index of the first character inclusive.
   * @param  end the index of the last character exclusive.
   * @return the sub-text starting at the specified start position and
   *         ending just before the specified end position.
   * @throws IndexOutOfBoundsException if {@code (start < 0) || (end < 0) ||
   *         (start > end) || (end > this.length())}
   */
  @Override
  public ImmutableText subtext(int start, int end) {
    if (start < 0 || start > end || end > length()) {
      throw new IndexOutOfBoundsException();
    }
    if (start == 0 && end == length()) {
      return this;
    }
    if (start == end) {
      return EMPTY;
    }

    return new ImmutableText(myNode.subNode(start, end));
  }

  /**
   * Copies the characters from this text into the destination
   * character array.
   *
   * @param start the index of the first character to copy.
   * @param end the index after the last character to copy.
   * @param dest the destination array.
   * @param destPos the start offset in the destination array.
   * @throws IndexOutOfBoundsException if {@code (start < 0) || (end < 0) ||
   *         (start > end) || (end > this.length())}
   */
  @Override
  public void getChars(int start, int end, @NotNull char[] dest, int destPos) {
    myNode.getChars(start, end, dest, destPos);
  }

  /**
   * Returns the {@code String} representation of this text.
   *
   * @return the {@code java.lang.String} for this text.
   */
  @Override
  @NotNull
  public String toString() {
    return myNode.toString();
  }

  // Tree node: either a leaf holding characters, or a composite of two subtrees.
  private abstract static class Node implements CharSequence {

    abstract void getChars(int start, int end, @NotNull char[] dest, int destPos);

    abstract Node subNode(int start, int end);

    @NotNull
    @Override
    public String toString() {
      int len = length();
      char[] data = new char[len];
      getChars(0, len, data, 0);
      return StringFactory.createShared(data);
    }

    @Override
    public CharSequence subSequence(int start, int end) {
      return subNode(start, end);
    }
  }

  private abstract static class LeafNode extends Node {
  }

  // Concatenates two nodes while keeping the resulting tree balanced.
  @NotNull
  private static Node concatNodes(@NotNull Node node1, @NotNull Node node2) {
    // All Text instances are maintained balanced:
    //   (head < tail * 2) & (tail < head * 2)
    final int length = node1.length() + node2.length();
    if (length <= BLOCK_SIZE) { // Merges to primitive.
      return createLeafNode(new MergingCharSequence(node1, node2));
    }
    else { // Returns a composite.
      Node head = node1;
      Node tail = node2;

      if ((head.length() << 1) < tail.length() && tail instanceof CompositeNode) {
        // head too small, returns (head + tail/2) + (tail/2)
        if (((CompositeNode)tail).head.length() > ((CompositeNode)tail).tail.length()) {
          // Rotates to concatenate with smaller part.
          tail = ((CompositeNode)tail).rightRotation();
        }
        head = concatNodes(head, ((CompositeNode)tail).head);
        tail = ((CompositeNode)tail).tail;
      }
      else if ((tail.length() << 1) < head.length() && head instanceof CompositeNode) {
        // tail too small, returns (head/2) + (head/2 concat tail)
        if (((CompositeNode)head).tail.length() > ((CompositeNode)head).head.length()) {
          // Rotates to concatenate with smaller part.
          head = ((CompositeNode)head).leftRotation();
        }
        tail = concatNodes(((CompositeNode)head).tail, tail);
        head = ((CompositeNode)head).head;
      }
      return new CompositeNode(head, tail);
    }
  }

  // Leaf backed by a full char[] (needed when characters exceed 8 bits).
  private static class WideLeafNode extends LeafNode {
    private final char[] data;

    WideLeafNode(@NotNull char[] data) {
      this.data = data;
    }

    @Override
    public int length() {
      return data.length;
    }

    @Override
    void getChars(int start, int end, @NotNull char[] dest, int destPos) {
      if (start < 0 || end > length() || start > end) {
        throw new IndexOutOfBoundsException();
      }
      System.arraycopy(data, start, dest, destPos, end - start);
    }

    @Override
    Node subNode(int start, int end) {
      if (start == 0 && end == length()) {
        return this;
      }
      return createLeafNode(new CharArrayCharSequence(data, start, end));
    }

    @NotNull
    @Override
    public String toString() {
      return StringFactory.createShared(data);
    }

    @Override
    public char charAt(int index) {
      return data[index];
    }
  }

  // Compact leaf storing one byte per character (chars in the 0..0xFF range).
  private static class Leaf8BitNode extends LeafNode {
    private final byte[] data;

    Leaf8BitNode(@NotNull byte[] data) {
      this.data = data;
    }

    @Override
    public int length() {
      return data.length;
    }

    @Override
    void getChars(int start, int end, @NotNull char[] dest, int destPos) {
      if (start < 0 || end > length() || start > end) {
        throw new IndexOutOfBoundsException();
      }
      for (int i=start;i<end;i++) {
        dest[destPos++] = byteToChar(data[i]);
      }
    }

    @Override
    LeafNode subNode(int start, int end) {
      if (start == 0 && end == length()) {
        return this;
      }
      int length = end - start;
      byte[] chars = new byte[length];
      System.arraycopy(data, start, chars, 0, length);
      return new Leaf8BitNode(chars);
    }

    @Override
    public char charAt(int index) {
      return byteToChar(data[index]);
    }

    // Widen without sign extension: byte 0x80..0xFF maps to char 0x80..0xFF.
    private static char byteToChar(byte b) {
      return (char)(b & 0xff);
    }
  }

  // Inner tree node: the concatenation of {@code head} followed by {@code tail}.
  private static class CompositeNode extends Node {
    final int count;
    final Node head;
    final Node tail;

    CompositeNode(Node head, Node tail) {
      count = head.length() + tail.length();
      this.head = head;
      this.tail = tail;
    }

    @Override
    public int length() {
      return count;
    }

    @Override
    public char charAt(int index) {
      int headLength = head.length();
      return index < headLength ? head.charAt(index) : tail.charAt(index - headLength);
    }

    Node rightRotation() {
      // See: http://en.wikipedia.org/wiki/Tree_rotation
      Node P = this.head;
      if (!(P instanceof CompositeNode)) {
        return this; // Head not a composite, cannot rotate.
      }
      Node A = ((CompositeNode)P).head;
      Node B = ((CompositeNode)P).tail;
      Node C = this.tail;
      return new CompositeNode(A, new CompositeNode(B, C));
    }

    Node leftRotation() {
      // See: http://en.wikipedia.org/wiki/Tree_rotation
      Node Q = this.tail;
      if (!(Q instanceof CompositeNode)) {
        return this; // Tail not a composite, cannot rotate.
      }
      Node B = ((CompositeNode)Q).head;
      Node C = ((CompositeNode)Q).tail;
      Node A = this.head;
      return new CompositeNode(new CompositeNode(A, B), C);
    }

    @Override
    void getChars(int start, int end, @NotNull char[] dest, int destPos) {
      final int cesure = head.length();
      if (end <= cesure) {
        head.getChars(start, end, dest, destPos);
      }
      else if (start >= cesure) {
        tail.getChars(start - cesure, end - cesure, dest, destPos);
      }
      else { // Overlaps head and tail.
        head.getChars(start, cesure, dest, destPos);
        tail.getChars(0, end - cesure, dest, destPos + cesure - start);
      }
    }

    @Override
    Node subNode(int start, int end) {
      final int cesure = head.length();
      if (end <= cesure) {
        return head.subNode(start, end);
      }
      if (start >= cesure) {
        return tail.subNode(start - cesure, end - cesure);
      }
      if (start == 0 && end == count) {
        return this;
      }
      // Overlaps head and tail.
      return concatNodes(head.subNode(start, cesure), tail.subNode(0, end - cesure));
    }
  }
}
|
|
/*
* Copyright (C) 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.trackselection;
import android.os.SystemClock;
import androidx.annotation.Nullable;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.source.TrackGroup;
import com.google.android.exoplayer2.source.chunk.MediaChunk;
import com.google.android.exoplayer2.util.Assertions;
import com.google.android.exoplayer2.util.Util;
import java.util.Arrays;
import java.util.Comparator;
import java.util.List;
/**
* An abstract base class suitable for most {@link TrackSelection} implementations.
*/
public abstract class BaseTrackSelection implements TrackSelection {
/**
* The selected {@link TrackGroup}.
*/
protected final TrackGroup group;
/**
* The number of selected tracks within the {@link TrackGroup}. Always greater than zero.
*/
protected final int length;
/**
* The indices of the selected tracks in {@link #group}, in order of decreasing bandwidth.
*/
protected final int[] tracks;
/**
* The {@link Format}s of the selected tracks, in order of decreasing bandwidth.
*/
private final Format[] formats;
/**
* Selected track blacklist timestamps, in order of decreasing bandwidth.
*/
private final long[] blacklistUntilTimes;
// Lazily initialized hashcode.
private int hashCode;
/**
* @param group The {@link TrackGroup}. Must not be null.
* @param tracks The indices of the selected tracks within the {@link TrackGroup}. Must not be
* null or empty. May be in any order.
*/
/**
 * @param group The {@link TrackGroup}. Must not be null.
 * @param tracks The indices of the selected tracks within the {@link TrackGroup}. Must not be
 *     null or empty. May be in any order.
 */
public BaseTrackSelection(TrackGroup group, int... tracks) {
  Assertions.checkState(tracks.length > 0);
  this.group = Assertions.checkNotNull(group);
  this.length = tracks.length;
  // Resolve the selected formats, then order them by decreasing bandwidth.
  formats = new Format[length];
  for (int index = 0; index < length; index++) {
    formats[index] = group.getFormat(tracks[index]);
  }
  Arrays.sort(formats, new DecreasingBandwidthComparator());
  // Recompute the track indices so they follow the sorted format order.
  this.tracks = new int[length];
  for (int index = 0; index < length; index++) {
    this.tracks[index] = group.indexOf(formats[index]);
  }
  blacklistUntilTimes = new long[length];
}
@Override
public void enable() {
// Do nothing.
}
@Override
public void disable() {
// Do nothing.
}
@Override
public final TrackGroup getTrackGroup() {
return group;
}
@Override
public final int length() {
return tracks.length;
}
@Override
public final Format getFormat(int index) {
return formats[index];
}
@Override
public final int getIndexInTrackGroup(int index) {
return tracks[index];
}
@Override
@SuppressWarnings("ReferenceEquality")
public final int indexOf(Format format) {
for (int i = 0; i < length; i++) {
if (formats[i] == format) {
return i;
}
}
return C.INDEX_UNSET;
}
@Override
public final int indexOf(int indexInTrackGroup) {
for (int i = 0; i < length; i++) {
if (tracks[i] == indexInTrackGroup) {
return i;
}
}
return C.INDEX_UNSET;
}
@Override
public final Format getSelectedFormat() {
return formats[getSelectedIndex()];
}
@Override
public final int getSelectedIndexInTrackGroup() {
return tracks[getSelectedIndex()];
}
@Override
public void onPlaybackSpeed(float playbackSpeed) {
// Do nothing.
}
@Override
public int evaluateQueueSize(long playbackPositionUs, List<? extends MediaChunk> queue) {
return queue.size();
}
@Override
public final boolean blacklist(int index, long blacklistDurationMs) {
long nowMs = SystemClock.elapsedRealtime();
boolean canBlacklist = isBlacklisted(index, nowMs);
for (int i = 0; i < length && !canBlacklist; i++) {
canBlacklist = i != index && !isBlacklisted(i, nowMs);
}
if (!canBlacklist) {
return false;
}
blacklistUntilTimes[index] =
Math.max(
blacklistUntilTimes[index],
Util.addWithOverflowDefault(nowMs, blacklistDurationMs, Long.MAX_VALUE));
return true;
}
/**
* Returns whether the track at the specified index in the selection is blacklisted.
*
* @param index The index of the track in the selection.
* @param nowMs The current time in the timebase of {@link SystemClock#elapsedRealtime()}.
*/
protected final boolean isBlacklisted(int index, long nowMs) {
return blacklistUntilTimes[index] > nowMs;
}
// Object overrides.
@Override
public int hashCode() {
if (hashCode == 0) {
hashCode = 31 * System.identityHashCode(group) + Arrays.hashCode(tracks);
}
return hashCode;
}
// Track groups are compared by identity not value, as distinct groups may have the same value.
@Override
@SuppressWarnings({"ReferenceEquality", "EqualsGetClass"})
public boolean equals(@Nullable Object obj) {
if (this == obj) {
return true;
}
if (obj == null || getClass() != obj.getClass()) {
return false;
}
BaseTrackSelection other = (BaseTrackSelection) obj;
return group == other.group && Arrays.equals(tracks, other.tracks);
}
/**
* Sorts {@link Format} objects in order of decreasing bandwidth.
*/
private static final class DecreasingBandwidthComparator implements Comparator<Format> {
@Override
public int compare(Format a, Format b) {
return b.bitrate - a.bitrate;
}
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.wicket.markup.parser;
import java.io.BufferedInputStream;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.text.ParseException;
import org.apache.wicket.markup.parser.XmlTag.TagType;
import org.apache.wicket.markup.parser.XmlTag.TextSegment;
import org.apache.wicket.util.io.FullyBufferedReader;
import org.apache.wicket.util.io.IOUtils;
import org.apache.wicket.util.io.XmlReader;
import org.apache.wicket.util.lang.Args;
import org.apache.wicket.util.parse.metapattern.parsers.TagNameParser;
import org.apache.wicket.util.parse.metapattern.parsers.VariableAssignmentParser;
import org.apache.wicket.util.resource.ResourceStreamNotFoundException;
import org.apache.wicket.util.string.Strings;
/**
* A fairly shallow markup pull parser which parses a markup string of a given type of markup (for
* example, html, xml, vxml or wml) into ComponentTag and RawMarkup tokens.
*
* @author Jonathan Locke
* @author Juergen Donnerstag
*/
public final class XmlPullParser implements IXmlPullParser
{
	/** Name of the 'style' tag, whose body is skipped and treated as raw markup. */
	public static final String STYLE = "style";
	/** Name of the 'script' tag, whose body is skipped and treated as raw markup. */
	public static final String SCRIPT = "script";
	/**
	 * Reads the xml data from an input stream and converts the chars according to its encoding
	 * (<?xml ... encoding="..." ?>)
	 */
	private XmlReader xmlReader;
	/**
	 * A XML independent reader which loads the whole source data into memory and which provides
	 * convenience methods to access the data.
	 */
	private FullyBufferedReader input;
	/** temporary variable which will hold the name of the closing tag. */
	private String skipUntilText;
	/** The last substring selected from the input */
	private CharSequence lastText;
	/** Everything in between <!DOCTYPE ... > */
	private CharSequence doctype;
	/** The type of what is in lastText */
	private HttpTagType lastType = HttpTagType.NOT_INITIALIZED;
	/** The last tag found */
	private XmlTag lastTag;
	/**
	 * Construct.
	 */
	public XmlPullParser()
	{
	}
	/**
	 * @return the character encoding of the last parsed input, as determined by the XML reader
	 */
	public final String getEncoding()
	{
		return xmlReader.getEncoding();
	}
	/**
	 * @return the text between &lt;!DOCTYPE and &gt;, or null if no doctype was encountered
	 */
	public final CharSequence getDoctype()
	{
		return doctype;
	}
	/**
	 * @param toPos end index (exclusive)
	 * @return the input text from the position marker up to toPos
	 */
	public final CharSequence getInputFromPositionMarker(final int toPos)
	{
		return input.getSubstring(toPos);
	}
	/**
	 * @param fromPos start index (inclusive)
	 * @param toPos end index (exclusive)
	 * @return the input text between the two positions
	 */
	public final CharSequence getInput(final int fromPos, final int toPos)
	{
		return input.getSubstring(fromPos, toPos);
	}
	/**
	 * Whatever will be in between the current index and the closing tag will be ignored (and thus
	 * treated as raw markup (text)). This is useful for tags like 'script'.
	 *
	 * @throws ParseException
	 */
	private final void skipUntil() throws ParseException
	{
		// this is a tag with non-XHTML text as body - skip this until the
		// skipUntilText is found.
		final int startIndex = input.getPosition();
		final int tagNameLen = skipUntilText.length();
		int pos = input.getPosition() - 1;
		String endTagText = null;
		int lastPos = 0;
		// Scan forward for "</name" matching skipUntilText (case-insensitive).
		while (!skipUntilText.equalsIgnoreCase(endTagText))
		{
			pos = input.find("</", pos + 1);
			if ((pos == -1) || ((pos + (tagNameLen + 2)) >= input.size()))
			{
				throw new ParseException(
					skipUntilText + " tag not closed" + getLineAndColumnText(), startIndex);
			}
			lastPos = pos + 2;
			endTagText = input.getSubstring(lastPos, lastPos + tagNameLen).toString();
		}
		// Leave the parser positioned at the "</" so the close tag is parsed next.
		input.setPosition(pos);
		lastText = input.getSubstring(startIndex, pos);
		lastType = HttpTagType.BODY;
		// Check that the tag is properly closed
		lastPos = input.find('>', lastPos + tagNameLen);
		if (lastPos == -1)
		{
			throw new ParseException(skipUntilText + " tag not closed" + getLineAndColumnText(),
				startIndex);
		}
		// Reset the state variable
		skipUntilText = null;
	}
	/**
	 *
	 * @return line and column number
	 */
	private String getLineAndColumnText()
	{
		return " (line " + input.getLineNumber() + ", column " + input.getColumnNumber() + ")";
	}
	/**
	 * Advances to the next token (tag, body text, comment, CDATA, ...) in the input.
	 *
	 * @return the {@link HttpTagType} of the token that was read; NOT_INITIALIZED at end of input
	 * @throws ParseException
	 */
	public final HttpTagType next() throws ParseException
	{
		// Reached end of markup file?
		if (input.getPosition() >= input.size())
		{
			return HttpTagType.NOT_INITIALIZED;
		}
		// A previous open tag (script/style) requested its body to be skipped as raw text.
		if (skipUntilText != null)
		{
			skipUntil();
			return lastType;
		}
		// Any more tags in the markup?
		final int openBracketIndex = input.find('<');
		// Tag or Body?
		if (input.charAt(input.getPosition()) != '<')
		{
			// It's a BODY
			if (openBracketIndex == -1)
			{
				// There is no next matching tag.
				lastText = input.getSubstring(-1);
				input.setPosition(input.size());
				lastType = HttpTagType.BODY;
				return lastType;
			}
			lastText = input.getSubstring(openBracketIndex);
			input.setPosition(openBracketIndex);
			lastType = HttpTagType.BODY;
			return lastType;
		}
		// Determine the line number
		input.countLinesTo(openBracketIndex);
		// Get index of closing tag and advance past the tag
		int closeBracketIndex = -1;
		if (openBracketIndex != -1 && openBracketIndex < input.size() - 1)
		{
			char nextChar = input.charAt(openBracketIndex + 1);
			// Special tags (<!..., <?...) may legally contain quotes spanning '>', so only
			// regular tags use the quote-aware search.
			if ((nextChar == '!') || (nextChar == '?'))
				closeBracketIndex = input.find('>', openBracketIndex);
			else
				closeBracketIndex = input.findOutOfQuotes('>', openBracketIndex);
		}
		if (closeBracketIndex == -1)
		{
			throw new ParseException("No matching close bracket at" + getLineAndColumnText(),
				input.getPosition());
		}
		// Get the complete tag text
		lastText = input.getSubstring(openBracketIndex, closeBracketIndex + 1);
		// Get the tagtext between open and close brackets
		String tagText = lastText.subSequence(1, lastText.length() - 1).toString();
		if (tagText.length() == 0)
		{
			throw new ParseException("Found empty tag: '<>' at" + getLineAndColumnText(),
				input.getPosition());
		}
		// Type of the tag, to be determined next
		final TagType type;
		// If the tag ends in '/', it's a "simple" tag like <foo/>
		if (tagText.endsWith("/"))
		{
			type = TagType.OPEN_CLOSE;
			tagText = tagText.substring(0, tagText.length() - 1);
		}
		else if (tagText.startsWith("/"))
		{
			// The tag text starts with a '/', it's a simple close tag
			type = TagType.CLOSE;
			tagText = tagText.substring(1);
		}
		else
		{
			// It must be an open tag
			type = TagType.OPEN;
			// If open tag and starts with "s" like "script" or "style", than ...
			if ((tagText.length() > STYLE.length()) &&
				((tagText.charAt(0) == 's') || (tagText.charAt(0) == 'S')))
			{
				final String lowerCase = tagText.substring(0, 6).toLowerCase();
				if (lowerCase.startsWith(SCRIPT))
				{
					// prepare to skip everything between the open and close tag
					skipUntilText = SCRIPT;
				}
				else if (lowerCase.startsWith(STYLE))
				{
					// prepare to skip everything between the open and close tag
					skipUntilText = STYLE;
				}
			}
		}
		// Handle special tags like <!-- and <![CDATA ...
		final char firstChar = tagText.charAt(0);
		if ((firstChar == '!') || (firstChar == '?'))
		{
			// specialTagHandling sets lastType/lastText and advances the input position.
			specialTagHandling(tagText, openBracketIndex, closeBracketIndex);
			input.countLinesTo(openBracketIndex);
			TextSegment text = new TextSegment(lastText, openBracketIndex, input.getLineNumber(),
				input.getColumnNumber());
			lastTag = new XmlTag(text, type);
			return lastType;
		}
		TextSegment text = new TextSegment(lastText, openBracketIndex, input.getLineNumber(),
			input.getColumnNumber());
		XmlTag tag = new XmlTag(text, type);
		lastTag = tag;
		// Parse the tag text and populate tag attributes
		if (parseTagText(tag, tagText))
		{
			// Move to position after the tag
			input.setPosition(closeBracketIndex + 1);
			lastType = HttpTagType.TAG;
			return lastType;
		}
		else
		{
			throw new ParseException("Malformed tag" + getLineAndColumnText(), openBracketIndex);
		}
	}
	/**
	 * Handle special tags like <!-- --> or <![CDATA[..]]> or <?xml>
	 *
	 * @param tagText
	 *            the tag text without the enclosing brackets
	 * @param openBracketIndex
	 *            index of the opening '<'
	 * @param closeBracketIndex
	 *            index of the (provisional) closing '>'
	 * @throws ParseException
	 */
	protected void specialTagHandling(String tagText, final int openBracketIndex,
		int closeBracketIndex) throws ParseException
	{
		// Handle comments
		if (tagText.startsWith("!--"))
		{
			// downlevel-revealed conditional comments e.g.: <!--[if (gt IE9)|!(IE)]><!-->
			if (tagText.contains("![endif]--"))
			{
				lastType = HttpTagType.CONDITIONAL_COMMENT_ENDIF;
				// Move to position after the tag
				input.setPosition(closeBracketIndex + 1);
				return;
			}
			// Conditional comment? E.g.
			// "<!--[if IE]><a href='test.html'>my link</a><![endif]-->"
			if (tagText.startsWith("!--[if ") && tagText.endsWith("]"))
			{
				int pos = input.find("]-->", openBracketIndex + 1);
				if (pos == -1)
				{
					throw new ParseException("Unclosed conditional comment beginning at" +
						getLineAndColumnText(), openBracketIndex);
				}
				pos += 4;
				lastText = input.getSubstring(openBracketIndex, pos);
				// Actually it is no longer a comment. It is now
				// up to the browser to select the section appropriate.
				input.setPosition(closeBracketIndex + 1);
				lastType = HttpTagType.CONDITIONAL_COMMENT;
			}
			else
			{
				// Normal comment section.
				// Skip ahead to "-->". Note that you can not simply test for
				// tagText.endsWith("--") as the comment might contain a '>'
				// inside.
				int pos = input.find("-->", openBracketIndex + 1);
				if (pos == -1)
				{
					throw new ParseException("Unclosed comment beginning at" +
						getLineAndColumnText(), openBracketIndex);
				}
				pos += 3;
				lastText = input.getSubstring(openBracketIndex, pos);
				lastType = HttpTagType.COMMENT;
				input.setPosition(pos);
			}
			return;
		}
		// The closing tag of a conditional comment, e.g.
		// "<!--[if IE]><a href='test.html'>my link</a><![endif]-->
		// and also <!--<![endif]-->"
		if (tagText.equals("![endif]--"))
		{
			lastType = HttpTagType.CONDITIONAL_COMMENT_ENDIF;
			input.setPosition(closeBracketIndex + 1);
			return;
		}
		// CDATA sections might contain "<" which is not part of an XML tag.
		// Make sure escaped "<" are treated right
		if (tagText.startsWith("!["))
		{
			final String startText = (tagText.length() <= 8 ? tagText : tagText.substring(0, 8));
			if (startText.toUpperCase().equals("![CDATA["))
			{
				int pos1 = openBracketIndex;
				do
				{
					// Get index of closing tag and advance past the tag
					closeBracketIndex = findChar('>', pos1);
					if (closeBracketIndex == -1)
					{
						throw new ParseException("No matching close bracket at" +
							getLineAndColumnText(), input.getPosition());
					}
					// Get the tagtext between open and close brackets
					tagText = input.getSubstring(openBracketIndex + 1, closeBracketIndex)
						.toString();
					pos1 = closeBracketIndex + 1;
				}
				// Keep extending until the section really ends with "]]>" (a '>' inside the
				// CDATA body must not terminate it).
				while (tagText.endsWith("]]") == false);
				// Move to position after the tag
				input.setPosition(closeBracketIndex + 1);
				lastText = tagText;
				lastType = HttpTagType.CDATA;
				return;
			}
		}
		if (tagText.charAt(0) == '?')
		{
			lastType = HttpTagType.PROCESSING_INSTRUCTION;
			// Move to position after the tag
			input.setPosition(closeBracketIndex + 1);
			return;
		}
		if (tagText.startsWith("!DOCTYPE"))
		{
			lastType = HttpTagType.DOCTYPE;
			// Get the tagtext between open and close brackets
			doctype = input.getSubstring(openBracketIndex + 1, closeBracketIndex);
			// Move to position after the tag
			input.setPosition(closeBracketIndex + 1);
			return;
		}
		// Move to position after the tag
		lastType = HttpTagType.SPECIAL_TAG;
		input.setPosition(closeBracketIndex + 1);
	}
	/**
	 * @return MarkupElement the last tag found by {@link #next()}
	 */
	public final XmlTag getElement()
	{
		return lastTag;
	}
	/**
	 * @return The xml string from the last element
	 */
	public final CharSequence getString()
	{
		return lastText;
	}
	/**
	 * Advances over non-tag tokens (body, comments, CDATA, ...) until the next real tag.
	 *
	 * @return The next XML tag, or null when the end of the input is reached
	 * @throws ParseException
	 */
	public final XmlTag nextTag() throws ParseException
	{
		while (next() != HttpTagType.NOT_INITIALIZED)
		{
			switch (lastType)
			{
				case TAG :
					return lastTag;
				case BODY :
					break;
				case COMMENT :
					break;
				case CONDITIONAL_COMMENT :
					break;
				case CDATA :
					break;
				case PROCESSING_INSTRUCTION :
					break;
				case SPECIAL_TAG :
					break;
			}
		}
		return null;
	}
	/**
	 * Find the char but ignore any text within ".." and '..'
	 *
	 * @param ch
	 *            The character to search
	 * @param startIndex
	 *            Start index
	 * @return -1 if not found, else the index
	 */
	private int findChar(final char ch, int startIndex)
	{
		// quote == 0 means "not inside a quoted section"; otherwise it holds the quote char
		// that must be seen again before matching resumes.
		char quote = 0;
		for (; startIndex < input.size(); startIndex++)
		{
			final char charAt = input.charAt(startIndex);
			if (quote != 0)
			{
				if (quote == charAt)
				{
					quote = 0;
				}
			}
			else if ((charAt == '"') || (charAt == '\''))
			{
				quote = charAt;
			}
			else if (charAt == ch)
			{
				return startIndex;
			}
		}
		return -1;
	}
	/**
	 * Parse the given string.
	 * <p>
	 * Note: xml character encoding is NOT applied. It is assumed the input provided does have the
	 * correct encoding already.
	 *
	 * @param string
	 *            The input string
	 * @throws IOException
	 *             Error while reading the resource
	 * @throws ResourceStreamNotFoundException
	 *             Resource not found
	 */
	public void parse(final CharSequence string) throws IOException,
		ResourceStreamNotFoundException
	{
		// NOTE(review): getBytes() uses the platform default charset here; the bytes are then
		// re-decoded by XmlReader. Confirm this round-trip is safe for non-ASCII input.
		parse(new ByteArrayInputStream(string.toString().getBytes()), null);
	}
	/**
	 * Reads and parses markup from an input stream, using UTF-8 encoding by default when not
	 * specified in XML declaration.
	 *
	 * @param in
	 *            The input stream to read and parse
	 * @throws IOException
	 * @throws ResourceStreamNotFoundException
	 */
	public void parse(final InputStream in) throws IOException, ResourceStreamNotFoundException
	{
		// When XML declaration does not specify encoding, it defaults to UTF-8
		parse(in, "UTF-8");
	}
	/**
	 * Reads and parses markup from an input stream
	 *
	 * @param inputStream
	 *            The input stream to read and parse
	 * @param encoding
	 *            The default character encoding of the input
	 * @throws IOException
	 */
	public void parse(final InputStream inputStream, final String encoding) throws IOException
	{
		Args.notNull(inputStream, "inputStream");
		try
		{
			xmlReader = new XmlReader(new BufferedInputStream(inputStream, 4000), encoding);
			input = new FullyBufferedReader(xmlReader);
		}
		finally
		{
			// FullyBufferedReader has consumed the entire input into memory, so both
			// streams can be closed immediately.
			IOUtils.closeQuietly(inputStream);
			IOUtils.closeQuietly(xmlReader);
		}
	}
	/** Remembers the current input position as the marker used by getInputFromPositionMarker. */
	public final void setPositionMarker()
	{
		input.setPositionMarker(input.getPosition());
	}
	/**
	 * @param pos the input position to remember as the marker
	 */
	public final void setPositionMarker(final int pos)
	{
		input.setPositionMarker(pos);
	}
	@Override
	public String toString()
	{
		return input.toString();
	}
	/**
	 * Parses the text between tags. For example, "a href=foo.html".
	 *
	 * @param tag
	 *            the tag whose name, namespace and attributes are populated
	 * @param tagText
	 *            The text between tags
	 * @return false in case of an error
	 * @throws ParseException
	 */
	private boolean parseTagText(final XmlTag tag, final String tagText) throws ParseException
	{
		// Get the length of the tagtext
		final int tagTextLength = tagText.length();
		// If we match tagname pattern
		final TagNameParser tagnameParser = new TagNameParser(tagText);
		if (tagnameParser.matcher().lookingAt())
		{
			// Extract the tag from the pattern matcher
			tag.name = tagnameParser.getName();
			tag.namespace = tagnameParser.getNamespace();
			// Are we at the end? Then there are no attributes, so we just
			// return the tag
			int pos = tagnameParser.matcher().end(0);
			if (pos == tagTextLength)
			{
				return true;
			}
			// Extract attributes
			final VariableAssignmentParser attributeParser = new VariableAssignmentParser(tagText);
			while (attributeParser.matcher().find(pos))
			{
				// Get key and value using attribute pattern
				String value = attributeParser.getValue();
				// In case like <html xmlns:wicket> will the value be null
				if (value == null)
				{
					value = "";
				}
				// Set new position to end of attribute
				pos = attributeParser.matcher().end(0);
				// Chop off double quotes or single quotes
				if (value.startsWith("\"") || value.startsWith("\'"))
				{
					value = value.substring(1, value.length() - 1);
				}
				// Trim trailing whitespace
				value = value.trim();
				// Unescape
				value = Strings.unescapeMarkup(value).toString();
				// Get key
				final String key = attributeParser.getKey();
				// Put the attribute in the attributes hash
				if (null != tag.getAttributes().put(key, value))
				{
					throw new ParseException("Same attribute found twice: " + key +
						getLineAndColumnText(), input.getPosition());
				}
				// The input has to match exactly (no left over junk after
				// attributes)
				if (pos == tagTextLength)
				{
					return true;
				}
			}
			return true;
		}
		return false;
	}
}
|
|
package lexical;
import java.io.IOException;
import java.io.FileInputStream;
import java.io.PushbackInputStream;
/**
 * Hand-written scanner that turns a source file into a stream of {@code Lexeme}s
 * using a small finite-state machine.
 */
public class LexicalAnalysis implements AutoCloseable
{
    /** Current line number in the input, 1-based. */
    private int line;
    /** Lookup table classifying reserved words and operator tokens. */
    private SymbolTable st;
    /** Input stream with one-character pushback, used for lookahead. */
    private PushbackInputStream input;

    /**
     * Opens the given source file for scanning.
     *
     * @param filename path of the source file
     * @throws LexicalException if the file cannot be opened
     */
    public LexicalAnalysis(String filename) throws LexicalException
    {
        try
        {
            st = new SymbolTable();
            input = new PushbackInputStream(new FileInputStream(filename));
        }
        catch (Exception e)
        {
            throw new LexicalException("Unable to open file");
        }
        line = 1;
    }

    /** Closes the underlying stream; a failure on close is deliberately ignored. */
    @Override
    public void close()
    {
        try
        {
            input.close();
        }
        catch (Exception ignored)
        {
            // Best effort only: nothing useful can be done if close fails.
        }
    }

    /**
     * @return the line number of the most recently consumed character (1-based)
     */
    public int line()
    {
        return this.line;
    }

    /**
     * Reads and returns the next token from the input.
     *
     * States: 1 = dispatch, 2 = '#' line comment, 3 = number, 4 = after '!',
     * 5 = after '=', '&lt;' or '&gt;', 6 = identifier/keyword, 7 = string literal,
     * 8 = accept (token complete, classified via the symbol table).
     *
     * @return the next lexeme; END_OF_FILE at end of input, INVALID_TOKEN or
     *         UNEXPECTED_EOF on malformed input
     * @throws IOException if reading the input fails
     */
    public Lexeme nextToken() throws IOException
    {
        Lexeme lex = new Lexeme("", TokenType.END_OF_FILE);
        int state = 1;
        int c;
        while (state != 8)
        {
            c = input.read();
            switch (state)
            {
                case 1:
                    if (c == -1)
                    {
                        lex = new Lexeme("", TokenType.END_OF_FILE);
                        return lex;
                    }
                    else if (c == ' ' || c == '\t' || c == '\r' || c == '\n')
                    {
                        if (c == '\n')
                        {
                            line++;
                        }
                        state = 1;
                    }
                    else if (c == '#')
                        state = 2;
                    else if (Character.isDigit(c))
                    {
                        lex.token += (char) c;
                        state = 3;
                    }
                    else if (c == '!')
                    {
                        lex.token += (char) c;
                        state = 4;
                    }
                    else if (c == '=' || c == '<' || c == '>')
                    {
                        lex.token += (char) c;
                        state = 5;
                    }
                    else if (Character.isLetter(c))
                    {
                        lex.token += (char) c;
                        state = 6;
                    }
                    else if (c == '\"')
                        state = 7;
                    else if (c == '.' || c == ',' || c == ';' || c == '(' || c == ')'
                            || c == '[' || c == ']' || c == '&' || c == '|' || c == '+'
                            || c == '-' || c == '*' || c == '/' || c == '%')
                    {
                        // Single-character token: accept immediately.
                        lex.token += (char) c;
                        state = 8;
                    }
                    else
                    {
                        lex.token += (char) c;
                        lex.type = TokenType.INVALID_TOKEN;
                        return lex;
                    }
                    break;
                case 2:
                    // Line comment: discard everything up to end of line or EOF.
                    if (c == '\n' || c == -1)
                    {
                        if (c == '\n')
                        {
                            // Bug fix: the newline terminating a comment was consumed
                            // without updating the line counter, so line numbers drifted
                            // after every comment.
                            line++;
                        }
                        state = 1;
                    }
                    else
                        state = 2;
                    break;
                case 3:
                    // Number: keep accumulating digits; push back the first non-digit.
                    if (Character.isDigit(c))
                    {
                        lex.token += (char) c;
                        state = 3;
                    }
                    else
                    {
                        if (c != -1)
                            input.unread(c);
                        lex.type = TokenType.NUMBER;
                        return lex;
                    }
                    break;
                case 4:
                    // '!' must be followed by '=' to form "!="; anything else is an error.
                    if (c == '=')
                    {
                        lex.token += (char) c;
                        state = 8;
                    }
                    else
                    {
                        if (c == -1)
                        {
                            lex.type = TokenType.UNEXPECTED_EOF;
                            return lex;
                        }
                        else
                        {
                            lex.type = TokenType.INVALID_TOKEN;
                            return lex;
                        }
                    }
                    break;
                case 5:
                    // '=', '<' or '>' optionally followed by '=' ("==", "<=", ">=").
                    if (c == '=')
                    {
                        lex.token += (char) c;
                        state = 8;
                    }
                    else
                    {
                        if (c != -1)
                            input.unread(c);
                        state = 8;
                    }
                    break;
                case 6:
                    // Identifier or keyword: letters and digits; push back the terminator.
                    if (Character.isLetter(c) || Character.isDigit(c))
                    {
                        lex.token += (char) c;
                        state = 6;
                    }
                    else
                    {
                        if (c != -1)
                            input.unread(c);
                        state = 8;
                    }
                    break;
                case 7:
                    // String literal: accumulate until the closing quote.
                    // NOTE(review): a '\n' inside a string does not bump the line counter;
                    // confirm whether multi-line strings are legal in this language.
                    if (c == '"')
                    {
                        lex.type = TokenType.STRING;
                        return lex;
                    }
                    else
                    {
                        if (c == -1)
                        {
                            lex.type = TokenType.UNEXPECTED_EOF;
                            return lex;
                        }
                        lex.token += (char) c;
                        state = 7;
                    }
                    break;
            }
        }
        // Accept state: keywords and operators are classified by the symbol table;
        // everything else is a variable name.
        if (st.contains(lex.token))
        {
            lex.type = st.find(lex.token);
        }
        else
        {
            lex.type = TokenType.VAR;
        }
        return lex;
    }
}
|
|
/* ************************************************************************
#
# designCraft.io
#
# http://designcraft.io/
#
# Copyright:
# Copyright 2014 eTimeline, LLC. All rights reserved.
#
# License:
# See the license.txt file in the project's top-level directory for details.
#
# Authors:
# * Andy White
#
************************************************************************ */
package dcraft.ctp.stream;
import io.netty.buffer.ByteBuf;
import java.io.IOException;
import java.nio.file.Paths;
import org.bouncycastle.openpgp.PGPException;
import org.bouncycastle.openpgp.PGPPublicKeyRing;
import dcraft.ctp.f.FileDescriptor;
import dcraft.lang.op.OperationContext;
import dcraft.log.Logger;
import dcraft.pgp.EncryptedFileStream;
import dcraft.script.StackEntry;
import dcraft.util.FileUtil;
import dcraft.xml.XElement;
public class PgpEncryptStream extends TransformStream {
protected EncryptedFileStream pgp = new EncryptedFileStream();
protected boolean needInit = true;
protected FileDescriptor efile = null;
public PgpEncryptStream() {
}
public PgpEncryptStream withPgpKeyring(PGPPublicKeyRing ring) {
this.pgp.addPublicKey(ring);
return this;
}
public PgpEncryptStream withAlgorithm(int v) {
this.pgp.setAlgorithm(v);
return this;
}
@Override
public void init(StackEntry stack, XElement el) {
String keyPath = stack.stringFromElement(el, "Keyring");
try {
this.pgp.loadPublicKey(Paths.get(keyPath));
}
catch (IOException x) {
OperationContext.get().error("Unabled to read keyfile: " + x);
}
catch (PGPException x) {
OperationContext.get().error("Unabled to load keyfile: " + x);
}
}
@Override
public void close() {
try {
this.pgp.close();
}
catch (PGPException x) {
// it should already be closed, unless we got here by a task kill/cancel
Logger.warn("Error closing PGP stream: " + x);
}
super.close();
}
// make sure we don't return without first releasing the file reference content
@Override
public ReturnOption handle(FileSlice slice) {
if (slice == FileSlice.FINAL)
return this.downstream.handle(slice);
if (this.needInit) {
this.pgp.setFileName(slice.file.path().getFileName());
try {
this.pgp.init();
}
catch (Exception x) {
OperationContext.get().getTaskRun().kill("PGP init failed: " + x);
return ReturnOption.DONE;
}
this.initializeFileValues(slice.file);
this.needInit = false;
}
// inflate the payload into 1 or more outgoing buffers set in a queue
ByteBuf in = slice.data;
if (in != null) {
this.pgp.writeData(in);
in.release();
if (OperationContext.get().getTaskRun().isComplete())
return ReturnOption.DONE;
}
// write all buffers in the queue
ByteBuf buf = this.pgp.nextReadyBuffer();
while (buf != null) {
ReturnOption ret = this.nextMessage(buf);
if (ret != ReturnOption.CONTINUE)
return ret;
buf = this.pgp.nextReadyBuffer();
}
if (slice.isEof()) {
try {
this.pgp.close();
}
catch (PGPException x) {
OperationContext.get().getTaskRun().kill("PGP close failed: " + x);
return ReturnOption.DONE;
}
// write all buffers in the queue
buf = this.pgp.nextReadyBuffer();
while (buf != null) {
ReturnOption ret = this.nextMessage(buf);
if (ret != ReturnOption.CONTINUE)
return ret;
buf = this.pgp.nextReadyBuffer();
}
ReturnOption ret = this.lastMessage();
if (ret != ReturnOption.CONTINUE)
return ret;
}
// otherwise we need more data
return ReturnOption.CONTINUE;
}
public ReturnOption nextMessage(ByteBuf out) {
return this.downstream.handle(FileSlice.allocate(this.efile, out, 0, false));
}
public ReturnOption lastMessage() {
return this.downstream.handle(FileSlice.allocate(this.efile, null, 0, true));
}
public void initializeFileValues(FileDescriptor src) {
this.efile = new FileDescriptor();
if (src.hasPath())
this.efile.setPath(src.getPath().toString() + ".gpg");
else
this.efile.setPath("/" + FileUtil.randomFilename("bin") + ".gpg");
this.efile.setModTime(src.getModTime());
this.efile.setPermissions(src.getPermissions());
}
@Override
public void read() {
// write all buffers in the queue
ByteBuf buf = this.pgp.nextReadyBuffer();
while (buf != null) {
ReturnOption ret = this.nextMessage(buf);
if (ret != ReturnOption.CONTINUE)
return;
buf = this.pgp.nextReadyBuffer();
}
// if we reached done and we wrote all the buffers, then send the EOF marker if not already
if (this.pgp.isClosed()) {
ReturnOption ret = this.lastMessage();
if (ret != ReturnOption.CONTINUE)
return;
}
this.upstream.read();
}
}
|
|
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client;
import com.sun.net.httpserver.Headers;
import com.sun.net.httpserver.HttpExchange;
import com.sun.net.httpserver.HttpHandler;
import com.sun.net.httpserver.HttpServer;
import org.apache.http.Consts;
import org.apache.http.Header;
import org.apache.http.HttpHost;
import org.apache.http.entity.StringEntity;
import org.apache.http.util.EntityUtils;
import org.codehaus.mojo.animal_sniffer.IgnoreJRERequirement;
import org.elasticsearch.mocksocket.MockHttpServer;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import static org.elasticsearch.client.RestClientTestUtil.getAllStatusCodes;
import static org.elasticsearch.client.RestClientTestUtil.getHttpMethods;
import static org.elasticsearch.client.RestClientTestUtil.randomStatusCode;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
/**
* Integration test to check interaction between {@link RestClient} and {@link org.apache.http.client.HttpClient}.
* Works against a real http server, one single host.
*/
//animal-sniffer doesn't like our usage of com.sun.net.httpserver.* classes
@IgnoreJRERequirement
public class RestClientSingleHostIntegTests extends RestClientTestCase {
    private static HttpServer httpServer;    // mock server shared by all tests in this class
    private static RestClient restClient;    // client under test, pointed at httpServer
    private static String pathPrefix;        // "" or a randomized "/testPathPrefix/..." prefix
    private static Header[] defaultHeaders;  // random default headers configured on the client
    /**
     * Starts the shared mock HTTP server and builds the {@link RestClient} under test,
     * randomly choosing whether to use a path prefix and which default headers to set.
     */
    @BeforeClass
    public static void startHttpServer() throws Exception {
        String pathPrefixWithoutLeadingSlash;
        if (randomBoolean()) {
            pathPrefixWithoutLeadingSlash = "testPathPrefix/" + randomAsciiOfLengthBetween(1, 5);
            pathPrefix = "/" + pathPrefixWithoutLeadingSlash;
        } else {
            pathPrefix = pathPrefixWithoutLeadingSlash = "";
        }
        httpServer = createHttpServer();
        defaultHeaders = RestClientTestUtil.randomHeaders(getRandom(), "Header-default");
        RestClientBuilder restClientBuilder = RestClient.builder(
            new HttpHost(httpServer.getAddress().getHostString(), httpServer.getAddress().getPort())).setDefaultHeaders(defaultHeaders);
        if (pathPrefix.length() > 0) {
            // The prefix is randomly set with or without its leading slash; the client must accept both.
            restClientBuilder.setPathPrefix((randomBoolean() ? "/" : "") + pathPrefixWithoutLeadingSlash);
        }
        restClient = restClientBuilder.build();
    }
    /**
     * Creates and starts a mock HTTP server on an ephemeral loopback port, registering one
     * context per known status code that simply echoes that status back.
     */
    private static HttpServer createHttpServer() throws Exception {
        HttpServer httpServer = MockHttpServer.createHttp(new InetSocketAddress(InetAddress.getLoopbackAddress(), 0), 0);
        httpServer.start();
        //returns a different status code depending on the path
        for (int statusCode : getAllStatusCodes()) {
            httpServer.createContext(pathPrefix + "/" + statusCode, new ResponseHandler(statusCode));
        }
        return httpServer;
    }
    //animal-sniffer doesn't like our usage of com.sun.net.httpserver.* classes
    @IgnoreJRERequirement
    /**
     * Echo handler: replies with a fixed status code, mirrors back all request headers,
     * and returns the request body verbatim (or no body if the request had none).
     */
    private static class ResponseHandler implements HttpHandler {
        private final int statusCode;  // status code this handler always responds with
        ResponseHandler(int statusCode) {
            this.statusCode = statusCode;
        }
        @Override
        public void handle(HttpExchange httpExchange) throws IOException {
            // Read the whole request body as UTF-8 text.
            StringBuilder body = new StringBuilder();
            try (InputStreamReader reader = new InputStreamReader(httpExchange.getRequestBody(), Consts.UTF_8)) {
                char[] buffer = new char[256];
                int read;
                while ((read = reader.read(buffer)) != -1) {
                    body.append(buffer, 0, read);
                }
            }
            // Mirror every request header into the response so tests can verify what was sent.
            Headers requestHeaders = httpExchange.getRequestHeaders();
            Headers responseHeaders = httpExchange.getResponseHeaders();
            for (Map.Entry<String, List<String>> header : requestHeaders.entrySet()) {
                responseHeaders.put(header.getKey(), header.getValue());
            }
            httpExchange.getRequestBody().close();
            // -1 signals "no response body" to HttpServer.
            httpExchange.sendResponseHeaders(statusCode, body.length() == 0 ? -1 : body.length());
            if (body.length() > 0) {
                try (OutputStream out = httpExchange.getResponseBody()) {
                    out.write(body.toString().getBytes(Consts.UTF_8));
                }
            }
            httpExchange.close();
        }
    }
    /**
     * Shuts down the shared client and mock server, nulling the statics so state
     * cannot leak between test classes.
     */
    @AfterClass
    public static void stopHttpServers() throws IOException {
        restClient.close();
        restClient = null;
        httpServer.stop(0);
        httpServer = null;
    }
    /**
     * End to end test for headers. We test it explicitly against a real http client as there are different ways
     * to set/add headers to the {@link org.apache.http.client.HttpClient}.
     * Exercises the test http server ability to send back whatever headers it received.
     */
    public void testHeaders() throws IOException {
        for (String method : getHttpMethods()) {
            // Headers the http client itself adds; HEAD requests carry no Content-length.
            final Set<String> standardHeaders = new HashSet<>(Arrays.asList("Connection", "Host", "User-agent", "Date"));
            if (method.equals("HEAD") == false) {
                standardHeaders.add("Content-length");
            }
            final Header[] requestHeaders = RestClientTestUtil.randomHeaders(getRandom(), "Header");
            final int statusCode = randomStatusCode(getRandom());
            Response esResponse;
            try {
                esResponse = restClient.performRequest(method, "/" + statusCode, Collections.<String, String>emptyMap(), requestHeaders);
            } catch(ResponseException e) {
                // Error status codes surface as ResponseException; the response is still inspectable.
                esResponse = e.getResponse();
            }
            assertEquals(method, esResponse.getRequestLine().getMethod());
            assertEquals(statusCode, esResponse.getStatusLine().getStatusCode());
            assertEquals((pathPrefix.length() > 0 ? pathPrefix : "") + "/" + statusCode, esResponse.getRequestLine().getUri());
            assertHeaders(defaultHeaders, requestHeaders, esResponse.getHeaders(), standardHeaders);
            // Every echoed header must be either one we sent ("Header*") or a known standard one.
            for (final Header responseHeader : esResponse.getHeaders()) {
                String name = responseHeader.getName();
                if (name.startsWith("Header") == false) {
                    assertTrue("unknown header was returned " + name, standardHeaders.remove(name));
                }
            }
            assertTrue("some expected standard headers weren't returned: " + standardHeaders, standardHeaders.isEmpty());
        }
    }
/**
 * End to end test for delete with body. We test it explicitly as it is not supported
 * out of the box by {@link org.apache.http.client.HttpClient}.
 * Exercises the test http server ability to send back whatever body it received.
 */
public void testDeleteWithBody() throws IOException {
    bodyTest("DELETE");
}
/**
 * End to end test for get with body. We test it explicitly as it is not supported
 * out of the box by {@link org.apache.http.client.HttpClient}.
 * Exercises the test http server ability to send back whatever body it received.
 */
public void testGetWithBody() throws IOException {
    bodyTest("GET");
}
/**
 * Sends a request of the given method with a fixed JSON body to a randomly
 * chosen status-code endpoint and asserts the echoing server returned the
 * method, status code, path, and body unchanged.
 *
 * @param method the HTTP method to exercise (e.g. "GET", "DELETE")
 */
private void bodyTest(String method) throws IOException {
    String requestBody = "{ \"field\": \"value\" }";
    StringEntity entity = new StringEntity(requestBody);
    int statusCode = randomStatusCode(getRandom());
    Response esResponse;
    try {
        esResponse = restClient.performRequest(method, "/" + statusCode, Collections.<String, String>emptyMap(), entity);
    } catch(ResponseException e) {
        // Error status codes throw, but the echoed body is still on the response.
        esResponse = e.getResponse();
    }
    assertEquals(method, esResponse.getRequestLine().getMethod());
    assertEquals(statusCode, esResponse.getStatusLine().getStatusCode());
    assertEquals((pathPrefix.length() > 0 ? pathPrefix : "") + "/" + statusCode, esResponse.getRequestLine().getUri());
    assertEquals(requestBody, EntityUtils.toString(esResponse.getEntity()));
}
}
|
|
/*
* Copyright (c) 2011, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wso2.developerstudio.eclipse.platform.ui.wizard;
import java.io.File;
import java.io.IOException;
import java.net.URL;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.regex.Pattern;
import org.apache.maven.model.Parent;
import org.apache.maven.model.Repository;
import org.apache.maven.model.RepositoryPolicy;
import org.apache.maven.project.MavenProject;
import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
import org.eclipse.core.resources.IContainer;
import org.eclipse.core.resources.IFile;
import org.eclipse.core.resources.IProject;
import org.eclipse.core.resources.IProjectDescription;
import org.eclipse.core.resources.IResource;
import org.eclipse.core.resources.IWorkspaceRoot;
import org.eclipse.core.resources.ResourcesPlugin;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.IConfigurationElement;
import org.eclipse.core.runtime.IExecutableExtension;
import org.eclipse.core.runtime.NullProgressMonitor;
import org.eclipse.core.runtime.Path;
import org.eclipse.core.runtime.Platform;
import org.eclipse.core.runtime.preferences.IPreferencesService;
import org.eclipse.jface.viewers.ISelection;
import org.eclipse.jface.viewers.IStructuredSelection;
import org.eclipse.jface.wizard.Wizard;
import org.eclipse.jface.wizard.WizardPage;
import org.eclipse.swt.widgets.Text;
import org.eclipse.ui.IEditorPart;
import org.eclipse.ui.IEditorReference;
import org.eclipse.ui.INewWizard;
import org.eclipse.ui.IWorkbench;
import org.eclipse.ui.PlatformUI;
import org.eclipse.ui.ide.IDE;
import org.wso2.developerstudio.eclipse.logging.core.IDeveloperStudioLog;
import org.wso2.developerstudio.eclipse.logging.core.Logger;
import org.wso2.developerstudio.eclipse.maven.util.MavenUtils;
import org.wso2.developerstudio.eclipse.platform.core.Activator;
import org.wso2.developerstudio.eclipse.platform.core.model.MavenInfo;
import org.wso2.developerstudio.eclipse.platform.core.project.model.ProjectDataModel;
import org.wso2.developerstudio.eclipse.platform.core.project.model.ProjectWizardSettings;
import org.wso2.developerstudio.eclipse.platform.core.utils.Constants;
import org.wso2.developerstudio.eclipse.platform.ui.editor.Refreshable;
import org.wso2.developerstudio.eclipse.platform.ui.wizard.pages.MavenDetailsPage;
import org.wso2.developerstudio.eclipse.platform.ui.wizard.pages.ProjectOptionsDataPage;
import org.wso2.developerstudio.eclipse.platform.ui.wizard.pages.ProjectOptionsPage;
import org.wso2.developerstudio.eclipse.utils.file.FileUtils;
/**
 * Base class for WSO2 Developer Studio project-creation wizards.
 *
 * <p>Responsibilities: assembling wizard pages from a wizard manifest, creating
 * the project resource (in the workspace, a user-chosen location, or relative to
 * the current selection), generating the project's pom.xml honouring global
 * Maven preferences, and keeping parent maven-multi-module (MMM) projects'
 * module lists up to date.
 */
public abstract class AbstractWSO2ProjectCreationWizard extends Wizard implements INewWizard, IExecutableExtension {
    // Preference keys for the globally configured Maven repository.
    protected static final String SNAPSHOTS_UPDATE_POLICY = "SNAPSHOTS_UPDATE_POLICY";
    protected static final String SNAPSHOTS_CHECKSUM_POLICY = "SNAPSHOTS_CHECKSUM_POLICY";
    protected static final String SNAPSHOTS_ENABLED = "SNAPSHOTS_ENABLED";
    protected static final String RELEASES_UPDATE_POLICY = "RELEASES_UPDATE_POLICY";
    protected static final String RELEASES_CHECKSUM_POLICY = "RELEASES_CHECKSUM_POLICY";
    protected static final String RELEASES_ENABLED = "RELEASES_ENABLED";
    protected static final String GLOBAL_REPOSITORY_ID = "GLOBAL_REPOSITORY_ID";
    protected static final String GLOBAL_REPOSITORY_URL = "GLOBAL_REPOSITORY_URL";
    protected static final String DISABLE_WSO2_REPOSITORY = "DISABLE_WSO2_REPOSITORY";
    private static final String GLOBAL_MAVEN_VERSION = "MAVEN_VERSION";
    private static final String GLOBAL_MAVEN_GROUP_ID = "MAVEN_GROUPID";
    /** Preference qualifier under which all of the keys above are stored. */
    private static final String PLATFORM_UI_PREFERENCE_QUALIFIER = "org.wso2.developerstudio.eclipse.platform.ui";

    private static IDeveloperStudioLog log = Logger.getLog(Activator.PLUGIN_ID);

    private ProjectDataModel model;
    private IConfigurationElement configElement;
    private ISelection currentSelection;
    private boolean customPageRequired;
    private WizardPage customPage;

    protected final static String DIST_EDITOR_ID = "org.wso2.developerstudio.eclipse.distribution.project.editor.DistProjectEditor";
    protected final static String JDT_BUILD_COMMAND = "org.eclipse.jdt.core.javabuilder";
    protected final static String JDT_PROJECT_NATURE = "org.eclipse.jdt.core.javanature";

    /** Label -> text-widget registry shared with the wizard pages. */
    private Map<String, Text> map = new HashMap<String, Text>();
    private IPreferencesService preferencesService = Platform.getPreferencesService();

    public void setMap(String label, Text txt) {
        map.put(label, txt);
    }

    public Map<String, Text> getMap() {
        return map;
    }

    /**
     * Builds the page list from the wizard manifest: an options page (skipped
     * when there is only one option), the data page, an optional custom page,
     * and — for project wizards — the Maven details page.
     */
    public void addPages() {
        URL resource = getWizardManifest();
        try {
            ProjectWizardSettings settings = new ProjectWizardSettings(resource.openStream(), configElement);
            if (settings.getProjectOptions().size() == 1) {
                // Only one option: pre-select it instead of showing a chooser page.
                getModel().setSelectedOption(settings.getProjectOptions().get(0).getId());
            } else {
                addPage(new ProjectOptionsPage(settings, getModel()));
            }
            addPage(new ProjectOptionsDataPage(settings, getModel(), getCurrentSelection(),
                    isRequireProjectLocationSection(), isRequiredWorkingSet(), isRequiredWorkspaceLocation()));
            if (isCustomPageRequired()) {
                addPage(getCustomPage());
            }
            if (isProjectWizard()) {
                addPage(new MavenDetailsPage(getModel()));
            }
        } catch (Exception e) {
            log.error("error adding pages", e);
        }
    }

    /** Reads a single string value from the platform-UI preference scope (null when unset). */
    private String getPreference(String key) {
        return preferencesService.getString(PLATFORM_UI_PREFERENCE_QUALIFIER, key, null, null);
    }

    /**
     * Builds a Maven {@link Repository} from the global repository preferences,
     * or returns {@code null} when no global repository URL is configured.
     */
    protected Repository getGlobalRepositoryFromPreference() {
        String repoURL = getPreference(GLOBAL_REPOSITORY_URL);
        if (repoURL != null) {
            Repository repo = new Repository();
            repo.setUrl(repoURL);
            repo.setId(getPreference(GLOBAL_REPOSITORY_ID));
            RepositoryPolicy releasePolicy = new RepositoryPolicy();
            // NOTE(review): "enabled" is derived from the mere *presence* of the
            // preference, not its value — confirm that the preference page only
            // writes these keys when the policy is actually enabled.
            releasePolicy.setEnabled(getPreference(RELEASES_ENABLED) != null);
            releasePolicy.setUpdatePolicy(getPreference(RELEASES_UPDATE_POLICY));
            releasePolicy.setChecksumPolicy(getPreference(RELEASES_CHECKSUM_POLICY));
            repo.setReleases(releasePolicy);
            RepositoryPolicy snapshotPolicy = new RepositoryPolicy();
            snapshotPolicy.setEnabled(getPreference(SNAPSHOTS_ENABLED) != null);
            snapshotPolicy.setUpdatePolicy(getPreference(SNAPSHOTS_UPDATE_POLICY));
            snapshotPolicy.setChecksumPolicy(getPreference(SNAPSHOTS_CHECKSUM_POLICY));
            repo.setSnapshots(snapshotPolicy);
            return repo;
        }
        return null;
    }

    /** True when the contributing extension declares {@code project="true"}. */
    protected boolean isProjectWizard() {
        String projectAttr = configElement.getAttribute("project");
        return projectAttr != null && projectAttr.equals("true");
    }

    protected boolean isRequireProjectLocationSection() {
        return true;
    }

    protected boolean isRequiredWorkingSet() {
        return true;
    }

    protected boolean isRequiredWorkspaceLocation() {
        return false;
    }

    protected boolean isCustomPageRequired() {
        return customPageRequired;
    }

    protected WizardPage getCustomPage() {
        return customPage;
    }

    public void setCustomPage(WizardPage customPage) {
        this.customPage = customPage;
    }

    public void setCustomPage(boolean customPage) {
        this.customPageRequired = customPage;
    }

    public void setInitializationData(IConfigurationElement configElement, String arg1, Object arg2)
            throws CoreException {
        this.configElement = configElement;
    }

    /**
     * Resolves the wizard manifest resource declared by the contributing
     * extension's {@code wizardManifest} attribute, or {@code null} when this
     * wizard was not created through the extension registry.
     */
    protected URL getWizardManifest() {
        if (configElement != null) {
            String wizardManifestPath = configElement.getAttribute("wizardManifest");
            return Platform.getBundle(configElement.getContributor().getName()).getResource(wizardManifestPath);
        }
        return null;
    }

    public void init(IWorkbench arg0, IStructuredSelection selection) {
        setCurrentSelection(selection);
    }

    public void setModel(ProjectDataModel model) {
        this.model = model;
    }

    public ProjectDataModel getModel() {
        return model;
    }

    /**
     * There are 3 locations to create project as follows Create project in the
     * workspace - no validation Create project in the userDefine location - no
     * validation required Create a project in the selection - Validation
     * required
     *
     * @return the created (and opened) project
     * @throws CoreException
     */
    public IProject createNewProject() throws CoreException {
        IProject project = null;
        String name = getModel().getProjectName();
        File location = getModel().getLocation();
        String rootWorkspaceLocation = ResourcesPlugin.getWorkspace().getRoot().getLocation().toOSString()
                + File.separator + name;
        if (rootWorkspaceLocation.equals(location.getPath())) {
            project = createProjectInDefaultWorkspace(name);
        } else if (getModel().isUserSet()) {
            project = createProjectInUserDefineSpace(name, location);
        } else {
            project = createProjectInSelectionSpace(name, location, rootWorkspaceLocation);
        }
        return project;
    }

    /** Creates the project in a user-chosen directory (a sub-folder named after the project). */
    private IProject createProjectInUserDefineSpace(String name, File location) throws CoreException {
        location = new File(location.getPath() + File.separator + name);
        getModel().setLocation(location);
        IWorkspaceRoot root = ResourcesPlugin.getWorkspace().getRoot();
        IProject project = root.getProject(name);
        IProjectDescription newProjectDescription = project.getWorkspace().newProjectDescription(name);
        newProjectDescription.setLocationURI(location.toURI());
        project.create(newProjectDescription, new NullProgressMonitor());
        project.open(new NullProgressMonitor());
        return project;
    }

    /**
     * Creates the project relative to the current selection. If the selected
     * parent is an MMM project the new project is registered as one of its
     * modules; otherwise the project is created beside the parent and the
     * directory tree is walked upwards looking for an enclosing MMM project.
     */
    private IProject createProjectInSelectionSpace(String name, File location, String rootWorkspaceLocation)
            throws CoreException {
        IWorkspaceRoot root = ResourcesPlugin.getWorkspace().getRoot();
        IProject project = root.getProject(name);
        boolean isParentMMM = true;
        IProject parentProject = null;
        /* Parent project resolution may fail for reasons unrelated to project creation,
           so the failure is logged and creation falls back to the default workspace. */
        try {
            File parentFile = location.getParentFile();
            parentProject = root.getProject(parentFile.getName());
            if (parentProject != null && !parentProject.hasNature(Constants.MAVEN_MULTI_MODULE_PROJECT_NATURE)) {
                // Selected parent is not an MMM project: create the new project
                // next to it rather than inside it.
                location = new File(parentFile.getParent() + File.separator + name);
                getModel().setLocation(location);
                isParentMMM = false;
            }
        } catch (CoreException e) {
            log.warn("Cannot create project in selected location ", e);
            return createProjectInDefaultWorkspace(name);
        }
        IProjectDescription newProjectDescription = project.getWorkspace().newProjectDescription(name);
        newProjectDescription.setLocationURI(location.toURI());
        project.create(newProjectDescription, new NullProgressMonitor());
        project.open(new NullProgressMonitor());
        if (isParentMMM) {
            addProjectToModuleList(name, parentProject);
        } else {
            // Walk up the directory tree until an MMM ancestor is found (or we
            // run out of ancestors / reach the default workspace path).
            File current = location;
            while (!rootWorkspaceLocation.equals(current.getPath())) {
                File parentFile = current.getParentFile();
                if (parentFile == null) {
                    break; // reached the filesystem root without finding an MMM project
                }
                parentProject = root.getProject(parentFile.getName());
                if (parentProject != null && parentProject.hasNature(Constants.MAVEN_MULTI_MODULE_PROJECT_NATURE)) {
                    addProjectToModuleList(name, parentProject);
                    break;
                }
                // BUG FIX: the original loop never advanced `location`, so it spun
                // forever whenever the immediate parent was not an MMM project.
                current = parentFile;
            }
        }
        return project;
    }

    /**
     * Registers {@code name} in the module list of {@code parentProject},
     * logging (not propagating) pom read/parse failures so that project
     * creation itself still succeeds.
     */
    private void addProjectToModuleList(String name, IProject parentProject) throws CoreException {
        try {
            updateMMMPModuleList(name, parentProject);
        } catch (IOException e) {
            log.error("Error occured while adding " + name + " to module list.", e);
        } catch (XmlPullParserException e) {
            log.error("Error occured while adding " + name
                    + " to module list. due to parent pom file parser issue", e);
        }
    }

    /** Creates the project directly in the default workspace location. */
    private IProject createProjectInDefaultWorkspace(String name) throws CoreException {
        IWorkspaceRoot root = ResourcesPlugin.getWorkspace().getRoot();
        IProject project = root.getProject(name);
        project.create(new NullProgressMonitor());
        project.open(new NullProgressMonitor());
        return project;
    }

    /**
     * This method is used to update the module list of Maven multi module
     * project upon a project creation under MMM project. Fixed TOOLS-1492
     *
     * @param name module name to add
     * @param parentProject MMM project whose pom.xml is updated
     * @throws CoreException
     * @throws IOException
     * @throws XmlPullParserException
     */
    private void updateMMMPModuleList(String name, IProject parentProject) throws CoreException, IOException,
            XmlPullParserException {
        IFile pomFile = parentProject.getFile("pom.xml");
        if (pomFile.exists()) {
            MavenProject mavenProject = MavenUtils.getMavenProject(pomFile.getLocation().toFile());
            mavenProject.getModules().add(name);
            List<String> sortedModuleList = getSortedModuleList(mavenProject.getModules(), parentProject);
            mavenProject.getModules().clear();
            mavenProject.getModules().addAll(sortedModuleList);
            MavenUtils.saveMavenProject(mavenProject, pomFile.getLocation().toFile());
            parentProject.refreshLocal(IResource.DEPTH_INFINITE, new NullProgressMonitor());
        }
    }

    /**
     * Sorts module entries: entries backed by open workspace projects first
     * (in {@link #sortProjects(List)} order, as workspace-relative paths),
     * followed by entries with no corresponding workspace project.
     */
    private List<String> getSortedModuleList(List<String> moduleList, IProject parentProject) {
        List<IProject> projectList = new ArrayList<IProject>();
        List<String> nonProjectModuleList = new ArrayList<String>();
        List<String> sortedModuleList = new ArrayList<String>();
        for (String module : moduleList) {
            IProject projectFromModule = getProjectFromModule(module);
            if (projectFromModule != null) {
                projectList.add(projectFromModule);
            } else {
                nonProjectModuleList.add(module);
            }
        }
        projectList = sortProjects(projectList);
        for (IProject iProject : projectList) {
            if (iProject != null && iProject.exists() && iProject.isOpen()) {
                // Module entries in a pom always use '/' separators.
                String relativePath = FileUtils.getRelativePath(parentProject.getLocation().toFile(),
                        iProject.getLocation().toFile()).replaceAll(Pattern.quote(File.separator), "/");
                sortedModuleList.add(relativePath);
            }
        }
        sortedModuleList.addAll(nonProjectModuleList);
        return sortedModuleList;
    }

    /** Resolves a pom module entry (possibly a path) to the workspace project handle for its last segment. */
    private IProject getProjectFromModule(String moduleName) {
        String[] split = moduleName.split(Pattern.quote("/"));
        return ResourcesPlugin.getWorkspace().getRoot().getProject(split[split.length - 1]);
    }

    /**
     * Generates the project pom using the packaging type from the wizard's
     * Maven info. Delegates to {@link #createPOM(File, String)}.
     */
    public void createPOM(File pomLocation) throws Exception {
        createPOM(pomLocation, getModel().getMavenInfo().getPackageName());
    }

    /**
     * Generates the project pom at the given location with the given packaging
     * type, honouring the globally configured group id / version overrides,
     * parent project, WSO2 repository opt-out, and global repository.
     */
    public void createPOM(File pomLocation, String packagingType) throws Exception {
        MavenInfo mavenInfo = getModel().getMavenInfo();
        // Global preferences, when set, override the wizard-supplied coordinates.
        String customGroupId = getPreference(GLOBAL_MAVEN_GROUP_ID);
        String customVersion = getPreference(GLOBAL_MAVEN_VERSION);
        MavenProject mavenProject = MavenUtils.createMavenProject(
                customGroupId != null ? customGroupId : mavenInfo.getGroupId(), mavenInfo.getArtifactId(),
                customVersion != null ? customVersion : mavenInfo.getVersion(), packagingType);
        Parent parentProject = getModel().getMavenInfo().getParentProject();
        if (parentProject != null) {
            mavenProject.getModel().setParent(parentProject);
        }
        // Presence of the preference (any value) disables the WSO2 repository.
        if (getPreference(DISABLE_WSO2_REPOSITORY) == null) {
            MavenUtils.updateMavenRepo(mavenProject);
        }
        Repository globalRepositoryFromPreference = getGlobalRepositoryFromPreference();
        if (globalRepositoryFromPreference != null) {
            mavenProject.getModel().addRepository(globalRepositoryFromPreference);
            mavenProject.getModel().addPluginRepository(globalRepositoryFromPreference);
        }
        MavenUtils.saveMavenProject(mavenProject, pomLocation);
    }

    /**
     * Reads the group id from the given pom, falling back to
     * {@code "org.wso2.carbon"} when the pom is missing or unreadable.
     */
    public String getMavenGroupId(File pomLocation) {
        String groupId = "org.wso2.carbon";
        if (pomLocation != null && pomLocation.exists()) {
            try {
                MavenProject mavenProject = MavenUtils.getMavenProject(pomLocation);
                groupId = mavenProject.getGroupId();
            } catch (Exception e) {
                log.error("error reading pom file", e);
            }
        }
        return groupId;
    }

    /** Refreshes every open distribution-project editor so it reflects the new project. */
    public void refreshDistProjects() {
        try {
            IEditorReference[] editorReferences = PlatformUI.getWorkbench().getActiveWorkbenchWindow().getActivePage()
                    .getEditorReferences();
            for (IEditorReference reference : editorReferences) {
                if (DIST_EDITOR_ID.equals(reference.getId())) {
                    // getEditor(false): do not force-restore editors that are not yet materialised.
                    IEditorPart editor = reference.getEditor(false);
                    if (editor instanceof Refreshable) {
                        Refreshable refreshable = (Refreshable) editor;
                        refreshable.refresh();
                    }
                }
            }
        } catch (Exception e) {
            log.warn("Cannot refresh Carbon application project list", e);
        }
    }

    /** Opens the given file in its associated editor (best effort; failures are logged). */
    public void openEditor(File file) {
        IFile artifact = null;
        if (file != null) {
            try {
                refreshDistProjects();
                artifact = ResourcesPlugin.getWorkspace().getRoot()
                        .getFileForLocation(Path.fromOSString(file.getAbsolutePath()));
                IDE.openEditor(PlatformUI.getWorkbench().getActiveWorkbenchWindow().getActivePage(), artifact);
            } catch (Exception e) {
                log.warn("Cannot open resource '" + file.getName() + "' in it's associated editor", e);
            }
        }
    }

    public void setCurrentSelection(ISelection currentSelection) {
        this.currentSelection = currentSelection;
    }

    public ISelection getCurrentSelection() {
        return currentSelection;
    }

    /** @return the primary resource created by this wizard */
    public abstract IResource getCreatedResource();

    /**
     * Orders projects so that non-distribution projects come first and
     * distribution projects last. Returns the input list unchanged when a
     * project's natures cannot be read.
     */
    protected List<IProject> sortProjects(List<IProject> projects) {
        try {
            List<IProject> distributionProjects = new ArrayList<IProject>();
            List<IProject> ordinaryProjects = new ArrayList<IProject>();
            for (IProject iProject : projects) {
                if (iProject.hasNature(Constants.DISTRIBUTION_PROJECT_NATURE)) {
                    distributionProjects.add(iProject);
                } else {
                    ordinaryProjects.add(iProject);
                }
            }
            ordinaryProjects.addAll(distributionProjects);
            return ordinaryProjects;
        } catch (CoreException e) {
            log.warn("Project list cannot be sorted", e);
            return projects;
        }
    }
}
|
|
/**
* Copyright (C) 2012-2014 Dell, Inc
* See annotations for authorship information
*
* ====================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ====================================================================
*/
package org.dasein.cloud.google.compute.server;
import com.google.api.client.googleapis.json.GoogleJsonResponseException;
import com.google.api.services.compute.Compute;
import com.google.api.services.compute.model.*;
import org.apache.log4j.Logger;
import org.dasein.cloud.*;
import org.dasein.cloud.compute.*;
import org.dasein.cloud.compute.Snapshot;
import org.dasein.cloud.google.Google;
import org.dasein.cloud.google.GoogleException;
import org.dasein.cloud.google.GoogleMethod;
import org.dasein.cloud.google.GoogleOperationType;
import org.dasein.cloud.google.capabilities.GCESnapshotCapabilities;
import org.dasein.cloud.util.APITrace;
import org.joda.time.DateTime;
import org.joda.time.format.DateTimeFormatter;
import org.joda.time.format.ISODateTimeFormat;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.io.IOException;
import java.util.*;
/**
* Implements the snapshot services supported in the Google API.
* @author Drew Lyall
* @version 2014.03 initial version
* @since 2014.03
*/
public class SnapshotSupport extends AbstractSnapshotSupport{
static private final Logger logger = Google.getLogger(SnapshotSupport.class);
private Google provider;
/**
 * Creates snapshot support backed by the given Google provider context.
 */
public SnapshotSupport(Google provider){
    super(provider);
    this.provider = provider;
}
// GCE has no cross-account snapshot sharing, so both share operations reject.
@Override
public void addSnapshotShare(@Nonnull String providerSnapshotId, @Nonnull String accountNumber) throws CloudException, InternalException{
    throw new OperationNotSupportedException("Google does not support sharing a snapshot across accounts.");
}
@Override
public void addPublicShare(@Nonnull String providerSnapshotId) throws CloudException, InternalException{
    throw new OperationNotSupportedException("Google does not support sharing a snapshot across accounts.");
}
/**
 * Creates a snapshot of the volume named in {@code options} and blocks until
 * the zone operation completes, then resolves the new snapshot by name.
 *
 * @return the snapshot's name (GCE snapshot names double as their IDs here)
 * @throws CloudException if the operation fails or times out
 */
@Override
public String createSnapshot(@Nonnull SnapshotCreateOptions options) throws CloudException, InternalException{
    APITrace.begin(provider, "Snapshot.createSnapshot");
    try{
        Compute gce = provider.getGoogleCompute();
        try{
            Volume volume = provider.getComputeServices().getVolumeSupport().getVolume(options.getVolumeId());
            com.google.api.services.compute.model.Snapshot snapshot = new com.google.api.services.compute.model.Snapshot();
            snapshot.setName(options.getName());
            snapshot.setDescription(options.getDescription());
            // NOTE(review): this passes the volume ID as the *source disk id*;
            // confirm GCE accepts the volume id here rather than the disk's
            // numeric id — the createSnapshot() call below already names the disk.
            snapshot.setSourceDiskId(options.getVolumeId());
            Operation job = gce.disks().createSnapshot(provider.getContext().getAccountNumber(), volume.getProviderDataCenterId(), options.getVolumeId(), snapshot).execute();
            GoogleMethod method = new GoogleMethod(provider);
            // Poll the zone operation; on success, look the snapshot up by name.
            if(method.getOperationComplete(provider.getContext(), job, GoogleOperationType.ZONE_OPERATION, "", volume.getProviderDataCenterId())){
                SnapshotList snapshots = gce.snapshots().list(provider.getContext().getAccountNumber()).setFilter("name eq " + options.getName()).execute();
                for(com.google.api.services.compute.model.Snapshot s : snapshots.getItems()){
                    if(s.getName().equals(options.getName()))return s.getName();
                }
            }
            throw new CloudException("An error occurred creating the snapshot: Operation Timedout");
        } catch (IOException ex) {
            logger.error(ex.getMessage());
            if (ex.getClass() == GoogleJsonResponseException.class) {
                GoogleJsonResponseException gjre = (GoogleJsonResponseException)ex;
                throw new GoogleException(CloudErrorType.GENERAL, gjre.getStatusCode(), gjre.getContent(), gjre.getDetails().getMessage());
            } else
                throw new CloudException("An error occurred creating the snapshot: " + ex.getMessage());
        } catch (Exception ex) {
            // NOTE(review): this broad catch rethrows *every* non-IO failure
            // (including NPEs and the CloudException thrown above) as
            // "Copying snapshots is not supported" — it masks real errors and
            // looks copy-pasted from a copy-snapshot method; verify intent.
            throw new OperationNotSupportedException("Copying snapshots is not supported in GCE");
        }
    }
    finally {
        APITrace.end();
    }
}
// Lazily created capabilities object; volatile so the null-check is safe
// across threads (worst case two instances are built — they are stateless).
private transient volatile GCESnapshotCapabilities capabilities;
@Override
public @Nonnull GCESnapshotCapabilities getCapabilities(){
    if(capabilities == null){
        capabilities = new GCESnapshotCapabilities(provider);
    }
    return capabilities;
}
/** GCE's own term for a snapshot, regardless of locale. */
@Override
public @Nonnull String getProviderTermForSnapshot(@Nonnull Locale locale){
    return "snapshot";
}
/**
 * Looks a snapshot up by ID, returning {@code null} when GCE reports 404
 * (snapshot does not exist) and converting other API failures to cloud
 * exceptions.
 */
@Override
public Snapshot getSnapshot(@Nonnull String snapshotId) throws InternalException, CloudException{
    APITrace.begin(provider, "Snapshot.getSnapshot");
    try{
        Compute gce = provider.getGoogleCompute();
        try{
            com.google.api.services.compute.model.Snapshot snapshot = gce.snapshots().get(provider.getContext().getAccountNumber(), snapshotId).execute();
            return toSnapshot(snapshot);
        } catch (IOException ex) {
            // Missing snapshot is a normal answer, not an error.
            if ((ex.getMessage() != null) && (ex.getMessage().contains("404 Not Found"))) // not found.
                return null;
            logger.error(ex.getMessage());
            if (ex.getClass() == GoogleJsonResponseException.class) {
                GoogleJsonResponseException gjre = (GoogleJsonResponseException)ex;
                throw new GoogleException(CloudErrorType.GENERAL, gjre.getStatusCode(), gjre.getContent(), gjre.getDetails().getMessage());
            } else
                throw new CloudException("An error occurred getting the snapshot: " + ex.getMessage());
        }
    }
    finally {
        APITrace.end();
    }
}
// GCE snapshots are never publicly shared from this driver's point of view.
@Override
public boolean isPublic(@Nonnull String snapshotId) throws InternalException, CloudException{
    return false;
}
// Snapshot support is always available on a GCE account.
@Override
public boolean isSubscribed() throws InternalException, CloudException{
    return true;
}
// No sharing support -> the share list is always empty.
@Override
public @Nonnull Iterable<String> listShares(@Nonnull String snapshotId) throws InternalException, CloudException{
    return Collections.emptyList();
}
/**
 * Lists the status of every snapshot in the account.
 *
 * @return one {@link ResourceStatus} per snapshot (possibly empty, never null)
 */
@Override
public @Nonnull Iterable<ResourceStatus> listSnapshotStatus() throws InternalException, CloudException{
    APITrace.begin(provider, "Snapshot.listSnapshotStatus");
    try{
        ArrayList<ResourceStatus> statuses = new ArrayList<ResourceStatus>();
        Compute gce = provider.getGoogleCompute();
        try{
            SnapshotList list = gce.snapshots().list(provider.getContext().getAccountNumber()).execute();
            // BUG FIX: guard on getItems() as listSnapshots() does. The original
            // tested list.size() — the number of JSON fields on the response
            // envelope, not the snapshot count — and then iterated getItems(),
            // which is null when the account has no snapshots (NPE).
            if(list != null && list.getItems() != null && list.getItems().size() > 0){
                for(com.google.api.services.compute.model.Snapshot googleSnapshot : list.getItems()){
                    ResourceStatus status = toStatus(googleSnapshot);
                    if(status != null)statuses.add(status);
                }
            }
            return statuses;
        } catch (IOException ex) {
            logger.error(ex.getMessage());
            if (ex.getClass() == GoogleJsonResponseException.class) {
                GoogleJsonResponseException gjre = (GoogleJsonResponseException)ex;
                throw new GoogleException(CloudErrorType.GENERAL, gjre.getStatusCode(), gjre.getContent(), gjre.getDetails().getMessage());
            } else
                throw new CloudException("An error occurred retrieving snapshot status");
        }
    }
    finally {
        APITrace.end();
    }
}
/**
 * Lists every snapshot in the account as Dasein {@link Snapshot} objects.
 *
 * @return the snapshots (possibly empty, never null)
 */
@Override
public @Nonnull Iterable<Snapshot> listSnapshots() throws InternalException, CloudException{
    APITrace.begin(provider, "Snapshot.listSnapshots");
    try{
        ArrayList<Snapshot> snapshots = new ArrayList<Snapshot>();
        Compute gce = provider.getGoogleCompute();
        try{
            SnapshotList list = gce.snapshots().list(provider.getContext().getAccountNumber()).execute();
            // getItems() is null when the account has no snapshots.
            if(list != null && list.getItems() != null && list.getItems().size() > 0){
                for(com.google.api.services.compute.model.Snapshot googleSnapshot : list.getItems()){
                    Snapshot snapshot = toSnapshot(googleSnapshot);
                    if(snapshot != null)snapshots.add(snapshot);
                }
            }
            return snapshots;
        } catch (IOException ex) {
            logger.error(ex.getMessage());
            if (ex.getClass() == GoogleJsonResponseException.class) {
                GoogleJsonResponseException gjre = (GoogleJsonResponseException)ex;
                throw new GoogleException(CloudErrorType.GENERAL, gjre.getStatusCode(), gjre.getContent(), gjre.getDetails().getMessage());
            } else
                throw new CloudException("An error occurred while listing snapshots: " + ex.getMessage());
        }
    }
    finally {
        APITrace.end();
    }
}
// Filtered listing is just a search over the full listing.
@Override
public @Nonnull Iterable<Snapshot> listSnapshots(SnapshotFilterOptions options) throws InternalException, CloudException{
    return searchSnapshots(options);
}
/**
 * Deletes the snapshot and blocks until the global operation completes.
 *
 * @throws CloudException if the deletion fails or times out
 */
@Override
public void remove(@Nonnull String snapshotId) throws InternalException, CloudException{
    APITrace.begin(provider, "Snapshot.remove");
    try{
        Compute gce = provider.getGoogleCompute();
        try{
            Operation job = gce.snapshots().delete(provider.getContext().getAccountNumber(), snapshotId).execute();
            GoogleMethod method = new GoogleMethod(provider);
            if(!method.getOperationComplete(provider.getContext(), job, GoogleOperationType.GLOBAL_OPERATION, "", "")){
                throw new CloudException("An error occurred deleting the snapshot: Operation timed out");
            }
        } catch (IOException ex) {
            if (ex.getClass() == GoogleJsonResponseException.class) {
                logger.error(ex.getMessage());
                GoogleJsonResponseException gjre = (GoogleJsonResponseException)ex;
                throw new GoogleException(CloudErrorType.GENERAL, gjre.getStatusCode(), gjre.getContent(), gjre.getDetails().getMessage());
            } else
                throw new CloudException("An error occurred deleting the snapshot: " + ex.getMessage());
        }
    }
    finally {
        APITrace.end();
    }
}
// GCE supports neither snapshot sharing nor snapshot metadata, so all of the
// share-removal and tag-removal operations reject.
@Override
public void removeAllSnapshotShares(@Nonnull String providerSnapshotId) throws CloudException, InternalException{
    throw new OperationNotSupportedException("GCE does not support snapshot sharing");
}
@Override
public void removeSnapshotShare(@Nonnull String providerSnapshotId, @Nonnull String accountNumber) throws CloudException, InternalException{
    throw new OperationNotSupportedException("Google does not support sharing/unsharing a snapshot across accounts.");
}
@Override
public void removePublicShare(@Nonnull String providerSnapshotId) throws CloudException, InternalException{
    throw new OperationNotSupportedException("Google does not support sharing/unsharing a snapshot across accounts.");
}
@Override
public void removeTags(@Nonnull String snapshotId, @Nonnull Tag... tags) throws CloudException, InternalException{
    throw new OperationNotSupportedException("Google snapshot does not contain meta data");
}
@Override
public void removeTags(@Nonnull String[] snapshotIds, @Nonnull Tag... tags) throws CloudException, InternalException{
    throw new OperationNotSupportedException("Google snapshot does not contain meta data");
}
/**
 * Filters the full snapshot listing down to those matching the given options.
 *
 * @param options filter criteria; treated as match-all when null despite the annotation
 * @return the snapshots accepted by the filter
 * @throws InternalException on local processing errors
 * @throws CloudException on provider-side errors
 */
@Override
public @Nonnull Iterable<Snapshot> searchSnapshots(@Nonnull SnapshotFilterOptions options) throws InternalException, CloudException{
    APITrace.begin(provider, "Snapshot.searchSnapshots");
    try {
        List<Snapshot> matches = new ArrayList<Snapshot>();
        for (Snapshot candidate : listSnapshots()) {
            // Defensive null check retained from the original implementation.
            boolean accepted = (options == null) || options.matches(candidate, null);
            if (accepted) {
                matches.add(candidate);
            }
        }
        return matches;
    }
    finally {
        APITrace.end();
    }
}
/**
 * Tag updates are not supported — GCE snapshots carry no Dasein-style metadata.
 *
 * @throws OperationNotSupportedException always
 */
@Override
public void updateTags(@Nonnull String snapshotId, @Nonnull Tag... tags) throws CloudException, InternalException{
    throw new OperationNotSupportedException("Google snapshot does not contain meta data");
}

/**
 * Bulk tag updates are not supported — GCE snapshots carry no Dasein-style metadata.
 *
 * @throws OperationNotSupportedException always
 */
@Override
public void updateTags(@Nonnull String[] snapshotIds, @Nonnull Tag... tags) throws CloudException, InternalException{
    throw new OperationNotSupportedException("Google snapshot does not contain meta data");
}
/**
 * Converts a GCE snapshot model object into a Dasein {@link Snapshot}.
 * The GCE snapshot name doubles as both id and display name; the owner is the
 * current context's account number.
 *
 * @param googleSnapshot the GCE API model object to convert (must be non-null)
 * @return the populated Dasein snapshot
 */
private @Nullable Snapshot toSnapshot(com.google.api.services.compute.model.Snapshot googleSnapshot){
    Snapshot result = new Snapshot();
    result.setProviderSnapshotId(googleSnapshot.getName());
    result.setName(googleSnapshot.getName());
    result.setDescription(googleSnapshot.getDescription());
    result.setOwner(provider.getContext().getAccountNumber());

    // Map the GCE status string onto the Dasein state; anything unrecognized is PENDING.
    String status = googleSnapshot.getStatus();
    SnapshotState mappedState;
    if (status.equals("READY")) {
        mappedState = SnapshotState.AVAILABLE;
    }
    else if (status.equals("DELETING")) {
        mappedState = SnapshotState.DELETED;
    }
    else {
        mappedState = SnapshotState.PENDING;
    }
    result.setCurrentState(mappedState);

    //TODO: Set visible scope for snapshots
    result.setSizeInGb(googleSnapshot.getDiskSizeGb().intValue());

    // Creation timestamp arrives as an ISO-8601 string; convert to epoch millis.
    DateTimeFormatter formatter = ISODateTimeFormat.dateTime();
    DateTime created = DateTime.parse(googleSnapshot.getCreationTimestamp(), formatter);
    result.setSnapshotTimestamp(created.toDate().getTime());

    // The source disk is a URL; keep only the final path segment as the volume id.
    String sourceDisk = googleSnapshot.getSourceDisk();
    if (sourceDisk != null) {
        result.setVolumeId(sourceDisk.substring(sourceDisk.lastIndexOf("/") + 1));
    }
    return result;
}
/**
 * Converts a GCE snapshot model object into a Dasein {@link ResourceStatus}.
 *
 * @param snapshot the GCE API model object, or null
 * @return the status (name + mapped state), or null when the input is null
 * @throws CloudException declared for interface symmetry; not thrown here
 */
private @Nullable ResourceStatus toStatus(@Nullable com.google.api.services.compute.model.Snapshot snapshot) throws CloudException {
    // The parameter is declared @Nullable but was previously dereferenced
    // unconditionally; guard so a null input yields null instead of an NPE.
    if (snapshot == null) {
        return null;
    }
    SnapshotState state;
    if (snapshot.getStatus().equals("READY")) {
        state = SnapshotState.AVAILABLE;
    }
    else if (snapshot.getStatus().equals("DELETING")) {
        state = SnapshotState.DELETED;
    }
    else {
        // Any other GCE status (e.g. still being created) maps to PENDING.
        state = SnapshotState.PENDING;
    }
    return new ResourceStatus(snapshot.getName(), state);
}
}
|
|
/*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2016 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.ui.trans.steps.stringoperations;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.ModifyEvent;
import org.eclipse.swt.events.ModifyListener;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.events.ShellAdapter;
import org.eclipse.swt.events.ShellEvent;
import org.eclipse.swt.layout.FormAttachment;
import org.eclipse.swt.layout.FormData;
import org.eclipse.swt.layout.FormLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Event;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Listener;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.swt.widgets.TableItem;
import org.eclipse.swt.widgets.Text;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.util.Utils;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.core.row.ValueMetaInterface;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.step.BaseStepMeta;
import org.pentaho.di.trans.step.StepDialogInterface;
import org.pentaho.di.trans.step.StepMeta;
import org.pentaho.di.trans.steps.stringoperations.StringOperationsMeta;
import org.pentaho.di.ui.core.dialog.ErrorDialog;
import org.pentaho.di.ui.core.gui.GUIResource;
import org.pentaho.di.ui.core.widget.ColumnInfo;
import org.pentaho.di.ui.core.widget.TableView;
import org.pentaho.di.ui.trans.step.BaseStepDialog;
import org.pentaho.di.ui.trans.step.TableItemInsertListener;
/**
* Dialog class for the StringOperations step.
*
* @author Samatar Hassan
* @since 02 April 2009
*/
/**
 * Dialog for the StringOperations step: maps input-stream fields to output fields
 * and selects the string operations (trim, upper/lower, padding, init cap, XML
 * masking, digit handling, special-character removal) applied to each field.
 */
public class StringOperationsDialog extends BaseStepDialog implements StepDialogInterface {
  private static Class<?> PKG = StringOperationsMeta.class; // for i18n purposes, needed by Translator2!!

  private Label wlKey;

  /** The operations grid: one row per field, eleven configuration columns. */
  private TableView wFields;

  private FormData fdlKey, fdKey;

  /** The step metadata being edited. */
  private StringOperationsMeta input;

  // holds the names of the fields entering this step
  private Map<String, Integer> inputFields;

  private ColumnInfo[] ciKey;

  /**
   * @param parent parent shell
   * @param in the step metadata; must be a {@link StringOperationsMeta}
   * @param tr the owning transformation
   * @param sname the current step name
   */
  public StringOperationsDialog( Shell parent, Object in, TransMeta tr, String sname ) {
    super( parent, (BaseStepMeta) in, tr, sname );
    input = (StringOperationsMeta) in;
    inputFields = new HashMap<String, Integer>();
  }

  /**
   * Builds the dialog, populates it from the metadata, and runs the SWT event
   * loop until the user closes it.
   *
   * @return the step name on OK, or null when cancelled
   */
  public String open() {
    Shell parent = getParent();
    Display display = parent.getDisplay();

    shell = new Shell( parent, SWT.DIALOG_TRIM | SWT.RESIZE | SWT.MAX | SWT.MIN );
    props.setLook( shell );
    setShellImage( shell, input );

    // Any edit in the dialog marks the step as changed.
    ModifyListener lsMod = new ModifyListener() {
      public void modifyText( ModifyEvent e ) {
        input.setChanged();
      }
    };
    changed = input.hasChanged();

    FormLayout formLayout = new FormLayout();
    formLayout.marginWidth = Const.FORM_MARGIN;
    formLayout.marginHeight = Const.FORM_MARGIN;

    shell.setLayout( formLayout );
    shell.setText( BaseMessages.getString( PKG, "StringOperationsDialog.Shell.Title" ) );

    int middle = props.getMiddlePct();
    int margin = Const.MARGIN;

    // Stepname line
    wlStepname = new Label( shell, SWT.RIGHT );
    wlStepname.setText( BaseMessages.getString( PKG, "StringOperationsDialog.Stepname.Label" ) );
    props.setLook( wlStepname );
    fdlStepname = new FormData();
    fdlStepname.left = new FormAttachment( 0, 0 );
    fdlStepname.right = new FormAttachment( middle, -margin );
    fdlStepname.top = new FormAttachment( 0, margin );
    wlStepname.setLayoutData( fdlStepname );

    wStepname = new Text( shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER );
    wStepname.setText( stepname );
    props.setLook( wStepname );
    wStepname.addModifyListener( lsMod );
    fdStepname = new FormData();
    fdStepname.left = new FormAttachment( middle, 0 );
    fdStepname.top = new FormAttachment( 0, margin );
    fdStepname.right = new FormAttachment( 100, 0 );
    wStepname.setLayoutData( fdStepname );

    // Label above the operations grid
    wlKey = new Label( shell, SWT.NONE );
    wlKey.setText( BaseMessages.getString( PKG, "StringOperationsDialog.Fields.Label" ) );
    props.setLook( wlKey );
    fdlKey = new FormData();
    fdlKey.left = new FormAttachment( 0, 0 );
    fdlKey.top = new FormAttachment( wStepname, 2 * margin );
    wlKey.setLayoutData( fdlKey );

    int nrFieldCols = 11;
    int nrFieldRows = ( input.getFieldInStream() != null ? input.getFieldInStream().length : 1 );

    // One ColumnInfo per grid column; column 0 is filled asynchronously with the
    // incoming field names (see setComboBoxes()).
    ciKey = new ColumnInfo[nrFieldCols];
    ciKey[0] =
      new ColumnInfo(
        BaseMessages.getString( PKG, "StringOperationsDialog.ColumnInfo.InStreamField" ),
        ColumnInfo.COLUMN_TYPE_CCOMBO, new String[] { "" }, false );
    ciKey[1] =
      new ColumnInfo(
        BaseMessages.getString( PKG, "StringOperationsDialog.ColumnInfo.OutStreamField" ),
        ColumnInfo.COLUMN_TYPE_TEXT, false );
    ciKey[2] =
      new ColumnInfo(
        BaseMessages.getString( PKG, "StringOperationsDialog.ColumnInfo.Trim" ),
        ColumnInfo.COLUMN_TYPE_CCOMBO, StringOperationsMeta.trimTypeDesc, true );
    ciKey[3] =
      new ColumnInfo(
        BaseMessages.getString( PKG, "StringOperationsDialog.ColumnInfo.LowerUpper" ),
        ColumnInfo.COLUMN_TYPE_CCOMBO, StringOperationsMeta.lowerUpperDesc, true );
    ciKey[4] =
      new ColumnInfo(
        BaseMessages.getString( PKG, "StringOperationsDialog.ColumnInfo.Padding" ),
        ColumnInfo.COLUMN_TYPE_CCOMBO, StringOperationsMeta.paddingDesc, true );
    ciKey[5] =
      new ColumnInfo(
        BaseMessages.getString( PKG, "StringOperationsDialog.ColumnInfo.CharPad" ),
        ColumnInfo.COLUMN_TYPE_TEXT, false );
    ciKey[6] =
      new ColumnInfo(
        BaseMessages.getString( PKG, "StringOperationsDialog.ColumnInfo.LenPad" ),
        ColumnInfo.COLUMN_TYPE_TEXT, false );
    ciKey[7] =
      new ColumnInfo(
        BaseMessages.getString( PKG, "StringOperationsDialog.ColumnInfo.InitCap" ),
        ColumnInfo.COLUMN_TYPE_CCOMBO, StringOperationsMeta.initCapDesc );
    ciKey[8] =
      new ColumnInfo(
        BaseMessages.getString( PKG, "StringOperationsDialog.ColumnInfo.MaskXML" ),
        ColumnInfo.COLUMN_TYPE_CCOMBO, StringOperationsMeta.maskXMLDesc );
    ciKey[9] =
      new ColumnInfo(
        BaseMessages.getString( PKG, "StringOperationsDialog.ColumnInfo.Digits" ),
        ColumnInfo.COLUMN_TYPE_CCOMBO, StringOperationsMeta.digitsDesc );
    ciKey[10] =
      new ColumnInfo(
        BaseMessages.getString( PKG, "StringOperationsDialog.ColumnInfo.RemoveSpecialCharacters" ),
        ColumnInfo.COLUMN_TYPE_CCOMBO, StringOperationsMeta.removeSpecialCharactersDesc );

    ciKey[1]
      .setToolTip( BaseMessages.getString( PKG, "StringOperationsDialog.ColumnInfo.OutStreamField.Tooltip" ) );
    // These columns accept Kettle variable expressions.
    ciKey[1].setUsingVariables( true );
    ciKey[4].setUsingVariables( true );
    ciKey[5].setUsingVariables( true );
    ciKey[6].setUsingVariables( true );
    ciKey[7].setUsingVariables( true );

    wFields =
      new TableView(
        transMeta, shell, SWT.BORDER | SWT.FULL_SELECTION | SWT.MULTI | SWT.V_SCROLL | SWT.H_SCROLL, ciKey,
        nrFieldRows, lsMod, props );

    fdKey = new FormData();
    fdKey.left = new FormAttachment( 0, 0 );
    fdKey.top = new FormAttachment( wlKey, margin );
    fdKey.right = new FormAttachment( 100, -margin );
    fdKey.bottom = new FormAttachment( 100, -30 );
    wFields.setLayoutData( fdKey );

    // THE BUTTONS
    wOK = new Button( shell, SWT.PUSH );
    wOK.setText( BaseMessages.getString( PKG, "System.Button.OK" ) );
    wCancel = new Button( shell, SWT.PUSH );
    wCancel.setText( BaseMessages.getString( PKG, "System.Button.Cancel" ) );
    wGet = new Button( shell, SWT.PUSH );
    wGet.setText( BaseMessages.getString( PKG, "StringOperationsDialog.GetFields.Button" ) );
    fdGet = new FormData();
    fdGet.right = new FormAttachment( 100, 0 );
    // NOTE(review): the vertical offset multiplies "middle" (a percentage, not a
    // pixel count); looks suspicious, but setButtonPositions() below lays the
    // button out again, so behavior is kept as-is — confirm before changing.
    fdGet.top = new FormAttachment( wStepname, 3 * middle );
    wGet.setLayoutData( fdGet );

    setButtonPositions( new Button[] { wOK, wGet, wCancel }, margin, null );

    // Add listeners
    lsOK = new Listener() {
      public void handleEvent( Event e ) {
        ok();
      }
    };
    lsGet = new Listener() {
      public void handleEvent( Event e ) {
        get();
      }
    };
    lsCancel = new Listener() {
      public void handleEvent( Event e ) {
        cancel();
      }
    };

    wOK.addListener( SWT.Selection, lsOK );
    wGet.addListener( SWT.Selection, lsGet );
    wCancel.addListener( SWT.Selection, lsCancel );

    lsDef = new SelectionAdapter() {
      public void widgetDefaultSelected( SelectionEvent e ) {
        ok();
      }
    };

    wStepname.addSelectionListener( lsDef );

    // Detect X or ALT-F4 or something that kills this window...
    shell.addShellListener( new ShellAdapter() {
      public void shellClosed( ShellEvent e ) {
        cancel();
      }
    } );

    // Set the shell size, based upon previous time...
    setSize();

    getData();

    //
    // Search the fields in the background
    //
    final Runnable runnable = new Runnable() {
      public void run() {
        StepMeta stepMeta = transMeta.findStep( stepname );
        if ( stepMeta != null ) {
          try {
            RowMetaInterface row = transMeta.getPrevStepFields( stepMeta );
            if ( row != null ) {
              // Remember these fields...
              for ( int i = 0; i < row.size(); i++ ) {
                // Integer.valueOf replaces the deprecated new Integer(int) constructor.
                inputFields.put( row.getValueMeta( i ).getName(), Integer.valueOf( i ) );
              }

              setComboBoxes();
            }

            // Display missing field names in red
            Display.getDefault().asyncExec( new Runnable() {
              public void run() {
                if ( !wFields.isDisposed() ) {
                  for ( int i = 0; i < wFields.table.getItemCount(); i++ ) {
                    TableItem it = wFields.table.getItem( i );
                    if ( !Utils.isEmpty( it.getText( 1 ) ) ) {
                      if ( !inputFields.containsKey( it.getText( 1 ) ) ) {
                        it.setBackground( GUIResource.getInstance().getColorRed() );
                      }
                    }
                  }
                }
              }
            } );
          } catch ( KettleException e ) {
            logError( "Error getting fields from incoming stream!", e );
          }
        }
      }
    };
    new Thread( runnable ).start();

    input.setChanged( changed );

    shell.open();
    while ( !shell.isDisposed() ) {
      if ( !display.readAndDispatch() ) {
        display.sleep();
      }
    }
    return stepname;
  }

  /** Fills the in-stream-field combo column with the sorted incoming field names. */
  protected void setComboBoxes() {
    Set<String> keySet = inputFields.keySet();
    List<String> entries = new ArrayList<String>( keySet );
    String[] fieldNames = entries.toArray( new String[entries.size()] );
    Const.sortStrings( fieldNames );
    ciKey[0].setComboValues( fieldNames );
  }

  /**
   * Copy information from the meta-data input to the dialog fields.
   */
  public void getData() {
    if ( input.getFieldInStream() != null ) {
      for ( int i = 0; i < input.getFieldInStream().length; i++ ) {
        TableItem item = wFields.table.getItem( i );
        if ( input.getFieldInStream()[i] != null ) {
          item.setText( 1, input.getFieldInStream()[i] );
        }
        if ( input.getFieldOutStream()[i] != null ) {
          item.setText( 2, input.getFieldOutStream()[i] );
        }
        item.setText( 3, StringOperationsMeta.getTrimTypeDesc( input.getTrimType()[i] ) );
        item.setText( 4, StringOperationsMeta.getLowerUpperDesc( input.getLowerUpper()[i] ) );
        item.setText( 5, StringOperationsMeta.getPaddingDesc( input.getPaddingType()[i] ) );
        if ( input.getPadChar()[i] != null ) {
          item.setText( 6, input.getPadChar()[i] );
        }
        if ( input.getPadLen()[i] != null ) {
          item.setText( 7, input.getPadLen()[i] );
        }
        item.setText( 8, StringOperationsMeta.getInitCapDesc( input.getInitCap()[i] ) );
        item.setText( 9, StringOperationsMeta.getMaskXMLDesc( input.getMaskXML()[i] ) );
        item.setText( 10, StringOperationsMeta.getDigitsDesc( input.getDigits()[i] ) );
        item.setText( 11, StringOperationsMeta
          .getRemoveSpecialCharactersDesc( input.getRemoveSpecialCharacters()[i] ) );
      }
    }

    wFields.setRowNums();
    wFields.optWidth( true );

    wStepname.selectAll();
    wStepname.setFocus();
  }

  /** Discards edits: restores the changed flag and closes the dialog with a null step name. */
  private void cancel() {
    stepname = null;
    input.setChanged( changed );
    dispose();
  }

  /**
   * Copies the non-empty grid rows back into the given metadata object.
   *
   * @param inf the metadata to populate (re-allocated to the row count)
   */
  private void getInfo( StringOperationsMeta inf ) {
    int nrkeys = wFields.nrNonEmpty();

    inf.allocate( nrkeys );
    if ( isDebug() ) {
      logDebug( BaseMessages.getString( PKG, "StringOperationsDialog.Log.FoundFields", String.valueOf( nrkeys ) ) );
    }
    //CHECKSTYLE:Indentation:OFF
    for ( int i = 0; i < nrkeys; i++ ) {
      TableItem item = wFields.getNonEmpty( i );
      inf.getFieldInStream()[i] = item.getText( 1 );
      inf.getFieldOutStream()[i] = item.getText( 2 );
      inf.getTrimType()[i] = StringOperationsMeta.getTrimTypeByDesc( item.getText( 3 ) );
      inf.getLowerUpper()[i] = StringOperationsMeta.getLowerUpperByDesc( item.getText( 4 ) );
      inf.getPaddingType()[i] = StringOperationsMeta.getPaddingByDesc( item.getText( 5 ) );
      inf.getPadChar()[i] = item.getText( 6 );
      inf.getPadLen()[i] = item.getText( 7 );
      inf.getInitCap()[i] = StringOperationsMeta.getInitCapByDesc( item.getText( 8 ) );
      inf.getMaskXML()[i] = StringOperationsMeta.getMaskXMLByDesc( item.getText( 9 ) );
      inf.getDigits()[i] = StringOperationsMeta.getDigitsByDesc( item.getText( 10 ) );
      inf.getRemoveSpecialCharacters()[i] = StringOperationsMeta.getRemoveSpecialCharactersByDesc( item.getText( 11 ) );
    }

    stepname = wStepname.getText(); // return value
  }

  /** Validates the step name, commits the dialog contents to the metadata, and closes. */
  private void ok() {
    if ( Utils.isEmpty( wStepname.getText() ) ) {
      return;
    }

    // Get the information for the dialog into the input structure.
    getInfo( input );

    dispose();
  }

  /**
   * "Get fields" action: imports String-typed fields from the previous step into
   * the grid, pre-filling each operation column with its "None" choice.
   */
  private void get() {
    try {
      RowMetaInterface r = transMeta.getPrevStepFields( stepname );
      if ( r != null ) {
        TableItemInsertListener listener = new TableItemInsertListener() {
          public boolean tableItemInserted( TableItem tableItem, ValueMetaInterface v ) {
            if ( v.getType() == ValueMetaInterface.TYPE_STRING ) {
              // Only process strings
              tableItem.setText( 3, BaseMessages.getString( PKG, "StringOperationsMeta.TrimType.None" ) );
              tableItem.setText( 4, BaseMessages.getString( PKG, "StringOperationsMeta.LowerUpper.None" ) );
              tableItem.setText( 5, BaseMessages.getString( PKG, "StringOperationsMeta.Padding.None" ) );
              tableItem.setText( 8, BaseMessages.getString( PKG, "System.Combo.No" ) );
              tableItem.setText( 9, BaseMessages.getString( PKG, "StringOperationsMeta.MaskXML.None" ) );
              tableItem.setText( 10, BaseMessages.getString( PKG, "StringOperationsMeta.Digits.None" ) );
              tableItem.setText( 11, BaseMessages.getString(
                PKG, "StringOperationsMeta.RemoveSpecialCharacters.None" ) );
              return true;
            } else {
              return false;
            }
          }
        };
        BaseStepDialog.getFieldsFromPrevious( r, wFields, 1, new int[] { 1 }, new int[] {}, -1, -1, listener );
      }
    } catch ( KettleException ke ) {
      new ErrorDialog(
        shell, BaseMessages.getString( PKG, "StringOperationsDialog.FailedToGetFields.DialogTitle" ),
        BaseMessages.getString( PKG, "StringOperationsDialog.FailedToGetFields.DialogMessage" ), ke );
    }
  }
}
|
|
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.testFramework;
import com.intellij.configurationStore.StateStorageManagerKt;
import com.intellij.configurationStore.StoreReloadManager;
import com.intellij.execution.ExecutionException;
import com.intellij.execution.*;
import com.intellij.execution.actions.ConfigurationContext;
import com.intellij.execution.actions.ConfigurationFromContext;
import com.intellij.execution.actions.RunConfigurationProducer;
import com.intellij.execution.configurations.ConfigurationFactory;
import com.intellij.execution.configurations.GeneralCommandLine;
import com.intellij.execution.configurations.RunConfiguration;
import com.intellij.execution.executors.DefaultRunExecutor;
import com.intellij.execution.process.CapturingProcessAdapter;
import com.intellij.execution.process.ProcessHandler;
import com.intellij.execution.process.ProcessOutput;
import com.intellij.execution.runners.ExecutionEnvironment;
import com.intellij.execution.runners.ProgramRunner;
import com.intellij.execution.ui.RunContentDescriptor;
import com.intellij.execution.util.ExecUtil;
import com.intellij.ide.DataManager;
import com.intellij.ide.IdeEventQueue;
import com.intellij.ide.fileTemplates.FileTemplateManager;
import com.intellij.ide.fileTemplates.impl.FileTemplateManagerImpl;
import com.intellij.ide.util.treeView.AbstractTreeBuilder;
import com.intellij.ide.util.treeView.AbstractTreeNode;
import com.intellij.ide.util.treeView.AbstractTreeStructure;
import com.intellij.ide.util.treeView.AbstractTreeUi;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.application.Application;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ModalityState;
import com.intellij.openapi.application.PathManager;
import com.intellij.openapi.application.impl.LaterInvocator;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.extensions.ProjectExtensionPointName;
import com.intellij.openapi.extensions.impl.ExtensionPointImpl;
import com.intellij.openapi.fileEditor.FileDocumentManager;
import com.intellij.openapi.fileEditor.impl.LoadTextUtil;
import com.intellij.openapi.fileTypes.FileTypeRegistry;
import com.intellij.openapi.fileTypes.FileTypes;
import com.intellij.openapi.module.ModuleUtilCore;
import com.intellij.openapi.paths.WebReference;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.project.ex.ProjectManagerEx;
import com.intellij.openapi.ui.Queryable;
import com.intellij.openapi.util.*;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.io.FileUtilRt;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.*;
import com.intellij.openapi.vfs.ex.temp.TempFileSystem;
import com.intellij.psi.*;
import com.intellij.psi.impl.source.resolve.reference.impl.PsiMultiReference;
import com.intellij.rt.execution.junit.FileComparisonFailure;
import com.intellij.testFramework.fixtures.IdeaTestExecutionPolicy;
import com.intellij.ui.tree.AsyncTreeModel;
import com.intellij.util.*;
import com.intellij.util.concurrency.AppExecutorUtil;
import com.intellij.util.concurrency.AppScheduledExecutorService;
import com.intellij.util.io.Decompressor;
import com.intellij.util.lang.JavaVersion;
import com.intellij.util.ui.UIUtil;
import com.intellij.util.ui.tree.TreeUtil;
import gnu.trove.Equality;
import junit.framework.AssertionFailedError;
import org.jetbrains.annotations.Contract;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.concurrency.AsyncPromise;
import org.jetbrains.concurrency.Promise;
import javax.swing.*;
import javax.swing.tree.TreeModel;
import javax.swing.tree.TreePath;
import java.awt.*;
import java.awt.event.InvocationEvent;
import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.io.PrintStream;
import java.lang.reflect.Method;
import java.net.MalformedURLException;
import java.net.URL;
import java.nio.charset.Charset;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
import java.util.List;
import java.util.*;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.Function;
import java.util.function.Predicate;
import java.util.jar.JarFile;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static org.junit.Assert.*;
/**
* @author yole
*/
@SuppressWarnings({"UseOfSystemOutOrSystemErr", "TestOnlyProblems"})
public class PlatformTestUtil {
/** Logger for this utility class. */
private static final Logger LOG = Logger.getInstance(PlatformTestUtil.class);

/** True when the build runs with coverage instrumentation (system property {@code idea.coverage.enabled.build}). */
public static final boolean COVERAGE_ENABLED_BUILD = "true".equals(System.getProperty("idea.coverage.enabled.build"));

/** Registered project cleanup actions; a CopyOnWriteArrayList so registration is safe from any thread. */
private static final List<Runnable> ourProjectCleanups = new CopyOnWriteArrayList<>();

/** Upper bound (2 minutes) shared by the wait/poll helpers in this class. */
private static final long MAX_WAIT_TIME = TimeUnit.MINUTES.toMillis(2);
/**
 * Derives a test name from a JUnit method name by stripping a leading "test"
 * prefix and optionally lowercasing the first letter.
 *
 * @param name the raw test method name
 * @param lowercaseFirstLetter whether to lowercase the first character of the result
 * @return the derived name, or "" when nothing remains after stripping
 */
@NotNull
public static String getTestName(@NotNull String name, boolean lowercaseFirstLetter) {
  String stripped = StringUtil.trimStart(name, "test");
  if (StringUtil.isEmpty(stripped)) {
    return "";
  }
  return lowercaseFirstLetter(stripped, lowercaseFirstLetter);
}
/**
 * Lowercases the first character of {@code name} when requested, except for
 * all-uppercase names (see {@link #isAllUppercaseName(String)}), which are kept as-is.
 *
 * @param name the name to adjust
 * @param lowercaseFirstLetter whether lowering is requested at all
 * @return the possibly adjusted name
 */
@NotNull
public static String lowercaseFirstLetter(@NotNull String name, boolean lowercaseFirstLetter) {
  boolean shouldLower = lowercaseFirstLetter && !isAllUppercaseName(name);
  return shouldLower ? Character.toLowerCase(name.charAt(0)) + name.substring(1) : name;
}
/**
 * Returns true when the name contains no lowercase letters and at least three
 * uppercase ones (non-letter characters are permitted and ignored).
 *
 * @param name the name to test
 * @return true for names treated as "all uppercase"
 */
public static boolean isAllUppercaseName(@NotNull String name) {
  int upperSeen = 0;
  for (char c : name.toCharArray()) {
    if (Character.isLowerCase(c)) {
      return false;
    }
    if (Character.isUpperCase(c)) {
      upperSeen++;
    }
  }
  return upperSeen >= 3;
}
/**
 * Temporarily replaces every extension registered on the given project-level
 * extension point with {@code newExtensions}; the originals are restored when
 * {@code parentDisposable} is disposed.
 *
 * @param pointName the project extension point to mask
 * @param project the project whose point instance is masked
 * @param newExtensions the replacement extensions
 * @param parentDisposable scopes the lifetime of the mask
 * @see ExtensionPointImpl#maskAll(List, Disposable, boolean)
 */
public static <T> void maskExtensions(@NotNull ProjectExtensionPointName<T> pointName,
                                      @NotNull Project project,
                                      @NotNull List<T> newExtensions,
                                      @NotNull Disposable parentDisposable) {
  ((ExtensionPointImpl<T>)pointName.getPoint(project)).maskAll(newExtensions, parentDisposable, true);
}
/**
 * Renders a tree node for test output: {@link AbstractTreeNode}s use their test
 * presentation (with {@code printInfo} when supplied), anything else falls back
 * to {@link String#valueOf(Object)}.
 *
 * @param node the node object, may be null
 * @param printInfo optional print configuration for tree nodes
 * @return the textual representation
 */
@Nullable
public static String toString(@Nullable Object node, @Nullable Queryable.PrintInfo printInfo) {
  if (!(node instanceof AbstractTreeNode)) {
    return String.valueOf(node);
  }
  AbstractTreeNode<?> treeNode = (AbstractTreeNode<?>)node;
  if (printInfo != null) {
    return treeNode.toTestString(printInfo);
  }
  //noinspection deprecation
  return treeNode.getTestPresentation();
}
/** Prints the whole tree starting from the model root. */
public static String print(JTree tree, boolean withSelection) {
  return print(tree, new TreePath(tree.getModel().getRoot()), withSelection, null, null);
}

/** Prints the subtree at {@code path} using the given print configuration. */
public static String print(JTree tree, TreePath path, @Nullable Queryable.PrintInfo printInfo, boolean withSelection) {
  return print(tree, path, withSelection, printInfo, null);
}

/** Prints the whole tree, skipping nodes whose text fails {@code nodePrintCondition}. */
public static String print(JTree tree, boolean withSelection, @Nullable Predicate<? super String> nodePrintCondition) {
  return print(tree, new TreePath(tree.getModel().getRoot()), withSelection, null, nodePrintCondition);
}

// Joins the per-node lines produced by printAsList with newlines.
private static String print(JTree tree, TreePath path,
                            boolean withSelection,
                            @Nullable Queryable.PrintInfo printInfo,
                            @Nullable Predicate<? super String> nodePrintCondition) {
  return StringUtil.join(printAsList(tree, path, withSelection, printInfo, nodePrintCondition), "\n");
}

// Collects one indented line per visited node, depth-first from the given path.
private static Collection<String> printAsList(JTree tree,
                                              TreePath path,
                                              boolean withSelection,
                                              @Nullable Queryable.PrintInfo printInfo,
                                              @Nullable Predicate<? super String> nodePrintCondition) {
  Collection<String> strings = new ArrayList<>();
  printImpl(tree, path, strings, 0, withSelection, printInfo, nodePrintCondition);
  return strings;
}
/**
 * Recursively renders one node and (when the node is expanded) its children.
 * Each line is indented by its depth, prefixed with "-"/"+" for expanded/collapsed
 * parents, and wrapped in brackets when selected and {@code withSelection} is set.
 */
private static void printImpl(JTree tree,
                              TreePath path,
                              Collection<? super String> strings,
                              int level,
                              boolean withSelection,
                              @Nullable Queryable.PrintInfo printInfo,
                              @Nullable Predicate<? super String> nodePrintCondition) {
  Object component = path.getLastPathComponent();
  String nodeText = toString(TreeUtil.getUserObject(component), printInfo);
  if (nodePrintCondition != null && !nodePrintCondition.test(nodeText)) {
    return;
  }

  StringBuilder line = new StringBuilder();
  StringUtil.repeatSymbol(line, ' ', level);

  boolean expanded = tree.isExpanded(path);
  int childCount = tree.getModel().getChildCount(component);
  if (childCount > 0) {
    line.append(expanded ? "-" : "+");
  }

  boolean selected = tree.getSelectionModel().isPathSelected(path);
  if (withSelection && selected) {
    line.append("[");
  }
  line.append(nodeText);
  if (withSelection && selected) {
    line.append("]");
  }
  strings.add(line.toString());

  // Only recurse into visually expanded subtrees, mirroring what the user sees.
  if (expanded) {
    for (int i = 0; i < childCount; i++) {
      Object child = tree.getModel().getChild(component, i);
      printImpl(tree, path.pathByAddingChild(child), strings, level + 1, withSelection, printInfo, nodePrintCondition);
    }
  }
}
/** Asserts the tree's printed form equals {@code expected}, ignoring selection. */
public static void assertTreeEqual(JTree tree, @NonNls String expected) {
  assertTreeEqual(tree, expected, false);
}

/** Asserts the tree's printed form equals {@code expected}; both sides are trimmed before comparison. */
public static void assertTreeEqual(JTree tree, String expected, boolean checkSelected) {
  String treeStringPresentation = print(tree, checkSelected);
  assertEquals(expected.trim(), treeStringPresentation.trim());
}

/** Expands the given rows one by one, waiting for the tree to settle after each. */
public static void expand(JTree tree, int... rows) {
  for (int row : rows) {
    tree.expandRow(row);
    waitWhileBusy(tree);
  }
}

/** Expands the entire tree and waits for the expansion promise to complete. */
public static void expandAll(JTree tree) {
  waitForPromise(TreeUtil.promiseExpandAll(tree));
}
/** Milliseconds elapsed since {@code startTimeMillis}. */
private static long getMillisSince(long startTimeMillis) {
  return System.currentTimeMillis() - startTimeMillis;
}

/** Fails (via {@code assert}) when more than {@link #MAX_WAIT_TIME} has elapsed since the start time. */
private static void assertMaxWaitTimeSince(long startTimeMillis) {
  assertMaxWaitTimeSince(startTimeMillis, MAX_WAIT_TIME);
}

/** Fails (via {@code assert}) when more than {@code timeout} ms has elapsed since the start time. */
private static void assertMaxWaitTimeSince(long startTimeMillis, long timeout) {
  long took = getMillisSince(startTimeMillis);
  assert took <= timeout : String.format("the waiting takes too long. Expected to take no more than: %d ms but took: %d ms", timeout, took);
}
/** Asserts the caller is on the EDT and holds no write action (using the current application, if any). */
private static void assertDispatchThreadWithoutWriteAccess() {
  assertDispatchThreadWithoutWriteAccess(ApplicationManager.getApplication());
}

private static void assertDispatchThreadWithoutWriteAccess(Application application) {
  if (application != null) {
    assert !application.isWriteAccessAllowed() : "do not wait under write action to avoid possible deadlock";
    assert application.isDispatchThread();
  }
  else {
    // do not check for write access in simple tests
    assert EventQueue.isDispatchThread();
  }
}
/**
 * Returns true while the tree's model is still processing updates.
 * For {@link AsyncTreeModel}: dispatches pending EDT events, checks, then dispatches
 * and checks once more — presumably because processing may queue follow-up work on
 * the EDT (NOTE(review): confirm the double-check is still required).
 * Otherwise falls back to the deprecated AbstractTreeBuilder machinery.
 */
@SuppressWarnings("deprecation")
private static boolean isBusy(JTree tree, TreeModel model) {
  UIUtil.dispatchAllInvocationEvents();
  if (model instanceof AsyncTreeModel) {
    AsyncTreeModel async = (AsyncTreeModel)model;
    if (async.isProcessing()) return true;
    UIUtil.dispatchAllInvocationEvents();
    return async.isProcessing();
  }
  AbstractTreeBuilder builder = AbstractTreeBuilder.getBuilderFor(tree);
  if (builder == null) return false;
  AbstractTreeUi ui = builder.getUi();
  if (ui == null) return false;
  return ui.hasPendingWork();
}
/** Waits (on the EDT, polling) until the tree's own model is no longer busy. */
public static void waitWhileBusy(JTree tree) {
  waitWhileBusy(tree, tree.getModel());
}

/**
 * Polls {@link #isBusy(JTree, TreeModel)} every 5 ms until it returns false,
 * asserting the overall wait stays under {@link #MAX_WAIT_TIME}.
 * Must be called on the EDT without a write action.
 */
public static void waitWhileBusy(JTree tree, TreeModel model) {
  assertDispatchThreadWithoutWriteAccess();
  long startTimeMillis = System.currentTimeMillis();
  while (isBusy(tree, model)) {
    assertMaxWaitTimeSince(startTimeMillis);
    TimeoutUtil.sleep(5);
  }
}
/** Bridges an {@link ActionCallback} to a promise (done → resolved, rejected → cancelled) and waits on it. */
public static void waitForCallback(@NotNull ActionCallback callback) {
  AsyncPromise<?> promise = new AsyncPromise<>();
  callback.doWhenDone(() -> promise.setResult(null)).doWhenRejected(() -> promise.cancel());
  waitForPromise(promise);
}

/** Waits up to {@link #MAX_WAIT_TIME} for the promise; returns its value or null on failure/cancellation. */
@Nullable
public static <T> T waitForPromise(@NotNull Promise<T> promise) {
  return waitForPromise(promise, MAX_WAIT_TIME);
}

/** Waits up to {@code timeout} ms for the promise; returns its value or null on failure/cancellation. */
@Nullable
public static <T> T waitForPromise(@NotNull Promise<T> promise, long timeout) {
  return waitForPromise(promise, timeout, false);
}

/** Like {@link #waitForPromise(Promise)} but converts a rejected promise into an {@link AssertionError}. */
public static <T> T assertPromiseSucceeds(@NotNull Promise<T> promise) {
  return waitForPromise(promise, MAX_WAIT_TIME, true);
}
/**
 * Pumps the EDT while the promise is pending, polling {@code blockingGet} in
 * 20 ms slices until it resolves, fails, or the overall {@code timeout} elapses.
 *
 * @param promise the promise to wait on
 * @param timeout overall wait budget in milliseconds (enforced by assert)
 * @param assertSucceeded when true, a failed promise raises AssertionError instead of returning null
 * @return the promise's value, or null when it failed and {@code assertSucceeded} is false
 */
@Nullable
private static <T> T waitForPromise(@NotNull Promise<T> promise, long timeout, boolean assertSucceeded) {
  assertDispatchThreadWithoutWriteAccess();
  long start = System.currentTimeMillis();
  while (true) {
    // Keep the EDT alive so work scheduled via invokeLater can complete the promise.
    if (promise.getState() == Promise.State.PENDING) {
      UIUtil.dispatchAllInvocationEvents();
    }
    try {
      return promise.blockingGet(20, TimeUnit.MILLISECONDS);
    }
    catch (TimeoutException ignore) {
      // Not done yet — loop around, re-dispatch, and try again.
    }
    catch (Exception e) {
      if (assertSucceeded) {
        throw new AssertionError(e);
      }
      else {
        return null;
      }
    }
    assertMaxWaitTimeSince(start, timeout);
  }
}
/**
 * Pumps the EDT while the future is incomplete, polling {@code get} in 10 ms
 * slices until it completes or {@code timeoutMillis} elapses (enforced by assert).
 * Any failure of the future is rethrown as an {@link AssertionError}.
 *
 * @param future the future to wait on
 * @param timeoutMillis overall wait budget in milliseconds
 * @return the future's value
 */
public static <T> T waitForFuture(@NotNull Future<T> future, long timeoutMillis) {
  assertDispatchThreadWithoutWriteAccess();
  long start = System.currentTimeMillis();
  while (true) {
    // Keep the EDT alive so work scheduled via invokeLater can complete the future.
    if (!future.isDone()) {
      UIUtil.dispatchAllInvocationEvents();
    }
    try {
      return future.get(10, TimeUnit.MILLISECONDS);
    }
    catch (TimeoutException ignore) {
      // Not done yet — loop around and try again.
    }
    catch (Exception e) {
      throw new AssertionError(e);
    }
    assertMaxWaitTimeSince(start, timeoutMillis);
  }
}
/**
 * Waits until an EDT {@link Alarm} request scheduled {@code delay} ms out — plus a
 * nested invokeLater + second alarm request — has actually fired, pumping the EDT
 * the whole time. A pooled-thread alarm is scheduled alongside purely so its
 * progress can be reported in the failure diagnostics. Fails with a detailed
 * AssertionError when the chain does not complete within {@link #MAX_WAIT_TIME}.
 *
 * @param delay delay in milliseconds for each scheduled alarm request
 */
public static void waitForAlarm(final int delay) {
  @NotNull Application app = ApplicationManager.getApplication();
  assertDispatchThreadWithoutWriteAccess();

  Disposable tempDisposable = Disposer.newDisposable();

  // Progress flags for each stage of the alarm → invokeLater → alarm chain.
  final AtomicBoolean runnableInvoked = new AtomicBoolean();
  final AtomicBoolean pooledRunnableInvoked = new AtomicBoolean();
  final AtomicBoolean alarmInvoked1 = new AtomicBoolean();
  final AtomicBoolean alarmInvoked2 = new AtomicBoolean();
  final Alarm alarm = new Alarm(Alarm.ThreadToUse.SWING_THREAD);
  final Alarm pooledAlarm = new Alarm(Alarm.ThreadToUse.POOLED_THREAD, tempDisposable);
  ModalityState initialModality = ModalityState.current();

  // Stage 1: first alarm fires → invokeLater → stage 2: second alarm request.
  alarm.addRequest(() -> {
    alarmInvoked1.set(true);
    app.invokeLater(() -> {
      runnableInvoked.set(true);
      alarm.addRequest(() -> alarmInvoked2.set(true), delay);
    });
  }, delay);
  pooledAlarm.addRequest(() -> pooledRunnableInvoked.set(true), delay);

  UIUtil.dispatchAllInvocationEvents();

  long start = System.currentTimeMillis();
  try {
    boolean sleptAlready = false;
    while (!alarmInvoked2.get()) {
      // Verify the EDT queue is still being drained on every iteration.
      AtomicBoolean laterInvoked = new AtomicBoolean();
      app.invokeLater(() -> laterInvoked.set(true));
      UIUtil.dispatchAllInvocationEvents();
      assertTrue(laterInvoked.get());

      // First pass sleeps the full delay; afterwards poll in 10 ms slices.
      TimeoutUtil.sleep(sleptAlready ? 10 : delay);
      sleptAlready = true;
      if (getMillisSince(start) > MAX_WAIT_TIME) {
        // Dump everything we know about scheduling state to diagnose the hang.
        String queue = ((AppScheduledExecutorService)AppExecutorUtil.getAppScheduledExecutorService()).dumpQueue();
        throw new AssertionError("Couldn't await alarm" +
                                 "; alarm passed=" + alarmInvoked1.get() +
                                 "; modality1=" + initialModality +
                                 "; modality2=" + ModalityState.current() +
                                 "; non-modal=" + (initialModality == ModalityState.NON_MODAL) +
                                 "; invokeLater passed=" + runnableInvoked.get() +
                                 "; pooled alarm passed=" + pooledRunnableInvoked.get() +
                                 "; app.disposed=" + app.isDisposed() +
                                 "; alarm.disposed=" + alarm.isDisposed() +
                                 "; alarm.requests=" + alarm.getActiveRequestCount() +
                                 "\n delayQueue=" + StringUtil.trimLog(queue, 1000) +
                                 "\n invocatorEdtQueue=" + LaterInvocator.getLaterInvocatorEdtQueue() +
                                 "\n invocatorWtQueue=" + LaterInvocator.getLaterInvocatorWtQueue()
        );
      }
    }
  }
  finally {
    // Cancels the pooled alarm via its parent disposable.
    Disposer.dispose(tempDisposable);
  }
  UIUtil.dispatchAllInvocationEvents();
}
/**
 * Dispatch all pending invocation events (if any) in the {@link IdeEventQueue}, ignores and removes all other events from the queue.
 * Should only be invoked in Swing thread (asserted inside {@link IdeEventQueue#dispatchEvent(AWTEvent)})
 */
public static void dispatchAllInvocationEventsInIdeEventQueue() {
  IdeEventQueue eventQueue = IdeEventQueue.getInstance();
  while (true) {
    AWTEvent event = eventQueue.peekEvent();
    if (event == null) break;
    try {
      // Always remove the event; only InvocationEvents are actually dispatched.
      event = eventQueue.getNextEvent();
      if (event instanceof InvocationEvent) {
        eventQueue.dispatchEvent(event);
      }
    }
    catch (InterruptedException e) {
      throw new RuntimeException(e);
    }
  }
}
/**
 * Dispatches all pending events (if any) in the {@link IdeEventQueue}, one at a time, until it is empty.
 * Should only be invoked in Swing thread (asserted inside {@link IdeEventQueue#dispatchEvent(AWTEvent)})
 */
public static void dispatchAllEventsInIdeEventQueue() throws InterruptedException {
  IdeEventQueue queue = IdeEventQueue.getInstance();
  AWTEvent dispatched;
  do {
    dispatched = dispatchNextEventIfAny(queue);
  }
  while (dispatched != null);
}
/**
 * Dispatches one pending event (if any) from the {@link IdeEventQueue}.
 * Should only be invoked in Swing thread (asserted inside {@link IdeEventQueue#dispatchEvent(AWTEvent)})
 *
 * @return the event that was dispatched, or {@code null} if the queue was empty
 */
public static AWTEvent dispatchNextEventIfAny(@NotNull IdeEventQueue eventQueue) throws InterruptedException {
  assert SwingUtilities.isEventDispatchThread() : Thread.currentThread();
  if (eventQueue.peekEvent() == null) {
    return null;
  }
  AWTEvent polled = eventQueue.getNextEvent();
  eventQueue.dispatchEvent(polled);
  return polled;
}
/**
 * Renders {@code structure} starting at {@code node} as an indented text tree, using {@code printInfo}
 * to produce each node's presentation. See the {@code doPrint} overload for parameter semantics.
 */
public static StringBuilder print(AbstractTreeStructure structure, Object node, int currentLevel, @Nullable Comparator<?> comparator,
                                  int maxRowCount, char paddingChar, @Nullable Queryable.PrintInfo printInfo) {
  return print(structure, node, currentLevel, comparator, maxRowCount, paddingChar, o -> toString(o, printInfo));
}

/**
 * Renders the whole tree with children sorted by their presentation text and no row limit.
 *
 * @param nodePresenter converts a node object to its display text
 */
public static String print(AbstractTreeStructure structure, Object node, Function<Object, String> nodePresenter) {
  return print(structure, node, 0, Comparator.comparing(nodePresenter), -1, ' ', nodePresenter).toString();
}

// Shared implementation for the public print() overloads; collects output into a fresh buffer.
private static StringBuilder print(AbstractTreeStructure structure, Object node, int currentLevel, @Nullable Comparator<?> comparator,
                                   int maxRowCount, char paddingChar, Function<Object, String> nodePresenter) {
  StringBuilder buffer = new StringBuilder();
  doPrint(buffer, currentLevel, node, structure, comparator, maxRowCount, 0, paddingChar, nodePresenter);
  return buffer;
}

/**
 * Recursively prints {@code node} and its children into {@code buffer}, one line per node,
 * indented by {@code currentLevel} repetitions of {@code paddingChar}.
 *
 * @param comparator  if non-null, children are sorted with it before printing
 * @param maxRowCount maximum number of lines to emit; {@code -1} means unlimited
 * @param currentLine number of lines emitted so far
 * @return the updated line count
 */
private static int doPrint(StringBuilder buffer,
                           int currentLevel,
                           Object node,
                           AbstractTreeStructure structure,
                           @Nullable Comparator<?> comparator,
                           int maxRowCount,
                           int currentLine,
                           char paddingChar,
                           Function<Object, String> nodePresenter) {
  if (currentLine >= maxRowCount && maxRowCount != -1) return currentLine;
  StringUtil.repeatSymbol(buffer, paddingChar, currentLevel);
  buffer.append(nodePresenter.apply(node)).append("\n");
  currentLine++;
  Object[] children = structure.getChildElements(node);
  if (comparator != null) {
    // Sort a copy; getChildElements() ownership of the array is not guaranteed.
    List<?> list = new ArrayList<>(Arrays.asList(children));
    @SuppressWarnings({"unchecked"})
    Comparator<Object> c = (Comparator<Object>)comparator;
    Collections.sort(list, c);
    children = ArrayUtil.toObjectArray(list);
  }
  for (Object child : children) {
    currentLine = doPrint(buffer, currentLevel + 1, child, structure, comparator, maxRowCount, currentLine, paddingChar, nodePresenter);
  }
  return currentLine;
}

/** Prints each object on its own line using the default presentation. */
public static String print(Object[] objects) {
  return print(Arrays.asList(objects));
}

/** Prints each element of the collection on its own line using the default presentation. */
public static String print(Collection<?> c) {
  return c.stream().map(each -> toString(each, null)).collect(Collectors.joining("\n"));
}

/** Prints each list-model element on its own line (each line is terminated with {@code \n}). */
public static String print(@NotNull ListModel<?> model) {
  StringBuilder result = new StringBuilder();
  for (int i = 0; i < model.getSize(); i++) {
    result.append(toString(model.getElementAt(i), null));
    result.append("\n");
  }
  return result.toString();
}

/** Prints the tree without selection markers; see {@code print(JTree, boolean)}. */
public static String print(JTree tree) {
  return print(tree, false);
}
/**
 * Looks up the action by id, runs its update, asserts it ended up enabled, and performs it.
 * <p>
 * Fix: the previous version asserted {@code isEnabled()} on a freshly constructed local
 * {@link Presentation} that was never attached to the event — a {@code Presentation} is enabled by
 * default, so the assertion could never fail even for a disabled action. The enabled check must be
 * made against the event's own presentation, which is what {@code beforeActionPerformedUpdate} mutates.
 *
 * @param actionId id registered with {@link ActionManager}; asserts the action exists
 */
public static void invokeNamedAction(final String actionId) {
  final AnAction action = ActionManager.getInstance().getAction(actionId);
  assertNotNull(action);
  @SuppressWarnings("deprecation") final DataContext context = DataManager.getInstance().getDataContext();
  final AnActionEvent event = AnActionEvent.createFromAnAction(action, null, "", context);
  action.beforeActionPerformedUpdate(event);
  assertTrue(event.getPresentation().isEnabled());
  action.actionPerformed(event);
}
/**
 * Asserts that {@code actual} duration is acceptable against {@code expectedMs}, scaled by this
 * machine's CPU timing relative to the reference machine. Durations within the expectation are
 * logged; up to 10% over produces a warning; anything slower throws {@link AssertionFailedError}.
 * No-op in coverage builds.
 */
public static void assertTiming(final String message, final long expectedMs, final long actual) {
  if (COVERAGE_ENABLED_BUILD) return;
  long expectedOnMyMachine = Math.max(1, expectedMs * Timings.CPU_TIMING / Timings.REFERENCE_CPU_TIMING);
  StringBuilder details = new StringBuilder(message);
  if (actual > expectedOnMyMachine) {
    int percentage = (int)(100.0 * (actual - expectedOnMyMachine) / expectedOnMyMachine);
    details.append(". Operation took ").append(percentage).append("% longer than expected");
  }
  details.append(". Expected on my machine: ").append(expectedOnMyMachine).append(".")
    .append(" Actual: ").append(actual).append(".")
    .append(" Expected on Standard machine: ").append(expectedMs).append(";")
    .append(" Timings: CPU=").append(Timings.CPU_TIMING)
    .append(", I/O=").append(Timings.IO_TIMING).append(".");
  String logMessage = details.toString();
  // Allow 10% more in case of test machine is busy.
  final double acceptableChangeFactor = 1.1;
  if (actual < expectedOnMyMachine) {
    System.out.println(logMessage);
    TeamCityLogger.info(logMessage);
  }
  else if (actual < expectedOnMyMachine * acceptableChangeFactor) {
    TeamCityLogger.warning(logMessage, null);
  }
  else {
    // throw AssertionFailedError to try one more time
    throw new AssertionFailedError(logMessage);
  }
}
/**
 * An example: {@code startPerformanceTest("calculating pi",100, testRunnable).assertTiming();}
 */
@Contract(pure = true) // to warn about not calling .assertTiming() in the end
public static PerformanceTestInfo startPerformanceTest(@NonNls @NotNull String what, int expectedMs, @NotNull ThrowableRunnable<?> test) {
  return new PerformanceTestInfo(test, expectedMs, what);
}

/** Asserts path equality after normalizing both sides to forward slashes; {@code null} values are compared as-is. */
public static void assertPathsEqual(@Nullable String expected, @Nullable String actual) {
  if (expected != null) expected = FileUtil.toSystemIndependentName(expected);
  if (actual != null) actual = FileUtil.toSystemIndependentName(actual);
  assertEquals(expected, actual);
}

/** Returns the path to the {@code java} launcher of the JVM given by the {@code java.home} system property. */
@NotNull
public static String getJavaExe() {
  return SystemProperties.getJavaHome() + (SystemInfo.isWindows ? "\\bin\\java.exe" : "/bin/java");
}

/**
 * Returns a URL to the runtime classes of the current JDK:
 * a {@code jrt:} URL for JDK 9+, otherwise a file URL to {@code lib/rt.jar}.
 */
@NotNull
public static URL getRtJarURL() {
  String home = SystemProperties.getJavaHome();
  try {
    return JavaVersion.current().feature >= 9 ? new URL("jrt:" + home) : new File(home + "/lib/rt.jar").toURI().toURL();
  }
  catch (MalformedURLException e) {
    throw new RuntimeException(e);
  }
}
/** Closes the project without saving it (test cleanup helper). */
public static void forceCloseProjectWithoutSaving(@NotNull Project project) {
  ProjectManagerEx.getInstanceEx().forceCloseProject(project);
}

/** Saves the project without forcing all settings to be saved. */
public static void saveProject(@NotNull Project project) {
  saveProject(project, false);
}

/** Flushes pending project-file change notifications, then saves the project's component state. */
public static void saveProject(@NotNull Project project, boolean isForceSavingAllSettings) {
  StoreReloadManager.getInstance().flushChangedProjectFileAlarm();
  StateStorageManagerKt.saveComponentManager(project, isForceSavingAllSettings);
}
/**
 * Sleeps in 100 ms slices (at most 50, i.e. roughly 5 seconds total) until a slice passes during which
 * no thread other than the current one shows CPU activity. Gives up silently after the last slice.
 */
static void waitForAllBackgroundActivityToCalmDown() {
  for (int i = 0; i < 50; i++) {
    CpuUsageData data = CpuUsageData.measureCpuUsage(() -> TimeoutUtil.sleep(100));
    if (!data.hasAnyActivityBesides(Thread.currentThread())) {
      break;
    }
  }
}
/** Same as {@link #assertTiming(String, long, int, Runnable)} with 4 attempts. */
public static void assertTiming(String message, long expected, @NotNull Runnable actionToMeasure) {
  assertTiming(message, expected, 4, actionToMeasure);
}
/**
 * Measures {@code actionToMeasure} and asserts its duration via {@link #assertTiming(String, long, long)},
 * retrying up to {@code attempts} times (with GC between retries) before letting the failure propagate.
 * Waits for background CPU activity to settle before each measurement.
 */
@SuppressWarnings("CallToSystemGC")
public static void assertTiming(String message, long expected, int attempts, @NotNull Runnable actionToMeasure) {
  for (int attemptsLeft = attempts - 1; ; attemptsLeft--) {
    waitForAllBackgroundActivityToCalmDown();
    long duration = TimeoutUtil.measureExecutionTime(actionToMeasure::run);
    try {
      assertTiming(message, expected, duration);
      return;
    }
    catch (AssertionFailedError e) {
      if (attemptsLeft == 0) throw e;
      // Encourage the VM to reclaim garbage so the next measurement is less noisy.
      System.gc();
      System.gc();
      System.gc();
      String s = e.getMessage() + "\n " + attemptsLeft + " " + StringUtil.pluralize("attempt", attemptsLeft) + " remain";
      TeamCityLogger.warning(s, null);
      System.err.println(s);
    }
  }
}
/**
 * Indexes the given files by {@link VirtualFile#getName()}, keeping only those accepted by
 * {@code filter} (a {@code null} filter accepts everything).
 */
private static Map<String, VirtualFile> buildNameToFileMap(VirtualFile[] files, @Nullable VirtualFileFilter filter) {
  Map<String, VirtualFile> result = new HashMap<>();
  for (VirtualFile file : files) {
    if (filter == null || filter.accept(file)) {
      result.put(file.getName(), file);
    }
  }
  return result;
}
/** Asserts that two directory trees are identical, with no file filtering. */
public static void assertDirectoriesEqual(VirtualFile dirExpected, VirtualFile dirActual) throws IOException {
  assertDirectoriesEqual(dirExpected, dirActual, null);
}
/**
 * Recursively asserts that the trees rooted at {@code dirExpected} and {@code dirActual} contain the
 * same child names (after applying {@code fileFilter}) and that matching files have equal content.
 * All open documents are saved first so on-disk content is current.
 */
@SuppressWarnings("UnsafeVfsRecursion")
public static void assertDirectoriesEqual(VirtualFile dirExpected, VirtualFile dirActual, @Nullable VirtualFileFilter fileFilter) throws IOException {
  FileDocumentManager.getInstance().saveAllDocuments();
  VirtualFile[] expectedChildren = dirExpected.getChildren();
  shallowCompare(dirExpected, expectedChildren);
  VirtualFile[] actualChildren = dirActual.getChildren();
  shallowCompare(dirActual, actualChildren);
  Map<String, VirtualFile> expectedByName = buildNameToFileMap(expectedChildren, fileFilter);
  Map<String, VirtualFile> actualByName = buildNameToFileMap(actualChildren, fileFilter);
  assertEquals(dirExpected.getPath(), expectedByName.keySet(), actualByName.keySet());
  for (String name : expectedByName.keySet()) {
    VirtualFile expectedChild = expectedByName.get(name);
    VirtualFile actualChild = actualByName.get(name);
    if (expectedChild.isDirectory()) {
      assertDirectoriesEqual(expectedChild, actualChild, fileFilter);
    }
    else {
      assertFilesEqual(expectedChild, actualChild);
    }
  }
}
/**
 * Verifies that the VFS view of {@code dir}'s children matches the actual files on disk (compared by
 * sorted path lists). Skipped for non-local file systems and the temp file system, where there is no
 * physical directory to compare against.
 */
private static void shallowCompare(VirtualFile dir, VirtualFile[] vfs) {
  if (dir.isInLocalFileSystem() && dir.getFileSystem() != TempFileSystem.getInstance()) {
    String vfsPaths = Stream.of(vfs).map(VirtualFile::getPath).sorted().collect(Collectors.joining("\n"));
    File[] io = notNull(new File(dir.getPath()).listFiles());
    String ioPaths = Stream.of(io).map(f -> FileUtil.toSystemIndependentName(f.getPath())).sorted().collect(Collectors.joining("\n"));
    assertEquals(vfsPaths, ioPaths);
  }
}
/**
 * Asserts that two files have equal content.
 * First attempts to compare them as jar archives; if that fails with {@link IOException} (e.g. the
 * files are not valid archives), falls back to a text comparison, and to a raw byte comparison when
 * a text representation is unavailable for either file.
 */
public static void assertFilesEqual(VirtualFile fileExpected, VirtualFile fileActual) throws IOException {
  try {
    assertJarFilesEqual(VfsUtilCore.virtualToIoFile(fileExpected), VfsUtilCore.virtualToIoFile(fileActual));
  }
  catch (IOException e) {
    String actual = fileText(fileActual);
    String expected = fileText(fileExpected);
    if (expected == null || actual == null) {
      assertArrayEquals(fileExpected.getPath(), fileExpected.contentsToByteArray(), fileActual.contentsToByteArray());
    }
    else if (!StringUtil.equals(expected, actual)) {
      // FileComparisonFailure lets test runners show a proper diff viewer.
      throw new FileComparisonFailure("Text mismatch in the file " + fileExpected.getName(), expected, actual, fileExpected.getPath());
    }
  }
}
/**
 * Returns the file's text: from an open {@link Document} if one exists, otherwise by decoding the
 * file's bytes. Returns {@code null} for binary files of a known file type, which have no meaningful
 * text representation.
 */
private static String fileText(VirtualFile file) throws IOException {
  Document doc = FileDocumentManager.getInstance().getDocument(file);
  if (doc != null) {
    return doc.getText();
  }
  if (!file.getFileType().isBinary() || FileTypeRegistry.getInstance().isFileOfType(file, FileTypes.UNKNOWN)) {
    return LoadTextUtil.getTextByBinaryPresentation(file.contentsToByteArray(false), file).toString();
  }
  return null;
}
/**
 * Asserts that two jar files have identical content by extracting both into fresh temp directories
 * and comparing the resulting trees. The temp directory is always deleted afterwards.
 *
 * @throws IOException if either file cannot be opened as a jar (callers use this as a fallback signal)
 */
private static void assertJarFilesEqual(File file1, File file2) throws IOException {
  final File tempDir = FileUtilRt.createTempDirectory("assert_jar_tmp", null, false);
  try {
    final File extractDir1 = new File(tempDir, "tmp1");
    final File extractDir2 = new File(tempDir, "tmp2");
    FileUtilRt.createDirectory(extractDir1);
    FileUtilRt.createDirectory(extractDir2);
    try (JarFile jarFile1 = new JarFile(file1); JarFile jarFile2 = new JarFile(file2)) {
      new Decompressor.Zip(new File(jarFile1.getName())).extract(extractDir1);
      new Decompressor.Zip(new File(jarFile2.getName())).extract(extractDir2);
    }
    final VirtualFile dirAfter = LocalFileSystem.getInstance().refreshAndFindFileByIoFile(extractDir1);
    assertNotNull(extractDir1.toString(), dirAfter);
    final VirtualFile dirBefore = LocalFileSystem.getInstance().refreshAndFindFileByIoFile(extractDir2);
    assertNotNull(extractDir2.toString(), dirBefore);
    // Refresh recursively so the comparison sees current on-disk state.
    ApplicationManager.getApplication().runWriteAction(() -> {
      dirAfter.refresh(false, true);
      dirBefore.refresh(false, true);
    });
    assertDirectoriesEqual(dirAfter, dirBefore);
  }
  finally {
    FileUtilRt.delete(tempDir);
  }
}
/** Returns the IDEA community sources root: {@code <home>/community} if that checkout exists, otherwise the home path itself. */
@NotNull
public static String getCommunityPath() {
  final String homePath = IdeaTestExecutionPolicy.getHomePathWithPolicy();
  if (new File(homePath, "community/.idea").isDirectory()) {
    return homePath + File.separatorChar + "community";
  }
  return homePath;
}

/** Returns the platform test-data directory, with forward slashes and a trailing slash. */
@NotNull
public static String getPlatformTestDataPath() {
  return getCommunityPath().replace(File.separatorChar, '/') + "/platform/platform-tests/testData/";
}
/**
 * Creates a comparator ordering tree nodes by their test-string presentation for the given
 * {@code printInfo}. Uses {@link Comparing#compare}, which tolerates {@code null} presentations.
 */
@NotNull
@Contract(pure = true)
public static Comparator<AbstractTreeNode<?>> createComparator(final Queryable.PrintInfo printInfo) {
  return (node1, node2) -> Comparing.compare(node1.toTestString(printInfo), node2.toTestString(printInfo));
}
/** Asserts {@code t} is not null and returns it with a non-null type. */
@NotNull
public static <T> T notNull(@Nullable T t) {
  assertNotNull(t);
  return t;
}

/** Loads a file's text with all line separators converted to {@code \n}. */
@NotNull
public static String loadFileText(@NotNull String fileName) throws IOException {
  return StringUtil.convertLineSeparators(FileUtil.loadFile(new File(fileName)));
}
/**
 * Runs {@code r} with the JVM default charset switched to {@code encoding}, restoring the previous
 * default afterwards. Any failure — including one thrown by {@code r} — is rethrown wrapped in a
 * {@link RuntimeException}.
 */
public static void withEncoding(@NotNull String encoding, @NotNull ThrowableRunnable<?> r) {
  Charset.forName(encoding); // check the encoding exists
  try {
    Charset oldCharset = Charset.defaultCharset();
    try {
      patchSystemFileEncoding(encoding);
      r.run();
    }
    finally {
      patchSystemFileEncoding(oldCharset.name());
    }
  }
  catch (Throwable t) {
    throw new RuntimeException(t);
  }
}

/**
 * Forces the JVM default charset to {@code encoding} by resetting {@link Charset}'s cached
 * {@code defaultCharset} field via reflection and overriding the {@code file.encoding} property,
 * so the next {@link Charset#defaultCharset()} call recomputes from the property.
 */
private static void patchSystemFileEncoding(String encoding) {
  ReflectionUtil.resetField(Charset.class, Charset.class, "defaultCharset");
  System.setProperty("file.encoding", encoding);
}
/** Runs {@code r} with {@code System.err} redirected to a no-op stream, restoring it afterwards. */
@SuppressWarnings("ImplicitDefaultCharsetUsage")
public static void withStdErrSuppressed(@NotNull Runnable r) {
  PrintStream std = System.err;
  System.setErr(new PrintStream(NULL));
  try {
    r.run();
  }
  finally {
    System.setErr(std);
  }
}

// Sink that discards everything written to it; used by withStdErrSuppressed().
@SuppressWarnings("IOResourceOpenedButNotSafelyClosed")
private static final OutputStream NULL = new OutputStream() {
  @Override
  public void write(int b) { }
};
/**
 * Runs the command (with stderr merged into stdout) and asserts a zero exit code,
 * using the captured stdout as the failure message.
 */
public static void assertSuccessful(@NotNull GeneralCommandLine command) {
  try {
    ProcessOutput output = ExecUtil.execAndGetOutput(command.withRedirectErrorStream(true));
    assertEquals(output.getStdout(), 0, output.getExitCode());
  }
  catch (ExecutionException e) {
    throw new RuntimeException(e);
  }
}
/**
 * Walks the PSI subtree under {@code element} and collects every reference that is a
 * {@link WebReference}, in visitation order.
 */
@NotNull
public static List<WebReference> collectWebReferences(@NotNull PsiElement element) {
  List<WebReference> result = new ArrayList<>();
  element.accept(new PsiRecursiveElementWalkingVisitor() {
    @Override
    public void visitElement(@NotNull PsiElement each) {
      for (PsiReference reference : each.getReferences()) {
        if (reference instanceof WebReference) {
          result.add((WebReference)reference);
        }
      }
      super.visitElement(each);
    }
  });
  return result;
}
/**
 * Returns {@code reference} itself if it is of {@code refType}, or the first matching inner reference
 * when it is a {@link PsiMultiReference}; otherwise fails with {@link AssertionError}.
 */
@NotNull
@SuppressWarnings("unchecked")
public static <T extends PsiReference> T getReferenceOfTypeWithAssertion(@Nullable PsiReference reference, Class<T> refType) {
  if (refType.isInstance(reference)) {
    return (T)reference;
  }
  if (reference instanceof PsiMultiReference) {
    for (PsiReference inner : ((PsiMultiReference)reference).getReferences()) {
      if (refType.isInstance(inner)) {
        return (T)inner;
      }
    }
  }
  throw new AssertionError(
    "given reference should be " + refType + " but " + (reference != null ? reference.getClass() : null) + " was given");
}
/** Registers a cleanup to be executed later by {@link #cleanupAllProjects()}. */
public static void registerProjectCleanup(@NotNull Runnable cleanup) {
  ourProjectCleanups.add(cleanup);
}

/** Runs and then clears all cleanups registered via {@link #registerProjectCleanup(Runnable)}. */
public static void cleanupAllProjects() {
  for (Runnable each : ourProjectCleanups) {
    each.run();
  }
  ourProjectCleanups.clear();
}
/**
 * Captures a YourKit memory snapshot via reflection when the profiler handler class is on the
 * classpath; silently does nothing when it is absent. Any other failure is printed to stderr
 * but not rethrown, so snapshotting never fails a test.
 */
public static void captureMemorySnapshot() {
  try {
    @SuppressWarnings("SpellCheckingInspection") String className = "com.jetbrains.performancePlugin.profilers.YourKitProfilerHandler";
    Method snapshot = ReflectionUtil.getMethod(Class.forName(className), "captureMemorySnapshot");
    if (snapshot != null) {
      Object path = snapshot.invoke(null);
      System.out.println("Memory snapshot captured to '" + path + "'");
    }
  }
  catch (ClassNotFoundException e) {
    // YourKitProfilerHandler is missing from the classpath, ignore
  }
  catch (Exception e) {
    e.printStackTrace(System.err);
  }
}
/**
 * Verifies that {@code comparator} fulfils the {@link Comparator} contract over all pairs and triples
 * of {@code values}: zero exactly for pairs considered equal by {@code equality}, antisymmetric
 * results for unequal pairs, and transitivity across every triple.
 */
public static <T> void assertComparisonContractNotViolated(@NotNull List<? extends T> values,
                                                           @NotNull Comparator<? super T> comparator,
                                                           @NotNull Equality<? super T> equality) {
  for (int i1 = 0; i1 < values.size(); i1++) {
    for (int i2 = i1; i2 < values.size(); i2++) {
      T value1 = values.get(i1);
      T value2 = values.get(i2);
      int result12 = comparator.compare(value1, value2);
      int result21 = comparator.compare(value2, value1);
      if (equality.equals(value1, value2)) {
        assertEquals(String.format("Equal, but not 0: '%s' - '%s'", value1, value2), 0, result12);
        assertEquals(String.format("Equal, but not 0: '%s' - '%s'", value2, value1), 0, result21);
      }
      else {
        if (result12 == 0) fail(String.format("Not equal, but 0: '%s' - '%s'", value1, value2));
        if (result21 == 0) fail(String.format("Not equal, but 0: '%s' - '%s'", value2, value1));
        if (Integer.signum(result12) == Integer.signum(result21)) {
          fail(String.format("Not symmetrical: '%s' - '%s'", value1, value2));
        }
      }
      // Check transitivity for every third element; note result31 compares 3 to 1 (reversed direction).
      for (int i3 = i2; i3 < values.size(); i3++) {
        T value3 = values.get(i3);
        int result23 = comparator.compare(value2, value3);
        int result31 = comparator.compare(value3, value1);
        if (!isTransitive(result12, result23, result31)) {
          fail(String.format("Not transitive: '%s' - '%s' - '%s'", value1, value2, value3));
        }
      }
    }
  }
}
/**
 * Returns whether the three comparison results (1 vs 2, 2 vs 3, 3 vs 1) are consistent with a
 * transitive ordering. Note the third argument compares in the "reversed" direction (3 vs 1),
 * which is why three results of the same non-zero sign form a contradiction (a cycle).
 */
private static boolean isTransitive(int result12, int result23, int result31) {
  // All three equal is the only consistent all-same-sign combination.
  if (result12 == 0 && result23 == 0 && result31 == 0) return true;
  if (result12 > 0 && result23 > 0 && result31 > 0) return false;
  if (result12 < 0 && result23 < 0 && result31 < 0) return false;
  // If one pair is equal, the two remaining results must have opposite signs.
  if (result12 == 0 && Integer.signum(result23) * Integer.signum(result31) >= 0) return false;
  if (result23 == 0 && Integer.signum(result12) * Integer.signum(result31) >= 0) return false;
  if (result31 == 0 && Integer.signum(result23) * Integer.signum(result12) >= 0) return false;
  return true;
}
/**
 * Temporarily replaces the default file-header include template with a multi-line "Created by ..."
 * stub — presumably so generated headers are long and content-free for tests (per method name;
 * confirm with callers). Restored when {@code parentDisposable} is disposed.
 */
public static void setLongMeaninglessFileIncludeTemplateTemporarilyFor(@NotNull Project project, @NotNull Disposable parentDisposable) {
  FileTemplateManagerImpl templateManager = (FileTemplateManagerImpl)FileTemplateManager.getInstance(project);
  templateManager.setDefaultFileIncludeTemplateTextTemporarilyForTest(FileTemplateManager.FILE_HEADER_TEMPLATE_NAME,
                                                                      "/**\n" +
                                                                      " * Created by ${USER} on ${DATE}.\n" +
                                                                      " */\n", parentDisposable);
}
/*
 * 1. Think twice before use - do you really need to use VFS.
 * 2. Be aware the method doesn't refresh VFS as it should be done in tests (see {@link PlatformTestCase#synchronizeTempDirVfs})
 *    (it is assumed that project is already created in a correct way).
 */
/**
 * Returns the project's base directory as a {@link VirtualFile}, creating the directory on disk
 * if it does not exist yet. Fails fast (NPE) when the project has no base path.
 * <p>
 * Fix: the create path previously re-read {@code project.getBasePath()} through a second
 * {@code Objects.requireNonNull} instead of reusing the already-validated {@code path} local —
 * redundant, and inconsistent if the base path could change between the two reads.
 */
@NotNull
public static VirtualFile getOrCreateProjectTestBaseDir(@NotNull Project project) {
  try {
    String path = Objects.requireNonNull(project.getBasePath());
    VirtualFile result = LocalFileSystem.getInstance().refreshAndFindFileByPath(path);
    if (result != null) {
      return result;
    }
    // createDirectories executes in write action
    return Objects.requireNonNull(VfsUtil.createDirectories(path));
  }
  catch (IOException e) {
    throw new RuntimeException(e);
  }
}
/**
 * Builds a run configuration from {@code element}'s context (project, module, location) using
 * {@code producer}. Returns {@code null} if the producer cannot create one from that context.
 */
@Nullable
public static RunConfiguration getRunConfiguration(@NotNull PsiElement element, @NotNull RunConfigurationProducer producer) {
  MapDataContext dataContext = new MapDataContext();
  dataContext.put(CommonDataKeys.PROJECT, element.getProject());
  dataContext.put(LangDataKeys.MODULE, ModuleUtilCore.findModuleForPsiElement(element));
  final Location<PsiElement> location = PsiLocation.fromPsiElement(element);
  dataContext.put(Location.DATA_KEY, location);
  ConfigurationContext cc = ConfigurationContext.getFromContext(dataContext);
  final ConfigurationFromContext configuration = producer.createConfigurationFromContext(cc);
  return configuration != null ? configuration.getConfiguration() : null;
}
/** Executes the configuration with the default "Run" executor; see {@link #executeConfiguration(RunConfiguration, String)}. */
public static ExecutionEnvironment executeConfiguration(@NotNull RunConfiguration runConfiguration) throws InterruptedException {
  return executeConfiguration(runConfiguration, DefaultRunExecutor.EXECUTOR_ID);
}
/**
 * Executes {@code runConfiguration} with the executor identified by {@code executorId}, waits up to
 * 60 seconds for the process to start and another 60 seconds for it to finish, and logs its output.
 * <p>
 * Fix: the return value of {@code latch.await(60, SECONDS)} was previously ignored, so a process
 * that never started surfaced as an opaque NPE on {@code refRunContentDescriptor.get()}; now the
 * timeout fails with an explicit message.
 *
 * @return the execution environment the configuration was launched with
 */
public static ExecutionEnvironment executeConfiguration(@NotNull RunConfiguration runConfiguration, @NotNull String executorId) throws InterruptedException {
  Project project = runConfiguration.getProject();
  ConfigurationFactory factory = runConfiguration.getFactory();
  if (factory == null) {
    fail("No factory found for: " + runConfiguration);
  }
  RunnerAndConfigurationSettings runnerAndConfigurationSettings =
    RunManager.getInstance(project).createConfiguration(runConfiguration, factory);
  ProgramRunner runner = ProgramRunner.getRunner(executorId, runConfiguration);
  if (runner == null) {
    fail("No runner found for: " + executorId + " and " + runConfiguration);
  }
  Ref<RunContentDescriptor> refRunContentDescriptor = new Ref<>();
  ExecutionEnvironment executionEnvironment =
    new ExecutionEnvironment(DefaultRunExecutor.getRunExecutorInstance(), runner, runnerAndConfigurationSettings,
                             project);
  // Released from the async start callback once the process is running.
  CountDownLatch latch = new CountDownLatch(1);
  ProgramRunnerUtil.executeConfigurationAsync(executionEnvironment, false, false, new ProgramRunner.Callback() {
    @Override
    public void processStarted(RunContentDescriptor descriptor) {
      LOG.debug("Process started");
      refRunContentDescriptor.set(descriptor);
      latch.countDown();
    }
  });
  if (!latch.await(60, TimeUnit.SECONDS)) {
    fail("Process did not start in 60 seconds: " + runConfiguration);
  }
  ProcessHandler processHandler = refRunContentDescriptor.get().getProcessHandler();
  if (processHandler == null) {
    fail("No process handler found");
  }
  CapturingProcessAdapter capturingProcessAdapter = new CapturingProcessAdapter();
  processHandler.addProcessListener(capturingProcessAdapter);
  processHandler.waitFor(60000);
  LOG.debug("Process terminated: " + processHandler.isProcessTerminated());
  ProcessOutput processOutput = capturingProcessAdapter.getOutput();
  LOG.debug("Exit code: " + processOutput.getExitCode());
  LOG.debug("Stdout: " + processOutput.getStdout());
  LOG.debug("Stderr: " + processOutput.getStderr());
  return executionEnvironment;
}
/**
 * Finds the PSI element at the first occurrence of {@code signature} in the file at
 * {@code fileRelativePath} (relative to the project base dir).
 * Throws {@link IllegalArgumentException} when the file does not exist; returns {@code null} when no
 * PSI file is available or the signature is absent.
 */
public static PsiElement findElementBySignature(@NotNull String signature, @NotNull String fileRelativePath, @NotNull Project project) {
  String filePath = project.getBasePath() + File.separator + fileRelativePath;
  VirtualFile virtualFile = LocalFileSystem.getInstance().findFileByPath(filePath);
  if (virtualFile == null || !virtualFile.exists()) {
    throw new IllegalArgumentException(String.format("File '%s' doesn't exist", filePath));
  }
  PsiFile psiFile = PsiManager.getInstance(project).findFile(virtualFile);
  // findElementAt(-1) (signature not found) yields null, matching the "no PSI file" result.
  return psiFile == null ? null : psiFile.findElementAt(psiFile.getText().indexOf(signature));
}
/**
 * Runs {@code task} against a clean application config directory. An existing config dir is
 * atomically moved aside to {@code <config>_bak} first; afterwards the task's config dir is deleted
 * and the backup (if any) is moved back, even when the task throws.
 */
public static void useAppConfigDir(ThrowableRunnable<? extends Exception> task) throws Exception {
  Path configDir = PathManager.getConfigDir();
  Path configCopy;
  if (Files.exists(configDir)) {
    configCopy = Files.move(configDir, Paths.get(configDir + "_bak"), StandardCopyOption.ATOMIC_MOVE, StandardCopyOption.REPLACE_EXISTING);
  }
  else {
    // Nothing to back up; delete is a defensive no-op for a stale/broken path.
    FileUtil.delete(configDir);
    configCopy = null;
  }
  try {
    task.run();
  }
  finally {
    FileUtil.delete(configDir);
    if (configCopy != null) {
      Files.move(configCopy, configDir, StandardCopyOption.ATOMIC_MOVE);
    }
  }
}
}
|
|
/*
* Copyright 2013-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.event.listener;
import com.facebook.buck.artifact_cache.ArtifactCacheEvent;
import com.facebook.buck.core.model.BuildId;
import com.facebook.buck.core.test.event.TestRunEvent;
import com.facebook.buck.core.test.event.TestStatusMessageEvent;
import com.facebook.buck.core.test.event.TestSummaryEvent;
import com.facebook.buck.distributed.DistBuildCreatedEvent;
import com.facebook.buck.distributed.DistBuildStatusEvent;
import com.facebook.buck.distributed.StampedeLocalBuildStatusEvent;
import com.facebook.buck.distributed.build_client.DistBuildSuperConsoleEvent;
import com.facebook.buck.distributed.build_client.StampedeConsoleEvent;
import com.facebook.buck.distributed.thrift.BuildSlaveRunId;
import com.facebook.buck.distributed.thrift.BuildSlaveStatus;
import com.facebook.buck.event.ActionGraphEvent;
import com.facebook.buck.event.ArtifactCompressionEvent;
import com.facebook.buck.event.ConsoleEvent;
import com.facebook.buck.event.DaemonEvent;
import com.facebook.buck.event.FlushConsoleEvent;
import com.facebook.buck.event.LeafEvent;
import com.facebook.buck.event.LeafEvents;
import com.facebook.buck.event.ParsingEvent;
import com.facebook.buck.event.RuleKeyCalculationEvent;
import com.facebook.buck.event.WatchmanStatusEvent;
import com.facebook.buck.log.Logger;
import com.facebook.buck.step.StepEvent;
import com.facebook.buck.test.TestResultSummary;
import com.facebook.buck.test.TestResultSummaryVerbosity;
import com.facebook.buck.test.TestResults;
import com.facebook.buck.test.TestStatusMessage;
import com.facebook.buck.test.result.type.ResultType;
import com.facebook.buck.util.Console;
import com.facebook.buck.util.ExitCode;
import com.facebook.buck.util.MoreIterables;
import com.facebook.buck.util.environment.ExecutionEnvironment;
import com.facebook.buck.util.timing.Clock;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Charsets;
import com.google.common.base.Joiner;
import com.google.common.base.Preconditions;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import com.google.common.eventbus.Subscribe;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.file.Path;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.TimeZone;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Function;
import java.util.logging.Level;
import javax.annotation.concurrent.GuardedBy;
/** Console that provides rich, updating ansi output about the current build. */
public class SuperConsoleEventBusListener extends AbstractConsoleEventBusListener {
/**
 * Maximum expected rendered line length so we can start with a decent size of line rendering
 * buffer.
 */
private static final int EXPECTED_MAXIMUM_RENDERED_LINE_LENGTH = 128;
private static final Logger LOG = Logger.get(SuperConsoleEventBusListener.class);
@VisibleForTesting static final String EMOJI_BUNNY = "\uD83D\uDC07";
private final Locale locale;
private final Function<Long, String> formatTimeFunction;
// Per-thread views of in-flight test/step events, used when rendering thread lines.
private final ConcurrentMap<Long, Optional<? extends TestSummaryEvent>>
    threadsToRunningTestSummaryEvent;
private final ConcurrentMap<Long, Optional<? extends TestStatusMessageEvent>>
    threadsToRunningTestStatusMessageEvent;
private final ConcurrentMap<Long, Optional<? extends LeafEvent>> threadsToRunningStep;
// Console events queued for rendering below the live lines.
private final ConcurrentLinkedQueue<ConsoleEvent> logEvents;
private final ScheduledExecutorService renderScheduler;
private final TestResultFormatter testFormatter;
// Test-run statistics, accumulated from event-bus callbacks.
private final AtomicInteger numPassingTests = new AtomicInteger(0);
private final AtomicInteger numFailingTests = new AtomicInteger(0);
private final AtomicInteger numExcludedTests = new AtomicInteger(0);
private final AtomicInteger numDisabledTests = new AtomicInteger(0);
private final AtomicInteger numAssumptionViolationTests = new AtomicInteger(0);
private final AtomicInteger numDryRunTests = new AtomicInteger(0);
private final AtomicReference<TestRunEvent.Started> testRunStarted;
private final AtomicReference<TestRunEvent.Finished> testRunFinished;
private final ImmutableList.Builder<String> testReportBuilder = ImmutableList.builder();
private final ImmutableList.Builder<TestStatusMessage> testStatusMessageBuilder =
    ImmutableList.builder();
// Once a warning/error has been printed, the thread-line limit may change (see the limits below).
private final AtomicBoolean anyWarningsPrinted = new AtomicBoolean(false);
private final AtomicBoolean anyErrorsPrinted = new AtomicBoolean(false);
private final int defaultThreadLineLimit;
private final int threadLineLimitOnWarning;
private final int threadLineLimitOnError;
private final boolean shouldAlwaysSortThreadsByTime;
private final long buildRuleMinimumDurationMillis;
private final DateFormat dateFormat;
// Number of lines emitted by the previous render pass, so they can be rewritten in place.
private int lastNumLinesPrinted;
private Optional<String> parsingStatus = Optional.empty();
// Save if Watchman reported zero file changes in case we receive an ActionGraphCache hit. This
// way the user can know that their changes, if they made any, were not picked up from Watchman.
private boolean isZeroFileChanges = false;
private final Object distBuildSlaveTrackerLock = new Object();
private long minimumDurationMillisecondsToShowParse;
private long minimumDurationMillisecondsToShowActionGraph;
private long minimumDurationMillisecondsToShowWatchman;
private boolean hideEmptyDownload;
@GuardedBy("distBuildSlaveTrackerLock")
private final Map<BuildSlaveRunId, BuildSlaveStatus> distBuildSlaveTracker;
private volatile StampedeLocalBuildStatusEvent stampedeLocalBuildStatus =
    new StampedeLocalBuildStatusEvent("init");
private volatile Optional<DistBuildSuperConsoleEvent> stampedeSuperConsoleEvent =
    Optional.empty();
private Optional<String> stampedeIdLogLine = Optional.empty();
private final Set<String> actionGraphCacheMessage = new HashSet<>();
/** Maximum width of the terminal. */
private final int outputMaxColumns;
private final Optional<String> buildIdLine;
/**
 * Creates a listener with production defaults: parse and action-graph phases are shown when they
 * take longer than 500 ms, Watchman when longer than 1000 ms, and empty downloads are hidden
 * (see the {@code @VisibleForTesting} constructor for the parameter mapping).
 */
public SuperConsoleEventBusListener(
    SuperConsoleConfig config,
    Console console,
    Clock clock,
    TestResultSummaryVerbosity summaryVerbosity,
    ExecutionEnvironment executionEnvironment,
    Locale locale,
    Path testLogPath,
    TimeZone timeZone,
    Optional<BuildId> buildId) {
  this(
      config,
      console,
      clock,
      summaryVerbosity,
      executionEnvironment,
      locale,
      testLogPath,
      timeZone,
      500L,
      500L,
      1000L,
      true,
      buildId);
}
/**
 * Full constructor exposing the phase-duration display thresholds and download hiding for tests.
 *
 * @param minimumDurationMillisecondsToShowParse parse phases shorter than this are not rendered
 * @param minimumDurationMillisecondsToShowActionGraph action-graph phases shorter than this are not rendered
 * @param minimumDurationMillisecondsToShowWatchman Watchman phases shorter than this are not rendered
 * @param hideEmptyDownload whether to hide the download line when nothing was downloaded
 */
@VisibleForTesting
public SuperConsoleEventBusListener(
    SuperConsoleConfig config,
    Console console,
    Clock clock,
    TestResultSummaryVerbosity summaryVerbosity,
    ExecutionEnvironment executionEnvironment,
    Locale locale,
    Path testLogPath,
    TimeZone timeZone,
    long minimumDurationMillisecondsToShowParse,
    long minimumDurationMillisecondsToShowActionGraph,
    long minimumDurationMillisecondsToShowWatchman,
    boolean hideEmptyDownload,
    Optional<BuildId> buildId) {
  super(
      console,
      clock,
      locale,
      executionEnvironment,
      false,
      config.getNumberOfSlowRulesToShow(),
      config.shouldShowSlowRulesInConsole());
  this.locale = locale;
  this.formatTimeFunction = this::formatElapsedTime;
  this.threadsToRunningTestSummaryEvent =
      new ConcurrentHashMap<>(executionEnvironment.getAvailableCores());
  this.threadsToRunningTestStatusMessageEvent =
      new ConcurrentHashMap<>(executionEnvironment.getAvailableCores());
  this.threadsToRunningStep = new ConcurrentHashMap<>(executionEnvironment.getAvailableCores());
  this.logEvents = new ConcurrentLinkedQueue<>();
  // Single-threaded scheduler dedicated to periodic console rendering (see startRenderScheduler).
  this.renderScheduler =
      Executors.newScheduledThreadPool(
          1,
          new ThreadFactoryBuilder().setNameFormat(getClass().getSimpleName() + "-%d").build());
  this.testFormatter =
      new TestResultFormatter(
          console.getAnsi(),
          console.getVerbosity(),
          summaryVerbosity,
          locale,
          Optional.of(testLogPath));
  this.testRunStarted = new AtomicReference<>();
  this.testRunFinished = new AtomicReference<>();
  this.defaultThreadLineLimit = config.getThreadLineLimit();
  this.threadLineLimitOnWarning = config.getThreadLineLimitOnWarning();
  this.threadLineLimitOnError = config.getThreadLineLimitOnError();
  this.shouldAlwaysSortThreadsByTime = config.shouldAlwaysSortThreadsByTime();
  this.buildRuleMinimumDurationMillis = config.getBuildRuleMinimumDurationMillis();
  this.minimumDurationMillisecondsToShowParse = minimumDurationMillisecondsToShowParse;
  this.minimumDurationMillisecondsToShowActionGraph =
      minimumDurationMillisecondsToShowActionGraph;
  this.minimumDurationMillisecondsToShowWatchman = minimumDurationMillisecondsToShowWatchman;
  this.hideEmptyDownload = hideEmptyDownload;
  this.dateFormat = new SimpleDateFormat("[yyyy-MM-dd HH:mm:ss.SSS]", this.locale);
  this.dateFormat.setTimeZone(timeZone);
  // Using LinkedHashMap because we want a predictable iteration order.
  this.distBuildSlaveTracker = new LinkedHashMap<>();
  // Terminal width: config takes precedence, then $BUCK_TERM_COLUMNS, then the default of 80.
  int outputMaxColumns = 80;
  if (config.getThreadLineOutputMaxColumns().isPresent()) {
    outputMaxColumns = config.getThreadLineOutputMaxColumns().get();
  } else {
    Optional<String> columnsStr = executionEnvironment.getenv("BUCK_TERM_COLUMNS");
    if (columnsStr.isPresent()) {
      try {
        outputMaxColumns = Integer.parseInt(columnsStr.get());
      } catch (NumberFormatException e) {
        LOG.debug(
            "the environment variable BUCK_TERM_COLUMNS did not contain a valid value: %s",
            columnsStr.get());
      }
    }
    // If the parsed value is zero, we reset the value to the default 80.
    if (outputMaxColumns == 0) {
      outputMaxColumns = 80;
    }
  }
  this.outputMaxColumns = outputMaxColumns;
  this.buildIdLine =
      buildId.isPresent()
          ? Optional.of(SimpleConsoleEventBusListener.getBuildLogLine(buildId.get()))
          : Optional.empty();
}
/**
 * Schedules a runnable that updates the console output at a fixed interval.
 *
 * @param renderInterval both the initial delay and the period between renders
 * @param timeUnit the unit of {@code renderInterval}
 */
public void startRenderScheduler(long renderInterval, TimeUnit timeUnit) {
  LOG.debug("Starting render scheduler (interval %d ms)", timeUnit.toMillis(renderInterval));
  Runnable renderTask =
      () -> {
        try {
          SuperConsoleEventBusListener.this.render();
        } catch (Error | RuntimeException e) {
          // Log before rethrowing: the scheduler silently suppresses further runs on failure.
          LOG.error(e, "Rendering exception");
          throw e;
        }
      };
  // The same interval is used for the initial delay and the repeat period.
  renderScheduler.scheduleAtFixedRate(renderTask, renderInterval, renderInterval, timeUnit);
}
/** Shuts down the thread pool and cancels the fixed interval runnable. */
private synchronized void stopRenderScheduler() {
  LOG.debug("Stopping render scheduler");
  // shutdownNow() also interrupts any in-flight render task.
  renderScheduler.shutdownNow();
}
/**
 * Renders one frame to stderr: drains pending log lines, rewrites the status area, and
 * permanently stops rendering if another writer has dirtied the console streams.
 */
@VisibleForTesting
synchronized void render() {
  LOG.verbose("Rendering");
  int previousNumLinesPrinted = lastNumLinesPrinted;
  ImmutableList<String> lines = createRenderLinesAtTime(clock.currentTimeMillis());
  ImmutableList<String> logLines = createLogRenderLines();
  lastNumLinesPrinted = lines.size();
  // Synchronize on the DirtyPrintStreamDecorator to prevent interlacing of output.
  // We don't log immediately so we avoid locking the console handler to avoid deadlocks.
  boolean stderrDirty;
  boolean stdoutDirty;
  // Lock order is stderr then stdout; keep any other code that takes both consistent with this.
  synchronized (console.getStdErr()) {
    synchronized (console.getStdOut()) {
      // If another source has written to stderr, stop rendering with the SuperConsole.
      // We need to do this to keep our updates consistent. We don't do this with stdout
      // because we don't use it directly except in a couple of cases, where the
      // synchronization in DirtyPrintStreamDecorator should be sufficient
      stderrDirty = console.getStdErr().isDirty();
      stdoutDirty = console.getStdOut().isDirty();
      if (stderrDirty || stdoutDirty) {
        stopRenderScheduler();
      } else if (previousNumLinesPrinted != 0 || !lines.isEmpty() || !logLines.isEmpty()) {
        String fullFrame = renderFullFrame(logLines, lines, previousNumLinesPrinted);
        console.getStdErr().getRawStream().print(fullFrame);
      }
    }
  }
  // Deferred logging: done outside the console locks (see comment above).
  if (stderrDirty) {
    LOG.debug("Stopping console output (stderr was dirty).");
  }
}
/**
 * Builds the full ANSI escape sequence for one frame: moves the cursor to the top of the
 * previous frame, writes log lines followed by the (non-wrapping) status lines, then clears
 * any leftover lines from a taller previous frame and moves the cursor back up past them.
 *
 * @param logLines one-shot log output, printed above the status area
 * @param lines the status lines for this frame
 * @param previousNumLinesPrinted height of the previous frame, used to reposition the cursor
 * @return the complete frame, ready to print to the raw stderr stream in one call
 */
private String renderFullFrame(
    ImmutableList<String> logLines, ImmutableList<String> lines, int previousNumLinesPrinted) {
  int currentNumLines = lines.size();
  Iterable<String> renderedLines =
      Iterables.concat(
          MoreIterables.zipAndConcat(
              Iterables.cycle(ansi.clearLine()),
              logLines,
              Iterables.cycle(ansi.clearToTheEndOfLine() + "\n")),
          // Status lines must not wrap, or the cursor math below would be wrong.
          ansi.asNoWrap(
              MoreIterables.zipAndConcat(
                  Iterables.cycle(ansi.clearLine()),
                  lines,
                  Iterables.cycle(ansi.clearToTheEndOfLine() + "\n"))));
  // Number of lines remaining to clear because of old output once we displayed
  // the new output.
  int remainingLinesToClear =
      previousNumLinesPrinted > currentNumLines ? previousNumLinesPrinted - currentNumLines : 0;
  StringBuilder fullFrame = new StringBuilder();
  // We move the cursor back to the top.
  for (int i = 0; i < previousNumLinesPrinted; i++) {
    fullFrame.append(ansi.cursorPreviousLine(1));
  }
  // We display the new output.
  for (String part : renderedLines) {
    fullFrame.append(part);
  }
  // We clear the remaining lines of the old output.
  for (int i = 0; i < remainingLinesToClear; i++) {
    fullFrame.append(ansi.clearLine() + "\n");
  }
  // We move the cursor at the end of the new output.
  for (int i = 0; i < remainingLinesToClear; i++) {
    fullFrame.append(ansi.cursorPreviousLine(1));
  }
  return fullFrame.toString();
}
/**
 * Creates a list of lines to be rendered at a given time.
 *
 * <p>Order of sections: build-id line, watchman, parsing, action graph, project generation,
 * then (only once parsing/action graph have finished) distributed-build status, network
 * stats, local build with per-thread lines, tests, install, and cache uploads.
 *
 * @param currentTimeMillis The time in ms to use when computing elapsed times.
 */
@VisibleForTesting
public ImmutableList<String> createRenderLinesAtTime(long currentTimeMillis) {
  ImmutableList.Builder<String> lines = ImmutableList.builder();
  if (buildIdLine.isPresent()) {
    lines.add(buildIdLine.get());
  }
  logEventPair(
      "Processing filesystem changes",
      Optional.empty(),
      currentTimeMillis,
      /* offsetMs */ 0L,
      watchmanStarted,
      watchmanFinished,
      Optional.empty(),
      Optional.of(this.minimumDurationMillisecondsToShowWatchman),
      lines);
  long parseTime =
      logEventPair(
          "Parsing buck files",
          /* suffix */ Optional.empty(),
          currentTimeMillis,
          /* offsetMs */ 0L,
          buckFilesParsingEvents.values(),
          getEstimatedProgressOfParsingBuckFiles(),
          Optional.of(this.minimumDurationMillisecondsToShowParse),
          lines);
  long actionGraphTime =
      logEventPair(
          "Creating action graph",
          /* suffix */ Optional.empty(),
          currentTimeMillis,
          /* offsetMs */ 0L,
          actionGraphEvents.values(),
          // NOTE(review): this reuses the parse-progress estimate for the action-graph
          // phase — confirm this is intentional rather than a copy/paste of the line above.
          getEstimatedProgressOfParsingBuckFiles(),
          Optional.of(this.minimumDurationMillisecondsToShowActionGraph),
          lines);
  logEventPair(
      "Generating project",
      Optional.empty(),
      currentTimeMillis,
      /* offsetMs */ 0L,
      projectGenerationStarted,
      projectGenerationFinished,
      getEstimatedProgressOfGeneratingProjectFiles(),
      Optional.empty(),
      lines);
  // If parsing has not finished, then there is no build rule information to print yet.
  if (buildStarted == null
      || parseTime == UNFINISHED_EVENT_PAIR
      || actionGraphTime == UNFINISHED_EVENT_PAIR) {
    return lines.build();
  }
  // Tighten the per-thread line budget once warnings/errors have been printed, so the
  // console leaves room for them.
  int maxThreadLines = defaultThreadLineLimit;
  if (anyWarningsPrinted.get() && threadLineLimitOnWarning < maxThreadLines) {
    maxThreadLines = threadLineLimitOnWarning;
  }
  if (anyErrorsPrinted.get() && threadLineLimitOnError < maxThreadLines) {
    maxThreadLines = threadLineLimitOnError;
  }
  String localBuildLinePrefix = "Building";
  // Stampede (distributed build) section, only when a stampede console event was seen.
  if (stampedeSuperConsoleEvent.isPresent()) {
    localBuildLinePrefix = stampedeLocalBuildStatus.getLocalBuildLinePrefix();
    stampedeIdLogLine.ifPresent(lines::add);
    stampedeSuperConsoleEvent
        .get()
        .getMessage()
        .ifPresent(msg -> lines.add(ansi.asInformationText(msg)));
    long distBuildMs =
        logEventPair(
            "Distributed Build",
            getOptionalDistBuildLineSuffix(),
            currentTimeMillis,
            0,
            this.distBuildStarted,
            this.distBuildFinished,
            getApproximateDistBuildProgress(),
            Optional.empty(),
            lines);
    // While the distributed build is still running, show per-slave status lines.
    if (distBuildMs == UNFINISHED_EVENT_PAIR) {
      MultiStateRenderer renderer;
      synchronized (distBuildSlaveTrackerLock) {
        renderer =
            new DistBuildSlaveStateRenderer(
                ansi, currentTimeMillis, ImmutableList.copyOf(distBuildSlaveTracker.values()));
      }
      renderLines(renderer, lines, maxThreadLines, true);
    }
  }
  if (networkStatsKeeper.getRemoteDownloadedArtifactsCount() > 0 || !this.hideEmptyDownload) {
    lines.add(getNetworkStatsLine(buildFinished));
  }
  // Check to see if the build encompasses the time spent parsing. This is true for runs of
  // buck build but not so for runs of e.g. buck project. If so, subtract parse times
  // from the build time.
  long buildStartedTime = buildStarted.getTimestamp();
  long buildFinishedTime =
      buildFinished != null ? buildFinished.getTimestamp() : currentTimeMillis;
  Collection<EventPair> filteredBuckFilesParsingEvents =
      getEventsBetween(buildStartedTime, buildFinishedTime, buckFilesParsingEvents.values());
  Collection<EventPair> filteredActionGraphEvents =
      getEventsBetween(buildStartedTime, buildFinishedTime, actionGraphEvents.values());
  long offsetMs =
      getTotalCompletedTimeFromEventPairs(filteredBuckFilesParsingEvents)
          + getTotalCompletedTimeFromEventPairs(filteredActionGraphEvents);
  long totalBuildMs =
      logEventPair(
          localBuildLinePrefix,
          getOptionalBuildLineSuffix(),
          currentTimeMillis,
          offsetMs, // parseTime,
          this.buildStarted,
          this.buildFinished,
          getApproximateLocalBuildProgress(),
          Optional.empty(),
          lines);
  getTotalTimeLine(lines);
  showTopSlowBuildRules(lines);
  // While the build is running, show one line per busy build thread.
  if (totalBuildMs == UNFINISHED_EVENT_PAIR) {
    MultiStateRenderer renderer =
        new BuildThreadStateRenderer(
            ansi,
            formatTimeFunction,
            currentTimeMillis,
            outputMaxColumns,
            buildRuleMinimumDurationMillis,
            threadsToRunningStep,
            buildRuleThreadTracker);
    renderLines(renderer, lines, maxThreadLines, shouldAlwaysSortThreadsByTime);
  }
  long testRunTime =
      logEventPair(
          "Testing",
          renderTestSuffix(),
          currentTimeMillis,
          0, /* offsetMs */
          testRunStarted.get(),
          testRunFinished.get(),
          Optional.empty(),
          Optional.empty(),
          lines);
  // While tests are running, show one line per busy test thread.
  if (testRunTime == UNFINISHED_EVENT_PAIR) {
    MultiStateRenderer renderer =
        new TestThreadStateRenderer(
            ansi,
            formatTimeFunction,
            currentTimeMillis,
            outputMaxColumns,
            threadsToRunningTestSummaryEvent,
            threadsToRunningTestStatusMessageEvent,
            threadsToRunningStep,
            buildRuleThreadTracker);
    renderLines(renderer, lines, maxThreadLines, shouldAlwaysSortThreadsByTime);
  }
  logEventPair(
      "Installing",
      /* suffix */ Optional.empty(),
      currentTimeMillis,
      0L,
      installStarted,
      installFinished,
      Optional.empty(),
      Optional.empty(),
      lines);
  logHttpCacheUploads(lines);
  return lines.build();
}
/**
 * Appends a " Total time: ..." summary line once the tracked activity is complete:
 * project-generation time when a project generation ran, otherwise build time (with a final
 * success/failure status for stampede builds). Appends nothing while still in progress.
 */
private void getTotalTimeLine(ImmutableList.Builder<String> lines) {
  if (projectGenerationStarted != null) {
    // Project generation may or may not contain a build; wait for generation to finish.
    if (projectGenerationFinished != null) {
      long elapsedMs =
          projectGenerationFinished.getTimestamp() - projectGenerationStarted.getTimestamp();
      lines.add(" Total time: " + formatElapsedTime(elapsedMs));
    }
    return;
  }
  // No project generation: only report once the build has both started and finished.
  if (buildStarted == null || buildFinished == null) {
    return;
  }
  long elapsedMs = buildFinished.getTimestamp() - buildStarted.getTimestamp();
  StringBuilder finalLine =
      new StringBuilder(" Total time: ").append(formatElapsedTime(elapsedMs));
  if (distBuildStarted != null) {
    // Stampede build: print an explicit final status to reduce confusion from remote errors.
    finalLine.append(". ");
    if (buildFinished.getExitCode() == ExitCode.SUCCESS) {
      finalLine.append(ansi.asGreenText("Build successful."));
    } else {
      finalLine.append(ansi.asErrorText("Build failed."));
    }
  }
  lines.add(finalLine.toString());
}
/**
 * Builds the status suffix for the "Distributed Build" console line: the remote status,
 * job counts, cache-miss/error rates and upload errors (when known), followed by the local
 * build status.
 *
 * @return the suffix; always present, since the local status portion is always non-empty
 */
private Optional<String> getOptionalDistBuildLineSuffix() {
  List<String> columns = new ArrayList<>();
  synchronized (distBuildStatusLock) {
    if (!distBuildStatus.isPresent()) {
      columns.add("remote status: init");
    } else {
      distBuildStatus
          .get()
          .getStatus()
          // Locale.ROOT keeps the lowercased status keyword stable regardless of the
          // JVM's default locale (e.g. the Turkish dotless-i case mapping).
          .ifPresent(status -> columns.add("remote status: " + status.toLowerCase(Locale.ROOT)));
      int totalUploadErrorsCount = 0;
      ImmutableList.Builder<CacheRateStatsKeeper.CacheRateStatsUpdateEvent> slaveCacheStats =
          new ImmutableList.Builder<>();
      for (BuildSlaveStatus slaveStatus : distBuildStatus.get().getSlaveStatuses()) {
        totalUploadErrorsCount += slaveStatus.getHttpArtifactUploadsFailureCount();
        if (slaveStatus.isSetCacheRateStats()) {
          slaveCacheStats.add(
              CacheRateStatsKeeper.getCacheRateStatsUpdateEventFromSerializedStats(
                  slaveStatus.getCacheRateStats()));
        }
      }
      if (distBuildTotalRulesCount > 0) {
        columns.add(
            String.format("%d/%d jobs", distBuildFinishedRulesCount, distBuildTotalRulesCount));
      }
      CacheRateStatsKeeper.CacheRateStatsUpdateEvent aggregatedCacheStats =
          CacheRateStatsKeeper.getAggregatedCacheRateStats(slaveCacheStats.build());
      if (aggregatedCacheStats.getTotalRulesCount() != 0) {
        columns.add(String.format("%.1f%% cache miss", aggregatedCacheStats.getCacheMissRate()));
        if (aggregatedCacheStats.getCacheErrorCount() != 0) {
          columns.add(
              String.format(
                  "%d [%.1f%%] cache errors",
                  aggregatedCacheStats.getCacheErrorCount(),
                  aggregatedCacheStats.getCacheErrorRate()));
        }
      }
      if (totalUploadErrorsCount > 0) {
        columns.add(String.format("%d upload errors", totalUploadErrorsCount));
      }
    }
  }
  String localStatus = String.format("local status: %s", stampedeLocalBuildStatus.getStatus());
  String remoteStatusAndSummary = Joiner.on(", ").join(columns);
  if (remoteStatusAndSummary.isEmpty()) {
    return Optional.of(localStatus);
  }
  // Both parts are non-empty here, so the combined suffix is always present; the previous
  // Strings.isNullOrEmpty fallback to Optional.empty() was unreachable.
  return Optional.of(remoteStatusAndSummary + "; " + localStatus);
}
/**
 * Drains all queued console events and formats them into renderable log lines. Also records
 * whether any warning/severe events were seen, which tightens the per-thread line limits.
 */
@VisibleForTesting
ImmutableList<String> createLogRenderLines() {
  ImmutableList.Builder<String> renderedLogLines = ImmutableList.builder();
  for (ConsoleEvent event = logEvents.poll(); event != null; event = logEvents.poll()) {
    formatConsoleEvent(event, renderedLogLines);
    Level level = event.getLevel();
    if (Level.WARNING.equals(level)) {
      anyWarningsPrinted.set(true);
    } else if (Level.SEVERE.equals(level)) {
      anyErrorsPrinted.set(true);
    }
  }
  return renderedLogLines.build();
}
/**
 * Renders per-executor status lines using the provided renderer. When there are more
 * executors than {@code maxLines}, the busiest get dedicated lines and the remainder are
 * summarized on a single compressed line.
 *
 * @param renderer source of executor ids and line rendering
 * @param lines output builder the rendered lines are appended to
 * @param maxLines maximum number of lines this call may append
 * @param alwaysSortByTime whether to sort executors by time even without compression
 * @return the number of lines created.
 */
public int renderLines(
    MultiStateRenderer renderer,
    ImmutableList.Builder<String> lines,
    int maxLines,
    boolean alwaysSortByTime) {
  int executorCount = renderer.getExecutorCount();
  boolean compressOverflow = executorCount > maxLines;
  // When compressing, one line is reserved for the executors that don't get their own line.
  int dedicatedLines = compressOverflow ? maxLines - 1 : executorCount;
  int summarizedCount = executorCount - dedicatedLines;
  // Compressed output always sorts by time so the longest-running executors keep full lines.
  ImmutableList<Long> executorIds =
      renderer.getSortedExecutorIds(alwaysSortByTime || compressOverflow);
  StringBuilder scratch = new StringBuilder(EXPECTED_MAXIMUM_RENDERED_LINE_LENGTH);
  int rendered = 0;
  for (int i = 0; i < dedicatedLines; ++i) {
    scratch.setLength(0);
    lines.add(renderer.renderStatusLine(executorIds.get(i), scratch));
    ++rendered;
  }
  if (compressOverflow) {
    scratch.setLength(0);
    scratch.append(" - ").append(summarizedCount);
    String label = renderer.getExecutorCollectionLabel();
    scratch.append(
        dedicatedLines == 0 ? String.format(" %s:", label) : String.format(" MORE %s:", label));
    for (int i = dedicatedLines; i < executorIds.size(); ++i) {
      scratch.append(' ').append(renderer.renderShortStatus(executorIds.get(i)));
    }
    lines.add(scratch.toString());
    ++rendered;
  }
  return rendered;
}
/**
 * Builds the "(N PASS/N SKIP/N FAIL)" suffix for the "Testing" line, or empty when no test
 * has produced a result yet. Excluded tests are deliberately left out of the skip count.
 */
private Optional<String> renderTestSuffix() {
  int passed = numPassingTests.get();
  int failed = numFailingTests.get();
  // don't count numExcludedTests here
  int skipped =
      numDisabledTests.get() + numAssumptionViolationTests.get() + numDryRunTests.get();
  if (skipped > 0) {
    return Optional.of(
        String.format(locale, "(%d PASS/%d SKIP/%d FAIL)", passed, skipped, failed));
  }
  if (passed > 0 || failed > 0) {
    return Optional.of(String.format(locale, "(%d PASS/%d FAIL)", passed, failed));
  }
  return Optional.empty();
}
// The handlers below maintain threadsToRunningStep: each maps the event's thread id to the
// step currently running on that thread (Optional.empty() once it finishes), which the
// thread-state renderers read to draw per-thread status lines.

/** Records the step now running on the event's thread. */
@Subscribe
public void stepStarted(StepEvent.Started started) {
  threadsToRunningStep.put(started.getThreadId(), Optional.of(started));
}

/** Clears the running step for the event's thread. */
@Subscribe
public void stepFinished(StepEvent.Finished finished) {
  threadsToRunningStep.put(finished.getThreadId(), Optional.empty());
}

// TODO(cjhopman): We should introduce a simple LeafEvent-like thing that everything that logs
// step-like things can subscribe to.
@Subscribe
public void simpleLeafEventStarted(LeafEvents.SimpleLeafEvent.Started started) {
  threadsToRunningStep.put(started.getThreadId(), Optional.of(started));
}

@Subscribe
public void simpleLeafEventFinished(LeafEvents.SimpleLeafEvent.Finished finished) {
  threadsToRunningStep.put(finished.getThreadId(), Optional.empty());
}

/** Rule-key calculation is shown like a step on its thread. */
@Subscribe
public void ruleKeyCalculationStarted(RuleKeyCalculationEvent.Started started) {
  threadsToRunningStep.put(started.getThreadId(), Optional.of(started));
}

@Subscribe
public void ruleKeyCalculationFinished(RuleKeyCalculationEvent.Finished finished) {
  threadsToRunningStep.put(finished.getThreadId(), Optional.empty());
}
/**
 * Tracks the latest status reported for each distributed-build slave, keyed by its run id,
 * for rendering by the dist-build slave state renderer.
 */
@Override
@Subscribe
public void onDistBuildStatusEvent(DistBuildStatusEvent event) {
  super.onDistBuildStatusEvent(event);
  synchronized (distBuildSlaveTrackerLock) {
    for (BuildSlaveStatus status : event.getStatus().getSlaveStatuses()) {
      distBuildSlaveTracker.put(status.buildSlaveRunId, status);
    }
  }
}

/** Remembers the latest local-build status, used for the "Building" line prefix/suffix. */
@Subscribe
public void onStampedeLocalBuildStatusEvent(StampedeLocalBuildStatusEvent event) {
  this.stampedeLocalBuildStatus = event;
}

/** Captures the stampede-id log line so it can be rendered above the dist-build section. */
@Subscribe
public void onDistBuildCreatedEvent(DistBuildCreatedEvent event) {
  stampedeIdLogLine = Optional.of(event.getConsoleLogLine());
}

/** Marks that this is a stampede build; enables the distributed-build console section. */
@Subscribe
public void onDistBuildSuperConsoleEvent(DistBuildSuperConsoleEvent event) {
  stampedeSuperConsoleEvent = Optional.of(event);
}

/** When a new cache event is about to start. */
@Subscribe
public void artifactCacheStarted(ArtifactCacheEvent.Started started) {
  // Only synchronous cache operations block the thread, so only those are shown as the
  // thread's current step.
  if (started.getInvocationType() == ArtifactCacheEvent.InvocationType.SYNCHRONOUS) {
    threadsToRunningStep.put(started.getThreadId(), Optional.of(started));
  }
}

@Subscribe
public void artifactCacheFinished(ArtifactCacheEvent.Finished finished) {
  if (finished.getInvocationType() == ArtifactCacheEvent.InvocationType.SYNCHRONOUS) {
    threadsToRunningStep.put(finished.getThreadId(), Optional.empty());
  }
}

/** Artifact compression is always shown as the thread's current step. */
@Subscribe
public void artifactCompressionStarted(ArtifactCompressionEvent.Started started) {
  threadsToRunningStep.put(started.getThreadId(), Optional.of(started));
}

@Subscribe
public void artifactCompressionFinished(ArtifactCompressionEvent.Finished finished) {
  threadsToRunningStep.put(finished.getThreadId(), Optional.empty());
}
/**
 * Marks the start of a test run (at most one per listener lifetime, enforced by the CAS)
 * and buffers the formatted run header until the run's report is printed.
 */
@Subscribe
public void testRunStarted(TestRunEvent.Started event) {
  boolean set = testRunStarted.compareAndSet(null, event);
  Preconditions.checkState(set, "Test run should not start while test run in progress");
  ImmutableList.Builder<String> builder = ImmutableList.builder();
  testFormatter.runStarted(
      builder,
      event.isRunAllTests(),
      event.getTestSelectorList(),
      event.shouldExplainTestSelectorList(),
      event.getTargetNames(),
      TestResultFormatter.FormatMode.AFTER_TEST_RUN);
  synchronized (testReportBuilder) {
    testReportBuilder.addAll(builder.build());
  }
}

/**
 * Marks the end of the test run, assembles the full test report (results plus any status
 * messages), renders a final console frame, and prints the report to stdout.
 */
@Subscribe
public void testRunFinished(TestRunEvent.Finished finished) {
  boolean set = testRunFinished.compareAndSet(null, finished);
  Preconditions.checkState(set, "Test run should not finish after test run already finished");
  ImmutableList.Builder<String> builder = ImmutableList.builder();
  for (TestResults results : finished.getResults()) {
    testFormatter.reportResult(builder, results);
  }
  ImmutableList<TestStatusMessage> testStatusMessages;
  synchronized (testStatusMessageBuilder) {
    testStatusMessages = testStatusMessageBuilder.build();
  }
  testFormatter.runComplete(builder, finished.getResults(), testStatusMessages);
  String testOutput;
  synchronized (testReportBuilder) {
    testReportBuilder.addAll(builder.build());
    testOutput = Joiner.on('\n').join(testReportBuilder.build());
  }
  // We're about to write to stdout, so make sure we render the final frame before we do.
  render();
  synchronized (console.getStdOut()) {
    console.getStdOut().println(testOutput);
  }
}
/** Tracks the in-flight status message per thread and archives it for the final report. */
@Subscribe
public void testStatusMessageStarted(TestStatusMessageEvent.Started started) {
  threadsToRunningTestStatusMessageEvent.put(started.getThreadId(), Optional.of(started));
  synchronized (testStatusMessageBuilder) {
    testStatusMessageBuilder.add(started.getTestStatusMessage());
  }
}

@Subscribe
public void testStatusMessageFinished(TestStatusMessageEvent.Finished finished) {
  threadsToRunningTestStatusMessageEvent.put(finished.getThreadId(), Optional.empty());
  synchronized (testStatusMessageBuilder) {
    testStatusMessageBuilder.add(finished.getTestStatusMessage());
  }
}

/** Records the test summary currently being produced on the event's thread. */
@Subscribe
public void testSummaryStarted(TestSummaryEvent.Started started) {
  threadsToRunningTestSummaryEvent.put(started.getThreadId(), Optional.of(started));
}

/**
 * Updates the per-result-type counters used by {@code renderTestSuffix}; failures are also
 * queued as severe log lines so they surface immediately in the console.
 */
@Subscribe
public void testSummaryFinished(TestSummaryEvent.Finished finished) {
  threadsToRunningTestSummaryEvent.put(finished.getThreadId(), Optional.empty());
  TestResultSummary testResult = finished.getTestResultSummary();
  ResultType resultType = testResult.getType();
  switch (resultType) {
    case SUCCESS:
      numPassingTests.incrementAndGet();
      break;
    case FAILURE:
      numFailingTests.incrementAndGet();
      // We don't use TestResultFormatter.reportResultSummary() here since that also
      // includes the stack trace and stdout/stderr.
      logEvents.add(
          ConsoleEvent.severe(
              String.format(
                  locale,
                  "%s %s %s: %s",
                  testResult.getType().toString(),
                  testResult.getTestCaseName(),
                  testResult.getTestName(),
                  testResult.getMessage())));
      break;
    case ASSUMPTION_VIOLATION:
      numAssumptionViolationTests.incrementAndGet();
      break;
    case DISABLED:
      numDisabledTests.incrementAndGet();
      break;
    case DRY_RUN:
      numDryRunTests.incrementAndGet();
      break;
    case EXCLUDED:
      numExcludedTests.incrementAndGet();
      break;
  }
}
/** Queues a console event for the next frame; in silent mode only severe events get through. */
@Subscribe
public void logEvent(ConsoleEvent event) {
  if (console.getVerbosity().isSilent() && !event.getLevel().equals(Level.SEVERE)) {
    return;
  }
  logEvents.add(event);
}

/** Stampede console events are only relevant (and only shown) during a stampede build. */
@Subscribe
public void logStampedeConsoleEvent(StampedeConsoleEvent event) {
  if (stampedeSuperConsoleEvent.isPresent()) {
    logEvent(event.getConsoleEvent());
  }
}

/** Renders a frame immediately, outside the fixed render schedule. */
@Subscribe
public void forceRender(@SuppressWarnings("unused") FlushConsoleEvent event) {
  render();
}

/** Queues the line as a severe console event so it bypasses silent-mode filtering. */
@Override
public void printSevereWarningDirectly(String line) {
  logEvents.add(ConsoleEvent.severe(line));
}

/**
 * Queues an info line at most once per distinct message (suppressed entirely in silent mode).
 * NOTE(review): this is check-then-add on actionGraphCacheMessage, whose declaration is not
 * visible here — presumably a Set; confirm, and whether concurrent callers are possible.
 */
private void printInfoDirectlyOnce(String line) {
  if (console.getVerbosity().isSilent()) {
    return;
  }
  if (!actionGraphCacheMessage.contains(line)) {
    logEvents.add(ConsoleEvent.info(line));
    actionGraphCacheMessage.add(line);
  }
}
// The handlers below record why the action graph was (or wasn't) rebuilt; parsingStatus is
// a diagnostic string exposed via getParsingStatus() for testing.

@Subscribe
@SuppressWarnings("unused")
public void actionGraphCacheHit(ActionGraphEvent.Cache.Hit event) {
  // We don't need to report when it's fast.
  if (isZeroFileChanges) {
    LOG.debug("Action graph cache hit: Watchman reported no changes");
  } else {
    LOG.debug("Action graph cache hit");
  }
  parsingStatus = Optional.of("actionGraphCacheHit");
}

/** A watchman overflow forces an action-graph rebuild; tell the user why. */
@Subscribe
public void watchmanOverflow(WatchmanStatusEvent.Overflow event) {
  printInfoDirectlyOnce(
      "Action graph will be rebuilt because there was an issue with watchman:\n"
          + event.getReason());
  parsingStatus = Optional.of("watchmanOverflow: " + event.getReason());
}

// Shared message for file-addition and file-deletion rebuild causes (printed at most once).
private void printFileAddedOrRemoved() {
  printInfoDirectlyOnce("Action graph will be rebuilt because files have been added or removed.");
}

@Subscribe
public void watchmanFileCreation(WatchmanStatusEvent.FileCreation event) {
  LOG.debug("Watchman notified about file addition: " + event.getFilename());
  printFileAddedOrRemoved();
  parsingStatus = Optional.of("watchmanFileCreation");
}

@Subscribe
public void watchmanFileDeletion(WatchmanStatusEvent.FileDeletion event) {
  LOG.debug("Watchman notified about file deletion: " + event.getFilename());
  printFileAddedOrRemoved();
  parsingStatus = Optional.of("watchmanFileDeletion");
}

/** Remembers that watchman saw no changes, which qualifies the next cache-hit log line. */
@Subscribe
@SuppressWarnings("unused")
public void watchmanZeroFileChanges(WatchmanStatusEvent.ZeroFileChanges event) {
  isZeroFileChanges = true;
  parsingStatus = Optional.of("watchmanZeroFileChanges");
}

@Subscribe
@SuppressWarnings("unused")
public void daemonNewInstance(DaemonEvent.NewDaemonInstance event) {
  parsingStatus = Optional.of("daemonNewInstance");
}

@Subscribe
@SuppressWarnings("unused")
public void symlinkInvalidation(ParsingEvent.SymlinkInvalidation event) {
  printInfoDirectlyOnce("Action graph will be rebuilt because symlinks are used.");
  parsingStatus = Optional.of("symlinkInvalidation");
}

@Subscribe
@SuppressWarnings("unused")
public void envVariableChange(ParsingEvent.EnvVariableChange event) {
  printInfoDirectlyOnce("Action graph will be rebuilt because environment variables changed.");
  parsingStatus = Optional.of("envVariableChange");
}
/**
 * Builds the short parsing-status message shown on the console.
 *
 * @param emoji emoji describing the parsing state (e.g. {@code EMOJI_BUNNY} for a fast parse)
 * @param reason human-readable explanation of the parsing state
 * @return the emoji-prefixed message when the default charset is UTF-8; otherwise an
 *     ASCII-safe "(FAST)" / "(SLOW) reason" fallback
 */
@VisibleForTesting
static Optional<String> createParsingMessage(String emoji, String reason) {
  // Prefer the JDK's StandardCharsets constant over Guava's Charsets.UTF_8; per Guava's
  // docs they are equivalent, and this removes a needless third-party dependency here.
  if (Charset.defaultCharset().equals(java.nio.charset.StandardCharsets.UTF_8)) {
    return Optional.of(emoji + " " + reason);
  }
  if (emoji.equals(EMOJI_BUNNY)) {
    // Non-UTF-8 terminals can't render the bunny; use a plain marker instead.
    return Optional.of("(FAST)");
  }
  return Optional.of("(SLOW) " + reason);
}
/**
 * Formats an elapsed duration for display: "MM:SS.t min" at or above one minute, otherwise
 * "S.t sec", where t is tenths of a second.
 */
@Override
protected String formatElapsedTime(long elapsedTimeMs) {
  long totalSeconds = elapsedTimeMs / 1000;
  long tenths = (elapsedTimeMs % 1000) / 100;
  if (elapsedTimeMs < 60_000L) {
    return String.format("%d.%d sec", totalSeconds, tenths);
  }
  return String.format("%02d:%02d.%d min", totalSeconds / 60, totalSeconds % 60, tenths);
}
/** Diagnostic: why the action graph was (or wasn't) rebuilt, for tests. */
@VisibleForTesting
Optional<String> getParsingStatus() {
  return parsingStatus;
}

/** Stops the render scheduler and flushes one last frame before closing. */
@Override
public synchronized void close() throws IOException {
  super.close();
  stopRenderScheduler();
  render(); // Ensure final frame is rendered.
}

/** The super console shows live progress estimates. */
@Override
public boolean displaysEstimatedProgress() {
  return true;
}
}
|
|
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
package org.elasticsearch.xpack.monitoring.exporter.http;
import java.util.Collections;
import org.apache.http.HttpEntity;
import org.apache.http.entity.ContentType;
import org.apache.http.entity.StringEntity;
import org.elasticsearch.client.Response;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.xpack.monitoring.exporter.http.HttpResource.ResourcePublishResult;
import java.util.Map;
import static org.elasticsearch.xpack.monitoring.exporter.http.AsyncHttpResourceHelper.wrapMockListener;
import static org.elasticsearch.xpack.monitoring.exporter.http.PublishableHttpResource.GET_EXISTS;
import static org.elasticsearch.xpack.monitoring.exporter.http.WatcherExistsHttpResource.XPACK_DOES_NOT_EXIST;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.sameInstance;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyZeroInteractions;
import static org.mockito.Mockito.when;
/**
* Tests {@link WatcherExistsHttpResource}.
*/
public class WatcherExistsHttpResourceTests extends AbstractPublishableHttpResourceTestCase {
private final ClusterService clusterService = mock(ClusterService.class);
private final MultiHttpResource watches = mock(MultiHttpResource.class);
private final WatcherExistsHttpResource resource = new WatcherExistsHttpResource(owner, clusterService, watches);
private final Map<String, String> expectedParameters = getParameters(resource.getDefaultParameters(), GET_EXISTS, XPACK_DOES_NOT_EXIST);
public void testDoCheckIgnoresClientWhenNotElectedMaster() {
whenNotElectedMaster();
resource.doCheck(client, wrapMockListener(checkListener));
verify(checkListener).onResponse(true);
verifyZeroInteractions(client);
}
public void testDoCheckExistsFor404() {
whenElectedMaster();
// /_xpack returning a 404 means ES didn't handle the request properly and X-Pack doesn't exist
doCheckWithStatusCode(resource, "", "_xpack", notFoundCheckStatus(),
GET_EXISTS, XPACK_DOES_NOT_EXIST, true);
}
public void testDoCheckExistsFor400() {
whenElectedMaster();
// /_xpack returning a 400 means X-Pack does not exist
doCheckWithStatusCode(resource, "", "_xpack", RestStatus.BAD_REQUEST,
GET_EXISTS, XPACK_DOES_NOT_EXIST, true);
}
public void testDoCheckExistsAsElectedMaster() {
whenElectedMaster();
final String[] noWatcher = {
"{}",
"{\"features\":{\"watcher\":{\"available\":true,\"enabled\":false}}}",
"{\"features\":{\"watcher\":{\"available\":false,\"enabled\":true}}}",
"{\"features\":{\"watcher\":{\"available\":true}}}",
"{\"features\":{\"watcher\":{\"enabled\":true}}}"
};
final String endpoint = "/_xpack";
// success only implies that it responded; it also needs to be available and enabled
final Response response = response("GET", endpoint, successfulCheckStatus());
final HttpEntity responseEntity = new StringEntity(randomFrom(noWatcher), ContentType.APPLICATION_JSON);
when(response.getEntity()).thenReturn(responseEntity);
// returning EXISTS implies that we CANNOT use Watcher to avoid running the publish phase
doCheckWithStatusCode(resource, expectedParameters, endpoint, true, response);
verify(response).getEntity();
}
public void testDoCheckDoesNotExist() {
whenElectedMaster();
final String[] hasWatcher = {
"{\"features\":{\"watcher\":{\"available\":true,\"enabled\":true}}}",
"{\"features\":{\"watcher\":{\"enabled\":true,\"available\":true}}}"
};
final String endpoint = "/_xpack";
// success only implies that it responded; it also needs to be available and enabled
final Response response = response("GET", endpoint, successfulCheckStatus());
final HttpEntity responseEntity = new StringEntity(randomFrom(hasWatcher), ContentType.APPLICATION_JSON);
when(response.getEntity()).thenReturn(responseEntity);
// returning DOES_NOT_EXIST implies that we CAN use Watcher and need to run the publish phase
doCheckWithStatusCode(resource, expectedParameters, endpoint, false, response);
verify(response).getEntity();
}
public void testDoCheckErrorWithDataException() {
whenElectedMaster();
final String[] errorWatcher = {
"{\"features\":{}}", // missing watcher object 'string'
"{\"watcher\":{\"enabled\":true,\"available\":true}}", // missing features outer object
"{{}" // extra {
};
final String endpoint = "/_xpack";
// success only implies that it responded; it also needs to be available and enabled
final Response response = response("GET", endpoint, successfulCheckStatus());
final HttpEntity responseEntity = new StringEntity(randomFrom(errorWatcher), ContentType.APPLICATION_JSON);
when(response.getEntity()).thenReturn(responseEntity);
// returning an error implies that we CAN use Watcher and need to run the publish phase
doCheckWithStatusCode(resource, expectedParameters, endpoint, null, response);
}
public void testDoCheckErrorWithResponseException() {
whenElectedMaster();
assertCheckWithException(resource, expectedParameters, "", "_xpack");
}
public void testDoPublishTrue() {
final boolean checkResponse = randomBoolean();
final boolean publish = checkResponse == false;
final MockHttpResource mockWatch = new MockHttpResource(owner, randomBoolean(), checkResponse, publish);
final MultiHttpResource watches = new MultiHttpResource(owner, Collections.singletonList(mockWatch));
final WatcherExistsHttpResource resource = new WatcherExistsHttpResource(owner, clusterService, watches);
resource.doPublish(client, wrapMockListener(publishListener));
verifyPublishListener(ResourcePublishResult.ready());
assertThat(mockWatch.checked, is(1));
assertThat(mockWatch.published, is(publish ? 1 : 0));
}
public void testDoPublishFalse() {
final MockHttpResource mockWatch = new MockHttpResource(owner, true, false, false);
final MultiHttpResource watches = new MultiHttpResource(owner, Collections.singletonList(mockWatch));
final WatcherExistsHttpResource resource = new WatcherExistsHttpResource(owner, clusterService, watches);
resource.doPublish(client, wrapMockListener(publishListener));
verifyPublishListener(new ResourcePublishResult(false));
assertThat(mockWatch.checked, is(1));
assertThat(mockWatch.published, is(1));
}
public void testDoPublishException() {
final MockHttpResource mockWatch = new MockHttpResource(owner, true, false, null);
final MultiHttpResource watches = new MultiHttpResource(owner, Collections.singletonList(mockWatch));
final WatcherExistsHttpResource resource = new WatcherExistsHttpResource(owner, clusterService, watches);
resource.doPublish(client, wrapMockListener(publishListener));
verifyPublishListener(null);
assertThat(mockWatch.checked, is(1));
assertThat(mockWatch.published, is(1));
}
public void testParameters() {
final Map<String, String> parameters = resource.getDefaultParameters();
assertThat(parameters.get("filter_path"), is(WatcherExistsHttpResource.WATCHER_CHECK_PARAMETERS.get("filter_path")));
assertThat(parameters.size(), is(1));
}
public void testGetResources() {
    // The exact watch collection passed at construction must be handed back.
    final MultiHttpResource wrapped = resource.getWatches();
    assertThat(wrapped, sameInstance(watches));
}
/**
 * Stubs {@code clusterService} so the local node reports itself as the elected master.
 */
private void whenElectedMaster() {
    mockMasterElection(true);
}

/**
 * Stubs {@code clusterService} so the local node reports itself as NOT the elected master.
 */
private void whenNotElectedMaster() {
    mockMasterElection(false);
}

/**
 * Shared stubbing for the two helpers above: wires clusterService -> state -> nodes
 * and fixes the local node's master flag. Extracted because the two callers were
 * copy-pasted except for this one boolean.
 *
 * @param electedMaster value returned by {@code nodes.isLocalNodeElectedMaster()}
 */
private void mockMasterElection(final boolean electedMaster) {
    final ClusterState state = mock(ClusterState.class);
    final DiscoveryNodes nodes = mock(DiscoveryNodes.class);

    when(clusterService.state()).thenReturn(state);
    when(state.nodes()).thenReturn(nodes);
    when(nodes.isLocalNodeElectedMaster()).thenReturn(electedMaster);
}
}
|
|
/*
* Copyright (c) 2002-2022, City of Paris
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright notice
* and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice
* and the following disclaimer in the documentation and/or other materials
* provided with the distribution.
*
* 3. Neither the name of 'Mairie de Paris' nor 'Lutece' nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*
* License 1.0
*/
package fr.paris.lutece.portal.service.search;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Arrays;
import java.util.List;
import java.util.Properties;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.StringField;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.store.Directory;
import org.springframework.mock.web.MockHttpServletRequest;
import fr.paris.lutece.portal.business.page.Page;
import fr.paris.lutece.portal.service.init.LuteceInitException;
import fr.paris.lutece.portal.service.security.LuteceUser;
import fr.paris.lutece.portal.service.security.MokeLuteceAuthentication;
import fr.paris.lutece.portal.service.security.SecurityService;
import fr.paris.lutece.portal.service.spring.SpringContextService;
import fr.paris.lutece.portal.service.util.AppPropertiesService;
import fr.paris.lutece.test.LuteceTestCase;
/**
* Test the LuceneSearchEngine class
*/
/**
 * Test the LuceneSearchEngine class.
 *
 * <p>A small Lucene index is built once (on the first {@link #setUp()}) with two
 * documents: one carrying date/type/metadata/role fields and the contents
 * "lutecefoo lutecebar", and one restricted to {@code Page.ROLE_NONE} with the
 * contents "lutecebaz". Each test runs a search through the engine bean and
 * checks the number of hits.</p>
 */
public class LuceneSearchEngineTest extends LuteceTestCase
{
    private static final String BEAN_SEARCH_ENGINE = "searchEngine";

    // The index is only built on the first setUp( ) call; later tests reuse it.
    private static boolean firstRun = true;
    private static SearchEngine _engine;

    /* mimic initialization in IndexationService.processIndexing */
    private IndexWriter getIndexWriter( ) throws Exception
    {
        Directory dir = IndexationService.getDirectoryIndex( );
        IndexWriterConfig conf = new IndexWriterConfig( IndexationService.getAnalyser( ) );
        // CREATE wipes any previous index so the test class starts from a known state
        conf.setOpenMode( OpenMode.CREATE );
        return new IndexWriter( dir, conf );
    }

    @Override
    protected void setUp( ) throws Exception
    {
        super.setUp( );
        if ( firstRun )
        {
            firstRun = false;
            _engine = SpringContextService.getBean( BEAN_SEARCH_ENGINE );
            FieldType ft = new FieldType( StringField.TYPE_STORED );
            ft.setOmitNorms( false );
            Document doc = new Document( );
            doc.add( new Field( SearchItem.FIELD_DATE, "2014-06-06", ft ) );
            doc.add( new Field( SearchItem.FIELD_CONTENTS, "lutecefoo lutecebar", TextField.TYPE_NOT_STORED ) );
            doc.add( new StringField( SearchItem.FIELD_METADATA, "lutecetag", Field.Store.NO ) );
            doc.add( new Field( SearchItem.FIELD_TYPE, "lutecetype", ft ) );
            doc.add( new Field( SearchItem.FIELD_ROLE, "role1", ft ) );
            // Not using IndexationService.write(doc) because it needs to be
            // called by IndexationService.processIndexing() (or else it throws null pointer exception)
            IndexWriter indexWriter = getIndexWriter( );
            try
            {
                indexWriter.addDocument( doc );
                doc = new Document( );
                doc.add( new Field( SearchItem.FIELD_CONTENTS, "lutecebaz", TextField.TYPE_NOT_STORED ) );
                doc.add( new Field( SearchItem.FIELD_ROLE, Page.ROLE_NONE, ft ) );
                indexWriter.addDocument( doc );
            }
            finally
            {
                // always release the writer (and the index write lock), even if indexing fails
                indexWriter.close( );
            }
        }
    }

    public void testSearchSimpleMatch( ) throws Exception
    {
        MockHttpServletRequest request = new MockHttpServletRequest( );
        List<SearchResult> listResults = _engine.getSearchResults( "lutecefoo", request );
        assertTrue( "The search results list should have one element. Got : " + listResults, listResults != null && listResults.size( ) == 1 );
    }

    public void testSearchSimpleNoMatch( ) throws Exception
    {
        MockHttpServletRequest request = new MockHttpServletRequest( );
        List<SearchResult> listResults = _engine.getSearchResults( "lutecebadfoo", request );
        assertTrue( "The search results list should have no elements. Got : " + listResults, listResults != null && listResults.size( ) == 0 );
    }

    public void testSearchDateMatch( ) throws Exception
    {
        // the indexed date 2014-06-06 lies inside [01/01/2014, 01/10/2015]
        MockHttpServletRequest request = new MockHttpServletRequest( );
        request.setParameter( "date_after", "01/01/2014" );
        request.setParameter( "date_before", "01/10/2015" );
        List<SearchResult> listResults = _engine.getSearchResults( "lutecefoo", request );
        assertTrue( "The search results list should have one element. Got : " + listResults, listResults != null && listResults.size( ) == 1 );
    }

    public void testSearchDateNoMatch( ) throws Exception
    {
        // the indexed date 2014-06-06 lies outside [01/01/2010, 01/10/2011]
        MockHttpServletRequest request = new MockHttpServletRequest( );
        request.setParameter( "date_after", "01/01/2010" );
        request.setParameter( "date_before", "01/10/2011" );
        List<SearchResult> listResults = _engine.getSearchResults( "lutecefoo", request );
        assertTrue( "The search results list should have no elements. Got : " + listResults, listResults != null && listResults.size( ) == 0 );
    }

    public void testSearchTypeMatch( ) throws Exception
    {
        MockHttpServletRequest request = new MockHttpServletRequest( );
        request.setParameter( "type_filter", "lutecetype" );
        List<SearchResult> listResults = _engine.getSearchResults( "lutecefoo", request );
        assertTrue( "The search results list should have one element. Got : " + listResults, listResults != null && listResults.size( ) == 1 );
    }

    public void testSearchTypeNoMatch( ) throws Exception
    {
        MockHttpServletRequest request = new MockHttpServletRequest( );
        request.setParameter( "type_filter", "lutecebadtype" );
        List<SearchResult> listResults = _engine.getSearchResults( "lutecefoo", request );
        assertTrue( "The search results list should have no elements. Got : " + listResults, listResults != null && listResults.size( ) == 0 );
    }

    public void testSearchTagMatch( ) throws Exception
    {
        MockHttpServletRequest request = new MockHttpServletRequest( );
        request.setParameter( "tag_filter", "lutecetag" );
        List<SearchResult> listResults = _engine.getSearchResults( "lutecetag", request );
        assertTrue( "The search results list should have one element. Got : " + listResults, listResults != null && listResults.size( ) == 1 );
    }

    public void testSearchTagNoMatch( ) throws Exception
    {
        MockHttpServletRequest request = new MockHttpServletRequest( );
        request.setParameter( "tag_filter", "lutecetag" );
        List<SearchResult> listResults = _engine.getSearchResults( "lutecebadtag", request );
        assertTrue( "The search results list should have no elements. Got : " + listResults, listResults != null && listResults.size( ) == 0 );
    }

    public void testSearchUserMatch( ) throws Exception
    {
        // XXX copy pasted from PortalMenuServiceTest
        boolean authStatus;
        authStatus = enableAuthentication( );
        try
        {
            // anonymous subclass: LuteceUser is abstract, no behavior is added
            LuteceUser user = new LuteceUser( "junit", SecurityService.getInstance( ).getAuthenticationService( ) )
            {
            };
            user.setRoles( Arrays.asList( "role1" ) );
            MockHttpServletRequest request = new MockHttpServletRequest( );
            request.getSession( ).setAttribute( "lutece_user", user );
            List<SearchResult> listResults = _engine.getSearchResults( "lutecefoo", request );
            assertTrue( "The search results list should have one element. Got : " + listResults, listResults != null && listResults.size( ) == 1 );
        }
        finally
        {
            restoreAuthentication( authStatus );
        }
    }

    public void testSearchUserNoMatch( ) throws Exception
    {
        // XXX copy pasted from PortalMenuServiceTest
        boolean authStatus;
        authStatus = enableAuthentication( );
        try
        {
            LuteceUser user = new LuteceUser( "junit", SecurityService.getInstance( ).getAuthenticationService( ) )
            {
            };
            // "role2" does not match the indexed "role1" document
            user.setRoles( Arrays.asList( "role2" ) );
            MockHttpServletRequest request = new MockHttpServletRequest( );
            request.getSession( ).setAttribute( "lutece_user", user );
            List<SearchResult> listResults = _engine.getSearchResults( "lutecefoo", request );
            assertTrue( "The search results list should have no elements. Got : " + listResults, listResults != null && listResults.size( ) == 0 );
        }
        finally
        {
            restoreAuthentication( authStatus );
        }
    }

    public void testSearchUserNoMatchNoUser( ) throws Exception
    {
        // XXX copy pasted from PortalMenuServiceTest
        boolean authStatus;
        authStatus = enableAuthentication( );
        try
        {
            MockHttpServletRequest request = new MockHttpServletRequest( );
            List<SearchResult> listResults = _engine.getSearchResults( "lutecebadfoo", request );
            assertTrue( "The search results list should have no elements. Got : " + listResults, listResults != null && listResults.size( ) == 0 );
        }
        finally
        {
            restoreAuthentication( authStatus );
        }
    }

    // /XXX refactor, this is copy pasted from PortalMenuServiceTest
    /**
     * Removes the mylutece authentication properties again (only when they were
     * added by {@link #enableAuthentication()}) and re-initializes the services.
     *
     * @param status the authentication state before the test, as returned by enableAuthentication()
     */
    private void restoreAuthentication( boolean status ) throws IOException, LuteceInitException
    {
        if ( !status )
        {
            File luteceProperties = new File( getResourcesDir( ), "WEB-INF/conf/lutece.properties" );
            Properties props = new Properties( );
            InputStream is = new FileInputStream( luteceProperties );
            try
            {
                props.load( is );
            }
            finally
            {
                // close in finally so a failed load( ) does not leak the stream
                is.close( );
            }
            props.remove( "mylutece.authentication.enable" );
            props.remove( "mylutece.authentication.class" );
            OutputStream os = new FileOutputStream( luteceProperties );
            try
            {
                props.store( os, "saved for junit " + this.getClass( ).getCanonicalName( ) );
            }
            finally
            {
                os.close( );
            }
            AppPropertiesService.reloadAll( );
            SecurityService.init( );
        }
    }

    // /XXX refactor, this is copy pasted from PortalMenuServiceTest
    /**
     * Turns on mylutece authentication (with the mock authentication service)
     * by rewriting lutece.properties, unless it was already enabled.
     *
     * @return the authentication state before this call, to pass back to restoreAuthentication()
     */
    private boolean enableAuthentication( ) throws IOException, LuteceInitException
    {
        boolean status = SecurityService.isAuthenticationEnable( );
        if ( !status )
        {
            File luteceProperties = new File( getResourcesDir( ), "WEB-INF/conf/lutece.properties" );
            Properties props = new Properties( );
            InputStream is = new FileInputStream( luteceProperties );
            try
            {
                props.load( is );
            }
            finally
            {
                // close in finally so a failed load( ) does not leak the stream
                is.close( );
            }
            props.setProperty( "mylutece.authentication.enable", "true" );
            props.setProperty( "mylutece.authentication.class", MokeLuteceAuthentication.class.getName( ) );
            OutputStream os = new FileOutputStream( luteceProperties );
            try
            {
                props.store( os, "saved for junit " + this.getClass( ).getCanonicalName( ) );
            }
            finally
            {
                os.close( );
            }
            AppPropertiesService.reloadAll( );
            SecurityService.init( );
        }
        return status;
    }
}
|
|
package net.glowstone.block.block2;
import com.google.common.base.Optional;
import com.google.common.collect.EnumHashBiMap;
import net.glowstone.block.block2.sponge.BlockProperty;
import java.util.*;
/**
* Implementation of {@link BlockProperty}.
*/
/**
 * Implementation of {@link BlockProperty}.
 *
 * <p>Stores the property name together with a sorted, unmodifiable list of its
 * valid values, and provides static factories for boolean, integer-range, and
 * enum-backed properties.</p>
 */
public abstract class GlowBlockProperty<T extends Comparable<T>> implements BlockProperty<T> {

    private final String name;
    // sorted and unmodifiable; getDefault() relies on the sort order
    private final List<T> values;

    public GlowBlockProperty(String name, Collection<T> values) {
        this.name = name;
        ArrayList<T> sorted = new ArrayList<>(values);
        Collections.sort(sorted);
        this.values = Collections.unmodifiableList(sorted);
    }

    @Override
    public final String getName() {
        return name;
    }

    @Override
    public final Collection<T> getValidValues() {
        return values;
    }

    @Override
    public final String toString() {
        return getClass().getSimpleName() + "{name=" + name + ", values=" + values + "}";
    }

    /**
     * Get the default value of this property.
     * @return the default value
     */
    protected T getDefault() {
        // values is sorted, so this is the smallest valid value
        return values.get(0);
    }

    ////////////////////////////////////////////////////////////////////////////
    // Static helper methods

    /**
     * Cycle to the next valid value of a property, wrapping back to the first
     * value after the last one.
     * @param property the property whose values to cycle through
     * @param value the current value
     * @return the value following {@code value} in the property's iteration order
     * @throws IllegalStateException if the property has no valid values
     * @throws IllegalArgumentException if {@code value} is not a valid value
     */
    public static <T extends Comparable<T>> T cycle(BlockProperty<T> property, Object value) {
        Iterator<T> iter = property.getValidValues().iterator();
        if (!iter.hasNext()) {
            throw new IllegalStateException("no valid values on: " + property);
        }
        T first = iter.next();
        if (first.equals(value)) {
            if (iter.hasNext()) {
                return iter.next();
            } else {
                return first;
            }
        }
        while (iter.hasNext()) {
            if (iter.next().equals(value)) {
                if (iter.hasNext()) {
                    return iter.next();
                } else {
                    // loop to the beginning
                    return first;
                }
            }
        }
        throw new IllegalArgumentException("cannot cycle from invalid value: " + value);
    }

    /**
     * Get the default value of an arbitrary property.
     * @param property the property to query
     * @return the property's default value, or its first valid value if it is
     *         not a GlowBlockProperty
     */
    public static <T extends Comparable<T>> T getDefault(BlockProperty<T> property) {
        if (property instanceof GlowBlockProperty) {
            return ((GlowBlockProperty<T>) property).getDefault();
        }
        return property.getValidValues().iterator().next();
    }

    /**
     * Validate and filter a value for this property.
     * @param property the property to validate against
     * @param value the value to validate
     * @return the filtered value
     * @throws IllegalArgumentException if the value is invalid
     */
    public static Comparable<?> validate(BlockProperty<?> property, Comparable<?> value) {
        if (property.getValidValues().contains(value)) {
            return value;
        } else {
            throw new IllegalArgumentException("Invalid value for " + property + ": " + value);
        }
    }

    /**
     * Create a new boolean property with the given name.
     * @param name The name of the property
     * @return A new boolean property
     */
    public static BooleanProperty ofBoolean(String name) {
        return new BooleanProp(name);
    }

    /**
     * Create a new integer property with the given attributes.
     * @param name The name of the property
     * @param min The minimum value, inclusive
     * @param max The maximum value, inclusive
     * @return A new integer property
     */
    public static IntegerProperty ofRange(String name, int min, int max) {
        return new IntegerProp(name, min, max);
    }

    /**
     * Create a new string property from the values of the given enumeration.
     * @param name The name of the property
     * @param clazz The enumeration class to use the values of
     * @return A new string property
     * @throws IllegalArgumentException if the class contains no values
     */
    public static <E extends Enum<E>> EnumProperty<E> ofEnum(String name, Class<E> clazz) {
        E[] values = clazz.getEnumConstants();
        if (values == null) {
            throw new IllegalArgumentException(clazz + " is not an enumeration");
        }
        if (values.length == 0) {
            throw new IllegalArgumentException(clazz + " has no values");
        }
        return new EnumProp<>(name, clazz, values);
    }

    /**
     * Create a new string property from the values of the given enumeration,
     * using explicitly provided display names.
     * @param name The name of the property
     * @param clazz The enumeration class to use the values of
     * @param names The display name for each enumeration value
     * @return A new string property
     * @throws IllegalArgumentException if the class contains no values, or a
     *         value is missing from {@code names}
     */
    public static <E extends Enum<E>> EnumProperty<E> ofNamedEnum(String name, Class<E> clazz, Map<E, String> names) {
        E[] values = clazz.getEnumConstants();
        if (values == null) {
            throw new IllegalArgumentException(clazz + " is not an enumeration");
        }
        if (values.length == 0) {
            throw new IllegalArgumentException(clazz + " has no values");
        }
        return new NamedEnumProp<>(name, values, names);
    }

    /**
     * Create a new string property from only some of the values of the given enumeration.
     * @param name The name of the property
     * @param clazz The enumeration class to use the values of
     * @param values The specific values to use
     * @return A new string property
     * @throws IllegalArgumentException if no values are provided
     */
    @SafeVarargs
    public static <E extends Enum<E>> EnumProperty<E> ofPartialEnum(String name, Class<E> clazz, E... values) {
        if (values == null || values.length == 0) {
            throw new IllegalArgumentException("null or no values provided");
        }
        for (E val : values) {
            if (!clazz.isInstance(val)) {
                throw new IllegalArgumentException("value " + val + " is not instanceof " + clazz.getName());
            }
        }
        return new EnumProp<>(name, clazz, values);
    }

    /**
     * Create a new string property from only some of the values of the given
     * enumeration, using explicitly provided display names.
     * @param name The name of the property
     * @param clazz The enumeration class to use the values of
     * @param names The display name for each enumeration value
     * @param values The specific values to use
     * @return A new string property
     * @throws IllegalArgumentException if no values are provided, or a value
     *         is missing from {@code names}
     */
    @SafeVarargs
    public static <E extends Enum<E>> EnumProperty<E> ofNamedPartialEnum(String name, Class<E> clazz, Map<E, String> names, E... values) {
        if (values == null || values.length == 0) {
            throw new IllegalArgumentException("null or no values provided");
        }
        for (E val : values) {
            if (!clazz.isInstance(val)) {
                throw new IllegalArgumentException("value " + val + " is not instanceof " + clazz.getName());
            }
        }
        return new NamedEnumProp<>(name, values, names);
    }

    ////////////////////////////////////////////////////////////////////////////
    // Implementation subclasses

    private static class BooleanProp extends GlowBlockProperty<Boolean> implements BooleanProperty {
        private BooleanProp(String name) {
            super(name, Arrays.asList(true, false));
        }

        @Override
        public String getNameForValue(Boolean value) {
            return value.toString();
        }

        @Override
        public Optional<Boolean> getValueForName(String name) {
            if (name.equalsIgnoreCase("true")) {
                return Optional.of(true);
            } else if (name.equalsIgnoreCase("false")) {
                return Optional.of(false);
            } else {
                return Optional.absent();
            }
        }
    }

    private static class IntegerProp extends GlowBlockProperty<Integer> implements IntegerProperty {
        private IntegerProp(String name, int min, int max) {
            super(name, valuesFor(min, max));
        }

        // all integers in [min, max], inclusive on both ends
        private static Collection<Integer> valuesFor(int min, int max) {
            List<Integer> values = new ArrayList<>(max - min + 1);
            for (int i = min; i <= max; ++i) {
                values.add(i);
            }
            return values;
        }

        @Override
        public String getNameForValue(Integer value) {
            return value.toString();
        }

        @Override
        public Optional<Integer> getValueForName(String name) {
            try {
                return Optional.of(Integer.parseInt(name));
            } catch (NumberFormatException e) {
                return Optional.absent();
            }
        }
    }

    private static class EnumProp<E extends Enum<E>> extends GlowBlockProperty<E> implements EnumProperty<E> {
        private final Class<E> clazz;

        public EnumProp(String name, Class<E> clazz, E[] values) {
            super(name, Arrays.asList(values));
            this.clazz = clazz;
        }

        @Override
        public String getNameForValue(E value) {
            // Locale.ROOT keeps the casing locale-independent (e.g. under a Turkish
            // default locale, 'I' would otherwise lowercase to dotless 'ı')
            return value.name().toLowerCase(Locale.ROOT);
        }

        @Override
        public Optional<E> getValueForName(String name) {
            try {
                // Locale.ROOT for the same reason as getNameForValue: the round-trip
                // must work regardless of the JVM's default locale
                return Optional.of(Enum.valueOf(clazz, name.toUpperCase(Locale.ROOT)));
            } catch (IllegalArgumentException e) {
                return Optional.absent();
            }
        }
    }

    private static class NamedEnumProp<E extends Enum<E>> extends GlowBlockProperty<E> implements EnumProperty<E> {
        // bidirectional so getValueForName can use the inverse view
        private final EnumHashBiMap<E, String> names;

        public NamedEnumProp(String name, E[] values, Map<E, String> srcNames) {
            super(name, Arrays.asList(values));
            names = EnumHashBiMap.create(srcNames);
            for (E val : values) {
                if (!names.containsKey(val)) {
                    throw new IllegalArgumentException("Name missing for: " + val);
                }
            }
            // drop names for enum constants that are not part of this property
            names.keySet().retainAll(getValidValues());
        }

        @Override
        public String getNameForValue(E value) {
            if (!names.containsKey(value)) {
                throw new IllegalArgumentException("Invalid value: " + value);
            }
            return names.get(value);
        }

        @Override
        public Optional<E> getValueForName(String name) {
            return Optional.fromNullable(names.inverse().get(name));
        }
    }
}
|
|
/*******************************************************************************
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*******************************************************************************/
package com.google.ajiel.service.start;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.PrintStream;
import java.io.PrintWriter;
import java.net.ConnectException;
import java.net.ServerSocket;
import java.net.Socket;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.atomic.AtomicReference;
/**
* Start - OFBiz Container(s) Startup Class
*
*/
/**
 * Start - OFBiz Container(s) Startup Class
 *
 */
public class Start {

    /**
     * Commands accepted over the admin socket. Each command writes its
     * response to the supplied writer.
     */
    private enum Control {
        SHUTDOWN {
            void processRequest(Start start, PrintWriter writer) {
                if (start.serverState.get() == ServerState.STOPPING) {
                    writer.println("IN-PROGRESS");
                } else {
                    // acknowledge before stopping: stopServer() calls System.exit()
                    writer.println("OK");
                    writer.flush();
                    start.stopServer();
                }
            }
        }, STATUS {
            void processRequest(Start start, PrintWriter writer) {
                writer.println(start.serverState.get());
            }
        }, FAIL {
            void processRequest(Start start, PrintWriter writer) {
                writer.println("FAIL");
            }
        };

        abstract void processRequest(Start start, PrintWriter writer);
    }

    private static void help(PrintStream out) {
        out.println("");
        out.println("Usage: java -jar ofbiz.jar [command] [arguments]");
        out.println("-both -----> Run simultaneously the POS (Point of Sales) application and OFBiz standard");
        out.println("-help, -? ----> This screen");
        out.println("-install -----> Run install (create tables/load data)");
        out.println("-pos -----> Run the POS (Point of Sales) application");
        out.println("-setup -------> Run external application server setup");
        out.println("-start -------> Start the server");
        out.println("-status ------> Status of the server");
        out.println("-shutdown ----> Shutdown the server");
        out.println("-test --------> Run the JUnit test script");
        out.println("[no config] --> Use default config");
        out.println("[no command] -> Start the server w/ default config");
    }

    private enum Command {
        HELP, HELP_ERROR, STATUS, SHUTDOWN, COMMAND
    }

    /**
     * Merges a newly parsed command into the one seen so far; conflicting
     * commands degrade to HELP_ERROR so usage is printed and the JVM exits.
     */
    private static Command checkCommand(Command command, Command wanted) {
        if (wanted == Command.HELP || wanted.equals(command)) {
            return wanted;
        } else if (command == null) {
            return wanted;
        } else {
            System.err.println("Duplicate command detected(was " + command + ", wanted " + wanted + ")");
            return Command.HELP_ERROR;
        }
    }

    public static void main(String[] args) throws StartupException {
        Command command = null;
        List<String> loaderArgs = new ArrayList<String>(args.length);
        for (String arg: args) {
            if (arg.equals("-help") || arg.equals("-?")) {
                command = checkCommand(command, Command.HELP);
            } else if (arg.equals("-status")) {
                command = checkCommand(command, Command.STATUS);
            } else if (arg.equals("-shutdown")) {
                command = checkCommand(command, Command.SHUTDOWN);
            } else if (arg.startsWith("-")) {
                // any other dashed option is forwarded to the loaders without the dash
                command = checkCommand(command, Command.COMMAND);
                loaderArgs.add(arg.substring(1));
            } else {
                command = checkCommand(command, Command.COMMAND);
                if (command == Command.COMMAND) {
                    loaderArgs.add(arg);
                } else {
                    command = Command.HELP_ERROR;
                }
            }
        }
        if (command == null) {
            // no arguments at all: behave like "-start"
            command = Command.COMMAND;
            loaderArgs.add("start");
        }
        if (command == Command.HELP) {
            help(System.out);
            return;
        } else if (command == Command.HELP_ERROR) {
            help(System.err);
            System.exit(1);
        }
        Start start = new Start();
        // STATUS/SHUTDOWN only talk to the admin socket, so skip full init
        start.init(args, command == Command.COMMAND);
        try {
            if (command == Command.STATUS) {
                System.out.println("Current Status : " + start.status());
            } else if (command == Command.SHUTDOWN) {
                System.out.println("Shutting down server : " + start.shutdown());
            } else {
                // general start
                start.start();
            }
        } catch (Exception e) {
            e.printStackTrace();
            System.exit(99);
        }
    }

    private enum ServerState {
        STARTING, RUNNING, STOPPING;

        public String toString() {
            // "STARTING" -> "Starting", etc., for human-readable status replies
            return name().charAt(0) + name().substring(1).toLowerCase();
        }
    }

    private Config config = null;
    private List<String> loaderArgs = new ArrayList<String>();
    private final ArrayList<StartupLoader> loaders = new ArrayList<StartupLoader>();
    // atomic because the admin thread, shutdown hook and main thread all race on it
    private AtomicReference<ServerState> serverState = new AtomicReference<ServerState>(ServerState.STARTING);
    private Thread adminPortThread = null;

    private void createListenerThread() throws StartupException {
        if (config.adminPort > 0) {
            this.adminPortThread = new AdminPortThread();
            this.adminPortThread.start();
        } else {
            System.out.println("Admin socket not configured; set to port 0");
        }
    }

    private void createLogDirectory() {
        File logDir = new File(config.logDir);
        if (!logDir.exists()) {
            if (logDir.mkdir()) {
                System.out.println("Created OFBiz log dir [" + logDir.getAbsolutePath() + "]");
            }
        }
    }

    public void init(String[] args) throws StartupException {
        init(args, true);
    }

    /**
     * Initializes configuration, classpath, log directory, admin listener,
     * shutdown hook and startup loaders.
     *
     * @param args raw command-line arguments; everything after the first is passed to the loaders
     * @param fullInit false to stop after config parsing (used for -status/-shutdown)
     */
    public void init(String[] args, boolean fullInit) throws StartupException {
        String globalSystemPropsFileName = System.getProperty("ofbiz.system.props");
        if (globalSystemPropsFileName != null) {
            FileInputStream stream = null;
            try {
                stream = new FileInputStream(globalSystemPropsFileName);
                System.getProperties().load(stream);
            } catch (IOException e) {
                throw (StartupException) new StartupException("Couldn't load global system props").initCause(e);
            } finally {
                if (stream != null){
                    try {
                        stream.close();
                    } catch (IOException e) {
                        throw (StartupException) new StartupException("Couldn't close stream").initCause(e);
                    }
                }
            }
        }
        try {
            this.config = Config.getInstance(args);
        } catch (IOException e) {
            throw (StartupException) new StartupException("Couldn't not fetch config instance").initCause(e);
        }
        // parse the startup arguments
        if (args.length > 1) {
            this.loaderArgs.addAll(Arrays.asList(args).subList(1, args.length));
        }
        if (!fullInit) {
            return;
        }
        // initialize the classpath
        initClasspath();
        // create the log directory
        createLogDirectory();
        // create the listener thread
        createListenerThread();
        // set the shutdown hook
        if (config.useShutdownHook) {
            Runtime.getRuntime().addShutdownHook(new Thread() { public void run() { shutdownServer(); } });
        } else {
            System.out.println("Shutdown hook disabled");
        }
        // initialize the startup loaders
        initStartLoaders();
    }

    private void initClasspath() throws StartupException {
        Classpath classPath = new Classpath(System.getProperty("java.class.path"));
        try {
            this.config.initClasspath(classPath);
        } catch (IOException e) {
            throw (StartupException) new StartupException("Couldn't initialized classpath").initCause(e);
        }
        // Set the classpath/classloader
        System.setProperty("java.class.path", classPath.toString());
        ClassLoader classloader = classPath.getClassLoader();
        Thread.currentThread().setContextClassLoader(classloader);
        if (System.getProperty("DEBUG") != null) {
            System.out.println("Startup Classloader: " + classloader.toString());
            System.out.println("Startup Classpath: " + classPath.toString());
        }
    }

    private void initStartLoaders() throws StartupException {
        ClassLoader classloader = Thread.currentThread().getContextClassLoader();
        synchronized (this.loaders) {
            // initialize the loaders
            for (String loaderClassName: config.loaders) {
                if (this.serverState.get() == ServerState.STOPPING) {
                    return;
                }
                try {
                    Class<?> loaderClass = classloader.loadClass(loaderClassName);
                    StartupLoader loader = (StartupLoader) loaderClass.newInstance();
                    loader.load(config, loaderArgs.toArray(new String[loaderArgs.size()]));
                    loaders.add(loader);
                } catch (ClassNotFoundException e) {
                    throw (StartupException) new StartupException(e.getMessage()).initCause(e);
                } catch (InstantiationException e) {
                    throw (StartupException) new StartupException(e.getMessage()).initCause(e);
                } catch (IllegalAccessException e) {
                    throw (StartupException) new StartupException(e.getMessage()).initCause(e);
                }
            }
            this.loaders.trimToSize();
        }
        return;
    }

    /**
     * Sends a single command to the admin socket of a running instance and
     * returns its one-line reply, or "OFBiz is Down" if nothing is listening.
     */
    private String sendSocketCommand(Control control) throws IOException, ConnectException {
        String response = "OFBiz is Down";
        try {
            Socket socket = new Socket(config.adminAddress, config.adminPort);
            try {
                // send the command
                PrintWriter writer = new PrintWriter(socket.getOutputStream(), true);
                writer.println(config.adminKey + ":" + control);
                writer.flush();
                // read the reply
                BufferedReader reader = new BufferedReader(new InputStreamReader(socket.getInputStream()));
                response = reader.readLine();
                reader.close();
                writer.close();
            } finally {
                // always release the socket, even if the exchange throws
                socket.close();
            }
        } catch (ConnectException e) {
            System.out.println("Could not connect to " + config.adminAddress + ":" + config.adminPort);
        }
        return response;
    }

    public String shutdown() throws IOException {
        return sendSocketCommand(Control.SHUTDOWN);
    }

    private void shutdownServer() {
        ServerState currentState;
        do {
            currentState = this.serverState.get();
            if (currentState == ServerState.STOPPING) {
                return;
            }
        } while (!this.serverState.compareAndSet(currentState, ServerState.STOPPING));
        // The current thread was the one that successfully changed the state;
        // continue with further processing.
        synchronized (this.loaders) {
            // Unload in reverse order
            for (int i = this.loaders.size(); i > 0; i--) {
                StartupLoader loader = this.loaders.get(i - 1);
                try {
                    loader.unload();
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        }
        if (this.adminPortThread != null && this.adminPortThread.isAlive()) {
            this.adminPortThread.interrupt();
        }
    }

    // org.apache.commons.daemon.Daemon.start()
    public void start() throws Exception {
        if (!startStartLoaders()) {
            if (this.serverState.get() == ServerState.STOPPING) {
                return;
            } else {
                throw new Exception("Error during start.");
            }
        }
        if (config.shutdownAfterLoad) {
            stopServer();
        }
    }

    /**
     * Returns <code>true</code> if all loaders were started.
     *
     * @return <code>true</code> if all loaders were started.
     */
    private boolean startStartLoaders() {
        synchronized (this.loaders) {
            // start the loaders
            for (StartupLoader loader: this.loaders) {
                if (this.serverState.get() == ServerState.STOPPING) {
                    return false;
                }
                try {
                    loader.start();
                } catch (StartupException e) {
                    e.printStackTrace();
                    return false;
                }
            }
        }
        return this.serverState.compareAndSet(ServerState.STARTING, ServerState.RUNNING);
    }

    public String status() throws IOException {
        try {
            return sendSocketCommand(Control.STATUS);
        } catch (ConnectException e) {
            return "Not Running";
        } catch (IOException e) {
            throw e;
        }
    }

    public void stopServer() {
        shutdownServer();
        System.exit(0);
    }

    // org.apache.commons.daemon.Daemon.destroy()
    public void destroy() {
        // FIXME: undo init() calls.
    }

    // org.apache.commons.daemon.Daemon.stop()
    public void stop() {
        shutdownServer();
    }

    /**
     * Listens on the admin port and serves one STATUS/SHUTDOWN request per
     * connection; requests must be prefixed with the configured admin key.
     */
    private class AdminPortThread extends Thread {
        private ServerSocket serverSocket = null;

        AdminPortThread() throws StartupException {
            super("AdminPortThread");
            try {
                // backlog of 1: only a single pending admin connection is expected
                this.serverSocket = new ServerSocket(config.adminPort, 1, config.adminAddress);
            } catch (IOException e) {
                throw (StartupException) new StartupException("Couldn't create server socket(" + config.adminAddress + ":" + config.adminPort + ")").initCause(e);
            }
            setDaemon(false);
        }

        private void processClientRequest(Socket client) throws IOException {
            BufferedReader reader = null;
            PrintWriter writer = null;
            try {
                reader = new BufferedReader(new InputStreamReader(client.getInputStream()));
                String request = reader.readLine();
                writer = new PrintWriter(client.getOutputStream(), true);
                Control control;
                if (request != null && !request.isEmpty() && request.contains(":")) {
                    String key = request.substring(0, request.indexOf(':'));
                    if (key.equals(config.adminKey)) {
                        try {
                            control = Control.valueOf(request.substring(request.indexOf(':') + 1));
                        } catch (IllegalArgumentException e) {
                            // Enum.valueOf throws (it never returns null) on an unknown
                            // command; treat it as a failed request instead of letting
                            // the exception kill the admin thread
                            control = Control.FAIL;
                        }
                    } else {
                        control = Control.FAIL;
                    }
                } else {
                    control = Control.FAIL;
                }
                control.processRequest(Start.this, writer);
            } finally {
                if (reader != null) {
                    reader.close();
                }
                if (writer != null) {
                    writer.flush();
                    writer.close();
                }
            }
        }

        @Override
        public void run() {
            System.out.println("Admin socket configured on - " + config.adminAddress + ":" + config.adminPort);
            while (!Thread.interrupted()) {
                try {
                    Socket clientSocket = serverSocket.accept();
                    System.out.println("Received connection from - " + clientSocket.getInetAddress() + " : " + clientSocket.getPort());
                    processClientRequest(clientSocket);
                    clientSocket.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }
    }
}
|
|
/*
* Copyright 2013-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.android;
import static com.facebook.buck.jvm.java.JavaCompilationConstants.ANDROID_JAVAC_OPTIONS;
import static org.easymock.EasyMock.createStrictMock;
import static org.easymock.EasyMock.replay;
import static org.easymock.EasyMock.verify;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import com.facebook.buck.android.AndroidBinary.ExopackageMode;
import com.facebook.buck.android.aapt.RDotTxtEntry.RType;
import com.facebook.buck.cli.FakeBuckConfig;
import com.facebook.buck.cxx.CxxPlatformUtils;
import com.facebook.buck.jvm.core.HasJavaClassHashes;
import com.facebook.buck.jvm.java.JavaLibrary;
import com.facebook.buck.jvm.java.JavaLibraryBuilder;
import com.facebook.buck.jvm.java.Keystore;
import com.facebook.buck.model.BuildTarget;
import com.facebook.buck.model.BuildTargetFactory;
import com.facebook.buck.model.BuildTargets;
import com.facebook.buck.model.Flavor;
import com.facebook.buck.model.InternalFlavor;
import com.facebook.buck.rules.BuildRule;
import com.facebook.buck.rules.BuildRuleParams;
import com.facebook.buck.rules.BuildRuleResolver;
import com.facebook.buck.rules.DefaultTargetNodeToBuildRuleTransformer;
import com.facebook.buck.rules.FakeBuildRule;
import com.facebook.buck.rules.FakeBuildRuleParamsBuilder;
import com.facebook.buck.rules.FakeSourcePath;
import com.facebook.buck.rules.PathSourcePath;
import com.facebook.buck.rules.SourcePathResolver;
import com.facebook.buck.rules.SourcePathRuleFinder;
import com.facebook.buck.rules.TargetGraph;
import com.facebook.buck.rules.TargetNode;
import com.facebook.buck.rules.coercer.BuildConfigFields;
import com.facebook.buck.rules.coercer.ManifestEntries;
import com.facebook.buck.testutil.FakeProjectFilesystem;
import com.facebook.buck.testutil.MoreAsserts;
import com.facebook.buck.testutil.TargetGraphFactory;
import com.facebook.buck.util.MoreCollectors;
import com.google.common.base.Suppliers;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableMultimap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedMap;
import com.google.common.collect.ImmutableSortedSet;
import com.google.common.collect.Iterables;
import com.google.common.util.concurrent.MoreExecutors;
import org.hamcrest.Matchers;
import org.junit.Test;
import java.nio.file.Paths;
import java.util.EnumSet;
import java.util.Optional;
/**
 * Unit tests for {@code AndroidBinaryGraphEnhancer}: pre-dex rule creation (including the
 * no_dx list), enhancement of all non-pre-dex buildables, and the dependency wiring between
 * resource-related rules.
 */
public class AndroidBinaryGraphEnhancerTest {

  /**
   * Verifies that pre-dexing creates #dex rules for libraries that should be dexed, skips
   * libraries on the no_dx list, and makes the dex-merge rule depend on the uber R.java dex.
   */
  @Test
  public void testCreateDepsForPreDexing() throws Exception {
    // Create three Java rules, :dep1, :dep2, and :lib. :lib depends on :dep1 and :dep2.
    BuildTarget javaDep1BuildTarget = BuildTargetFactory.newInstance("//java/com/example:dep1");
    TargetNode<?, ?> javaDep1Node = JavaLibraryBuilder
        .createBuilder(javaDep1BuildTarget)
        .addSrc(Paths.get("java/com/example/Dep1.java"))
        .build();
    BuildTarget javaDep2BuildTarget = BuildTargetFactory.newInstance("//java/com/example:dep2");
    TargetNode<?, ?> javaDep2Node = JavaLibraryBuilder
        .createBuilder(javaDep2BuildTarget)
        .addSrc(Paths.get("java/com/example/Dep2.java"))
        .build();
    BuildTarget javaLibBuildTarget = BuildTargetFactory.newInstance("//java/com/example:lib");
    TargetNode<?, ?> javaLibNode = JavaLibraryBuilder
        .createBuilder(javaLibBuildTarget)
        .addSrc(Paths.get("java/com/example/Lib.java"))
        .addDep(javaDep1Node.getBuildTarget())
        .addDep(javaDep2Node.getBuildTarget())
        .build();
    TargetGraph targetGraph =
        TargetGraphFactory.newInstance(javaDep1Node, javaDep2Node, javaLibNode);
    BuildRuleResolver ruleResolver =
        new BuildRuleResolver(targetGraph, new DefaultTargetNodeToBuildRuleTransformer());
    SourcePathRuleFinder ruleFinder = new SourcePathRuleFinder(ruleResolver);
    BuildRule javaDep1 = ruleResolver.requireRule(javaDep1BuildTarget);
    BuildRule javaDep2 = ruleResolver.requireRule(javaDep2BuildTarget);
    BuildRule javaLib = ruleResolver.requireRule(javaLibBuildTarget);
    // Assume we are enhancing an android_binary rule whose only dep
    // is //java/com/example:lib, and that //java/com/example:dep2 is in its no_dx list.
    ImmutableSortedSet<BuildRule> originalDeps = ImmutableSortedSet.of(javaLib);
    ImmutableSet<BuildTarget> buildRulesToExcludeFromDex = ImmutableSet.of(javaDep2BuildTarget);
    BuildTarget apkTarget = BuildTargetFactory.newInstance("//java/com/example:apk");
    FakeProjectFilesystem filesystem = new FakeProjectFilesystem();
    BuildRuleParams originalParams = new BuildRuleParams(
        apkTarget,
        Suppliers.ofInstance(originalDeps),
        Suppliers.ofInstance(originalDeps),
        filesystem
    );
    AndroidBinaryGraphEnhancer graphEnhancer = new AndroidBinaryGraphEnhancer(
        originalParams,
        ruleResolver,
        ResourcesFilter.ResourceCompressionMode.DISABLED,
        FilterResourcesStep.ResourceFilter.EMPTY_FILTER,
        /* bannedDuplicateResourceTypes */ EnumSet.noneOf(RType.class),
        Optional.empty(),
        /* locales */ ImmutableSet.of(),
        createStrictMock(PathSourcePath.class),
        AndroidBinary.PackageType.DEBUG,
        /* cpuFilters */ ImmutableSet.of(),
        /* shouldBuildStringSourceMap */ false,
        /* shouldPreDex */ true,
        BuildTargets.getScratchPath(
            originalParams.getProjectFilesystem(),
            apkTarget,
            "%s/classes.dex"),
        DexSplitMode.NO_SPLIT,
        buildRulesToExcludeFromDex,
        /* resourcesToExclude */ ImmutableSet.of(),
        /* skipCrunchPngs */ false,
        /* includesVectorDrawables */ false,
        ANDROID_JAVAC_OPTIONS,
        false,
        EnumSet.noneOf(ExopackageMode.class),
        /* buildConfigValues */ BuildConfigFields.empty(),
        /* buildConfigValuesFile */ Optional.empty(),
        /* xzCompressionLevel */ Optional.empty(),
        /* trimResourceIds */ false,
        /* keepResourcePattern */ Optional.empty(),
        /* nativePlatforms */ ImmutableMap.of(),
        /* nativeLibraryMergeMap */ Optional.empty(),
        /* nativeLibraryMergeGlue */ Optional.empty(),
        /* nativeLibraryMergeCodeGenerator */ Optional.empty(),
        AndroidBinary.RelinkerMode.DISABLED,
        MoreExecutors.newDirectExecutorService(),
        /* manifestEntries */ ManifestEntries.empty(),
        CxxPlatformUtils.DEFAULT_CONFIG,
        new APKModuleGraph(
            TargetGraph.EMPTY,
            originalParams.getBuildTarget(),
            Optional.empty()),
        new DxConfig(FakeBuckConfig.builder().build()));
    BuildTarget aaptPackageResourcesTarget =
        BuildTargetFactory.newInstance("//java/com/example:apk#aapt_package");
    BuildRuleParams aaptPackageResourcesParams =
        new FakeBuildRuleParamsBuilder(aaptPackageResourcesTarget).build();
    AaptPackageResources aaptPackageResources = new AaptPackageResources(
        aaptPackageResourcesParams,
        ruleFinder,
        ruleResolver,
        /* manifest */ new FakeSourcePath("java/src/com/facebook/base/AndroidManifest.xml"),
        new IdentityResourcesProvider(ImmutableList.of()),
        ImmutableList.of(),
        ImmutableSortedSet.of(),
        ImmutableSet.of(),
        /* resourceUnionPackage */ Optional.empty(),
        false,
        /* skipCrunchPngs */ false,
        /* includesVectorDrawables */ false,
        /* bannedDuplicateResourceTypes */ EnumSet.noneOf(RType.class),
        /* manifestEntries */ ManifestEntries.empty());
    ruleResolver.addToIndex(aaptPackageResources);
    // Build the packageable collection with dep2 excluded from dexing, mirroring the
    // no_dx configuration handed to the graph enhancer above.
    AndroidPackageableCollection collection = new AndroidPackageableCollector(
        /* collectionRoot */ apkTarget,
        ImmutableSet.of(javaDep2BuildTarget),
        /* resourcesToExclude */ ImmutableSet.of(),
        new APKModuleGraph(
            TargetGraph.EMPTY,
            apkTarget,
            Optional.empty()))
        .addClasspathEntry(
            ((HasJavaClassHashes) javaDep1), new FakeSourcePath("ignored"))
        .addClasspathEntry(
            ((HasJavaClassHashes) javaDep2), new FakeSourcePath("ignored"))
        .addClasspathEntry(
            ((HasJavaClassHashes) javaLib), new FakeSourcePath("ignored"))
        .build();
    ImmutableMultimap<APKModule, DexProducedFromJavaLibrary> preDexedLibraries =
        graphEnhancer.createPreDexRulesForLibraries(
            /* additionalJavaLibrariesToDex */
            ImmutableList.of(),
            collection);
    // A fake uber R.java dex stands in for the real one produced by resource processing.
    BuildTarget fakeUberRDotJavaCompileTarget = BuildTargetFactory.newInstance(
        "//fake:uber_r_dot_java#compile");
    JavaLibrary fakeUberRDotJavaCompile =
        JavaLibraryBuilder.createBuilder(fakeUberRDotJavaCompileTarget).build(ruleResolver);
    BuildTarget fakeUberRDotJavaDexTarget = BuildTargetFactory.newInstance(
        "//fake:uber_r_dot_java#dex");
    DexProducedFromJavaLibrary fakeUberRDotJavaDex = new DexProducedFromJavaLibrary(
        new FakeBuildRuleParamsBuilder(fakeUberRDotJavaDexTarget).build(),
        fakeUberRDotJavaCompile);
    ruleResolver.addToIndex(fakeUberRDotJavaDex);
    BuildRule preDexMergeRule = graphEnhancer.createPreDexMergeRule(
        preDexedLibraries,
        fakeUberRDotJavaDex);
    BuildTarget dexMergeTarget =
        BuildTargetFactory.newInstance("//java/com/example:apk#dex_merge");
    BuildRule dexMergeRule = ruleResolver.getRule(dexMergeTarget);
    assertEquals(dexMergeRule, preDexMergeRule);
    BuildTarget javaDep1DexBuildTarget =
        BuildTarget.builder(javaDep1BuildTarget)
            .addFlavors(AndroidBinaryGraphEnhancer.DEX_FLAVOR)
            .build();
    BuildTarget javaDep2DexBuildTarget =
        BuildTarget.builder(javaDep2BuildTarget)
            .addFlavors(AndroidBinaryGraphEnhancer.DEX_FLAVOR)
            .build();
    BuildTarget javaLibDexBuildTarget =
        BuildTarget.builder(javaLibBuildTarget)
            .addFlavors(AndroidBinaryGraphEnhancer.DEX_FLAVOR)
            .build();
    assertThat(
        "There should be a #dex rule for dep1 and lib, but not dep2 because it is in the no_dx " +
        "list. And we should depend on uber_r_dot_java",
        Iterables.transform(dexMergeRule.getDeps(), BuildRule::getBuildTarget),
        Matchers.allOf(
            Matchers.not(Matchers.hasItem(javaDep1BuildTarget)),
            Matchers.hasItem(javaDep1DexBuildTarget),
            Matchers.not(Matchers.hasItem(javaDep2BuildTarget)),
            Matchers.not(Matchers.hasItem(javaDep2DexBuildTarget)),
            Matchers.hasItem(javaLibDexBuildTarget),
            Matchers.hasItem(fakeUberRDotJavaDex.getBuildTarget())));
  }

  /**
   * Runs graph enhancement with pre-dexing disabled and exopackage enabled, then checks
   * that every expected buildable (build config, resources filter, aapt, string assets,
   * exopackage ABI) is created and wired with the right dependencies.
   */
  @Test
  public void testAllBuildablesExceptPreDexRule() throws Exception {
    // Create an android_build_config() as a dependency of the android_binary().
    BuildTarget buildConfigBuildTarget = BuildTargetFactory.newInstance("//java/com/example:cfg");
    BuildRuleParams buildConfigParams = new FakeBuildRuleParamsBuilder(buildConfigBuildTarget)
        .build();
    BuildRuleResolver ruleResolver =
        new BuildRuleResolver(TargetGraph.EMPTY, new DefaultTargetNodeToBuildRuleTransformer());
    AndroidBuildConfigJavaLibrary buildConfigJavaLibrary = AndroidBuildConfigDescription
        .createBuildRule(
            buildConfigParams,
            "com.example.buck",
            /* values */ BuildConfigFields.empty(),
            /* valuesFile */ Optional.empty(),
            /* useConstantExpressions */ false,
            ANDROID_JAVAC_OPTIONS,
            false,
            ruleResolver);
    BuildTarget apkTarget = BuildTargetFactory.newInstance("//java/com/example:apk");
    BuildRuleParams originalParams = new FakeBuildRuleParamsBuilder(apkTarget)
        .setDeclaredDeps(ImmutableSortedSet.of(buildConfigJavaLibrary))
        .build();
    // set it up.
    Keystore keystore = createStrictMock(Keystore.class);
    AndroidBinaryGraphEnhancer graphEnhancer = new AndroidBinaryGraphEnhancer(
        originalParams,
        ruleResolver,
        ResourcesFilter.ResourceCompressionMode.ENABLED_WITH_STRINGS_AS_ASSETS,
        FilterResourcesStep.ResourceFilter.EMPTY_FILTER,
        /* bannedDuplicateResourceTypes */ EnumSet.noneOf(RType.class),
        Optional.empty(),
        /* locales */ ImmutableSet.of(),
        new FakeSourcePath("AndroidManifest.xml"),
        AndroidBinary.PackageType.DEBUG,
        /* cpuFilters */ ImmutableSet.of(),
        /* shouldBuildStringSourceMap */ false,
        /* shouldPreDex */ false,
        BuildTargets.getScratchPath(
            originalParams.getProjectFilesystem(),
            apkTarget,
            "%s/classes.dex"),
        DexSplitMode.NO_SPLIT,
        /* buildRulesToExcludeFromDex */ ImmutableSet.of(),
        /* resourcesToExclude */ ImmutableSet.of(),
        /* skipCrunchPngs */ false,
        /* includesVectorDrawables */ false,
        ANDROID_JAVAC_OPTIONS,
        false,
        EnumSet.of(ExopackageMode.SECONDARY_DEX),
        /* buildConfigValues */ BuildConfigFields.empty(),
        /* buildConfigValuesFiles */ Optional.empty(),
        /* xzCompressionLevel */ Optional.empty(),
        /* trimResourceIds */ false,
        /* keepResourcePattern */ Optional.empty(),
        /* nativePlatforms */ ImmutableMap.of(),
        /* nativeLibraryMergeMap */ Optional.empty(),
        /* nativeLibraryMergeGlue */ Optional.empty(),
        /* nativeLibraryMergeCodeGenerator */ Optional.empty(),
        AndroidBinary.RelinkerMode.DISABLED,
        MoreExecutors.newDirectExecutorService(),
        /* manifestEntries */ ManifestEntries.empty(),
        CxxPlatformUtils.DEFAULT_CONFIG,
        new APKModuleGraph(
            TargetGraph.EMPTY,
            originalParams.getBuildTarget(),
            Optional.empty()),
        new DxConfig(FakeBuckConfig.builder().build()));
    replay(keystore);
    AndroidGraphEnhancementResult result = graphEnhancer.createAdditionalBuildables();
    // Verify that android_build_config() was processed correctly.
    Flavor flavor = InternalFlavor.of("buildconfig_com_example_buck");
    final SourcePathResolver pathResolver =
        new SourcePathResolver(new SourcePathRuleFinder(ruleResolver));
    BuildTarget enhancedBuildConfigTarget = BuildTarget
        .builder(apkTarget)
        .addFlavors(flavor)
        .build();
    assertEquals(
        "The only classpath entry to dex should be the one from the AndroidBuildConfigJavaLibrary" +
        " created via graph enhancement.",
        ImmutableSet.of(
            BuildTargets
                .getGenPath(
                    originalParams.getProjectFilesystem(),
                    enhancedBuildConfigTarget,
                    "lib__%s__output")
                .resolve(enhancedBuildConfigTarget.getShortNameAndFlavorPostfix() + ".jar")),
        result.getClasspathEntriesToDex().stream()
            .map(pathResolver::getRelativePath)
            .collect(MoreCollectors.toImmutableSet()));
    BuildRule enhancedBuildConfigRule = ruleResolver.getRule(enhancedBuildConfigTarget);
    assertTrue(enhancedBuildConfigRule instanceof AndroidBuildConfigJavaLibrary);
    AndroidBuildConfigJavaLibrary enhancedBuildConfigJavaLibrary =
        (AndroidBuildConfigJavaLibrary) enhancedBuildConfigRule;
    AndroidBuildConfig androidBuildConfig = enhancedBuildConfigJavaLibrary.getAndroidBuildConfig();
    assertEquals("com.example.buck", androidBuildConfig.getJavaPackage());
    assertTrue(androidBuildConfig.isUseConstantExpressions());
    assertEquals(
        "IS_EXOPACKAGE defaults to false, but should now be true. DEBUG should still be true.",
        BuildConfigFields.fromFields(ImmutableList.of(
            BuildConfigFields.Field.of("boolean", "DEBUG", "true"),
            BuildConfigFields.Field.of("boolean", "IS_EXOPACKAGE", "true"),
            BuildConfigFields.Field.of("int", "EXOPACKAGE_FLAGS", "1"))),
        androidBuildConfig.getBuildConfigFields());
    ImmutableSortedSet<BuildRule> finalDeps = result.getFinalDeps();
    BuildRule computeExopackageDepsAbiRule =
        findRuleOfType(ruleResolver, ComputeExopackageDepsAbi.class);
    assertThat(finalDeps, Matchers.hasItem(computeExopackageDepsAbiRule));
    FilteredResourcesProvider resourcesProvider = result.getAaptPackageResources()
        .getFilteredResourcesProvider();
    assertTrue(resourcesProvider instanceof ResourcesFilter);
    BuildRule resourcesFilterRule = findRuleOfType(ruleResolver, ResourcesFilter.class);
    BuildRule aaptPackageResourcesRule =
        findRuleOfType(ruleResolver, AaptPackageResources.class);
    MoreAsserts.assertDepends(
        "AaptPackageResources must depend on ResourcesFilter",
        aaptPackageResourcesRule,
        resourcesFilterRule);
    BuildRule packageStringAssetsRule =
        findRuleOfType(ruleResolver, PackageStringAssets.class);
    // Fix: the message said "must depend on ResourcesFilter" but the assertion checks the
    // dependency on AaptPackageResources; the message now matches the check.
    MoreAsserts.assertDepends(
        "PackageStringAssets must depend on AaptPackageResources",
        packageStringAssetsRule,
        aaptPackageResourcesRule);
    assertFalse(result.getPreDexMerge().isPresent());
    MoreAsserts.assertDepends(
        "ComputeExopackageDepsAbi must depend on ResourcesFilter",
        computeExopackageDepsAbiRule,
        resourcesFilterRule);
    MoreAsserts.assertDepends(
        "ComputeExopackageDepsAbi must depend on PackageStringAssets",
        computeExopackageDepsAbiRule,
        packageStringAssetsRule);
    MoreAsserts.assertDepends(
        "ComputeExopackageDepsAbi must depend on AaptPackageResources",
        computeExopackageDepsAbiRule,
        aaptPackageResourcesRule);
    assertTrue(result.getPackageStringAssets().isPresent());
    assertTrue(result.getComputeExopackageDepsAbi().isPresent());
    verify(keystore);
  }

  /**
   * Verifies that android_resource rules declared as deps of the binary become dependencies
   * of the AaptPackageResources rule created by graph enhancement.
   */
  @Test
  public void testResourceRulesBecomeDepsOfAaptPackageResources() throws Exception {
    TargetNode<?, ?> resourceNode =
        AndroidResourceBuilder
            .createBuilder(BuildTargetFactory.newInstance("//:resource"))
            .setRDotJavaPackage("package")
            .setRes(Paths.get("res"))
            .build();
    TargetGraph targetGraph = TargetGraphFactory.newInstance(resourceNode);
    BuildRuleResolver ruleResolver =
        new BuildRuleResolver(targetGraph, new DefaultTargetNodeToBuildRuleTransformer());
    AndroidResource resource =
        (AndroidResource) ruleResolver.requireRule(resourceNode.getBuildTarget());
    // set it up.
    BuildTarget target = BuildTargetFactory.newInstance("//:target");
    BuildRuleParams originalParams =
        new FakeBuildRuleParamsBuilder(target)
            .setDeclaredDeps(ImmutableSortedSet.of(resource))
            .build();
    AndroidBinaryGraphEnhancer graphEnhancer = new AndroidBinaryGraphEnhancer(
        originalParams,
        ruleResolver,
        ResourcesFilter.ResourceCompressionMode.ENABLED_WITH_STRINGS_AS_ASSETS,
        FilterResourcesStep.ResourceFilter.EMPTY_FILTER,
        /* bannedDuplicateResourceTypes */ EnumSet.noneOf(RType.class),
        Optional.empty(),
        /* locales */ ImmutableSet.of(),
        new FakeSourcePath("AndroidManifest.xml"),
        AndroidBinary.PackageType.DEBUG,
        /* cpuFilters */ ImmutableSet.of(),
        /* shouldBuildStringSourceMap */ false,
        /* shouldPreDex */ false,
        BuildTargets.getScratchPath(
            originalParams.getProjectFilesystem(),
            target,
            "%s/classes.dex"),
        DexSplitMode.NO_SPLIT,
        /* buildRulesToExcludeFromDex */ ImmutableSet.of(),
        /* resourcesToExclude */ ImmutableSet.of(),
        /* skipCrunchPngs */ false,
        /* includesVectorDrawables */ false,
        ANDROID_JAVAC_OPTIONS,
        false,
        EnumSet.of(ExopackageMode.SECONDARY_DEX),
        /* buildConfigValues */ BuildConfigFields.empty(),
        /* buildConfigValuesFiles */ Optional.empty(),
        /* xzCompressionLevel */ Optional.empty(),
        /* trimResourceIds */ false,
        /* keepResourcePattern */ Optional.empty(),
        /* nativePlatforms */ ImmutableMap.of(),
        /* nativeLibraryMergeMap */ Optional.empty(),
        /* nativeLibraryMergeGlue */ Optional.empty(),
        /* nativeLibraryMergeCodeGenerator */ Optional.empty(),
        AndroidBinary.RelinkerMode.DISABLED,
        MoreExecutors.newDirectExecutorService(),
        /* manifestEntries */ ManifestEntries.empty(),
        CxxPlatformUtils.DEFAULT_CONFIG,
        new APKModuleGraph(
            TargetGraph.EMPTY,
            originalParams.getBuildTarget(),
            Optional.empty()),
        new DxConfig(FakeBuckConfig.builder().build()));
    graphEnhancer.createAdditionalBuildables();
    BuildRule aaptPackageResourcesRule = findRuleOfType(ruleResolver, AaptPackageResources.class);
    MoreAsserts.assertDepends(
        "AaptPackageResources must depend on resource rules",
        aaptPackageResourcesRule,
        resource);
  }

  /**
   * Verifies that when string resources are packaged as assets, the PackageStringAssets
   * rule depends on the ResourcesFilter rule.
   */
  @Test
  public void testPackageStringsDependsOnResourcesFilter() throws Exception {
    BuildRuleResolver ruleResolver =
        new BuildRuleResolver(TargetGraph.EMPTY, new DefaultTargetNodeToBuildRuleTransformer());
    // set it up.
    BuildTarget target = BuildTargetFactory.newInstance("//:target");
    BuildRuleParams originalParams =
        new FakeBuildRuleParamsBuilder(target)
            .build();
    AndroidBinaryGraphEnhancer graphEnhancer = new AndroidBinaryGraphEnhancer(
        originalParams,
        ruleResolver,
        ResourcesFilter.ResourceCompressionMode.ENABLED_WITH_STRINGS_AS_ASSETS,
        FilterResourcesStep.ResourceFilter.EMPTY_FILTER,
        /* bannedDuplicateResourceTypes */ EnumSet.noneOf(RType.class),
        Optional.empty(),
        /* locales */ ImmutableSet.of(),
        new FakeSourcePath("AndroidManifest.xml"),
        AndroidBinary.PackageType.DEBUG,
        /* cpuFilters */ ImmutableSet.of(),
        /* shouldBuildStringSourceMap */ false,
        /* shouldPreDex */ false,
        BuildTargets.getScratchPath(
            originalParams.getProjectFilesystem(),
            target,
            "%s/classes.dex"),
        DexSplitMode.NO_SPLIT,
        /* buildRulesToExcludeFromDex */ ImmutableSet.of(),
        /* resourcesToExclude */ ImmutableSet.of(),
        /* skipCrunchPngs */ false,
        /* includesVectorDrawables */ false,
        ANDROID_JAVAC_OPTIONS,
        false,
        EnumSet.of(ExopackageMode.SECONDARY_DEX),
        /* buildConfigValues */ BuildConfigFields.empty(),
        /* buildConfigValuesFiles */ Optional.empty(),
        /* xzCompressionLevel */ Optional.empty(),
        /* trimResourceIds */ false,
        /* keepResourcePattern */ Optional.empty(),
        /* nativePlatforms */ ImmutableMap.of(),
        /* nativeLibraryMergeMap */ Optional.empty(),
        /* nativeLibraryMergeGlue */ Optional.empty(),
        /* nativeLibraryMergeCodeGenerator */ Optional.empty(),
        AndroidBinary.RelinkerMode.DISABLED,
        MoreExecutors.newDirectExecutorService(),
        /* manifestEntries */ ManifestEntries.empty(),
        CxxPlatformUtils.DEFAULT_CONFIG,
        new APKModuleGraph(
            TargetGraph.EMPTY,
            originalParams.getBuildTarget(),
            Optional.empty()),
        new DxConfig(FakeBuckConfig.builder().build()));
    graphEnhancer.createAdditionalBuildables();
    ResourcesFilter resourcesFilter = findRuleOfType(ruleResolver, ResourcesFilter.class);
    PackageStringAssets packageStringAssetsRule =
        findRuleOfType(ruleResolver, PackageStringAssets.class);
    // Fix: the message said "must depend on AaptPackageResources" but the assertion (and the
    // test name) check the dependency on ResourcesFilter; the message now matches the check.
    MoreAsserts.assertDepends(
        "PackageStringAssets must depend on ResourcesFilter",
        packageStringAssetsRule,
        resourcesFilter);
  }

  /**
   * Verifies that rules producing resource source paths (e.g. a rule whose output is a res
   * directory) become dependencies of the ResourcesFilter rule.
   */
  @Test
  public void testResourceRulesDependOnRulesBehindResourceSourcePaths() throws Exception {
    BuildRuleResolver ruleResolver =
        new BuildRuleResolver(TargetGraph.EMPTY, new DefaultTargetNodeToBuildRuleTransformer());
    SourcePathRuleFinder ruleFinder = new SourcePathRuleFinder(ruleResolver);
    SourcePathResolver pathResolver = new SourcePathResolver(ruleFinder);
    FakeBuildRule resourcesDep =
        ruleResolver.addToIndex(
            new FakeBuildRule(
                BuildTargetFactory.newInstance("//:resource_dep"),
                pathResolver));
    resourcesDep.setOutputFile("foo");
    AndroidResource resource =
        ruleResolver.addToIndex(
            new AndroidResource(
                new FakeBuildRuleParamsBuilder("//:resources").build()
                    .copyAppendingExtraDeps(ImmutableSortedSet.of(resourcesDep)),
                ruleFinder,
                ImmutableSortedSet.of(),
                resourcesDep.getSourcePathToOutput(),
                ImmutableSortedMap.of(),
                null,
                null,
                ImmutableSortedMap.of(),
                new FakeSourcePath("manifest"),
                false));
    // set it up.
    BuildTarget target = BuildTargetFactory.newInstance("//:target");
    BuildRuleParams originalParams =
        new FakeBuildRuleParamsBuilder(target)
            .setDeclaredDeps(ImmutableSortedSet.of(resource))
            .build();
    AndroidBinaryGraphEnhancer graphEnhancer = new AndroidBinaryGraphEnhancer(
        originalParams,
        ruleResolver,
        ResourcesFilter.ResourceCompressionMode.ENABLED_WITH_STRINGS_AS_ASSETS,
        FilterResourcesStep.ResourceFilter.EMPTY_FILTER,
        /* bannedDuplicateResourceTypes */ EnumSet.noneOf(RType.class),
        Optional.empty(),
        /* locales */ ImmutableSet.of(),
        new FakeSourcePath("AndroidManifest.xml"),
        AndroidBinary.PackageType.DEBUG,
        /* cpuFilters */ ImmutableSet.of(),
        /* shouldBuildStringSourceMap */ false,
        /* shouldPreDex */ false,
        BuildTargets.getScratchPath(
            originalParams.getProjectFilesystem(),
            target,
            "%s/classes.dex"),
        DexSplitMode.NO_SPLIT,
        /* buildRulesToExcludeFromDex */ ImmutableSet.of(),
        /* resourcesToExclude */ ImmutableSet.of(),
        /* skipCrunchPngs */ false,
        /* includesVectorDrawables */ false,
        ANDROID_JAVAC_OPTIONS,
        false,
        EnumSet.of(ExopackageMode.SECONDARY_DEX),
        /* buildConfigValues */ BuildConfigFields.empty(),
        /* buildConfigValuesFiles */ Optional.empty(),
        /* xzCompressionLevel */ Optional.empty(),
        /* trimResourceIds */ false,
        /* keepResourcePattern */ Optional.empty(),
        /* nativePlatforms */ ImmutableMap.of(),
        /* nativeLibraryMergeMap */ Optional.empty(),
        /* nativeLibraryMergeGlue */ Optional.empty(),
        /* nativeLibraryMergeCodeGenerator */ Optional.empty(),
        AndroidBinary.RelinkerMode.DISABLED,
        MoreExecutors.newDirectExecutorService(),
        /* manifestEntries */ ManifestEntries.empty(),
        CxxPlatformUtils.DEFAULT_CONFIG,
        new APKModuleGraph(
            TargetGraph.EMPTY,
            originalParams.getBuildTarget(),
            Optional.empty()),
        new DxConfig(FakeBuckConfig.builder().build()));
    graphEnhancer.createAdditionalBuildables();
    ResourcesFilter resourcesFilter = findRuleOfType(ruleResolver, ResourcesFilter.class);
    MoreAsserts.assertDepends(
        "ResourcesFilter must depend on rules behind resources source paths",
        resourcesFilter,
        resourcesDep);
  }

  /**
   * Returns the first rule in the resolver assignable to {@code ruleClass}, failing the
   * test if none exists.
   */
  private <T extends BuildRule> T findRuleOfType(
      BuildRuleResolver ruleResolver,
      Class<T> ruleClass) {
    for (BuildRule rule : ruleResolver.getBuildRules()) {
      if (ruleClass.isAssignableFrom(rule.getClass())) {
        return ruleClass.cast(rule);
      }
    }
    fail("Could not find build rule of type " + ruleClass.getCanonicalName());
    return null; // unreachable: fail() throws AssertionError.
  }
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.runners.direct;
import static org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Preconditions.checkArgument;
import java.util.Collection;
import java.util.Collections;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import org.apache.beam.runners.core.DoFnRunners;
import org.apache.beam.runners.core.DoFnRunners.OutputManager;
import org.apache.beam.runners.core.KeyedWorkItem;
import org.apache.beam.runners.core.OutputAndTimeBoundedSplittableProcessElementInvoker;
import org.apache.beam.runners.core.OutputWindowedValue;
import org.apache.beam.runners.core.ProcessFnRunner;
import org.apache.beam.runners.core.SplittableParDoViaKeyedWorkItems.ProcessElements;
import org.apache.beam.runners.core.SplittableParDoViaKeyedWorkItems.ProcessFn;
import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.runners.AppliedPTransform;
import org.apache.beam.sdk.transforms.DoFnSchemaInformation;
import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
import org.apache.beam.sdk.transforms.windowing.PaneInfo;
import org.apache.beam.sdk.util.WindowedValue;
import org.apache.beam.sdk.values.KV;
import org.apache.beam.sdk.values.PCollection;
import org.apache.beam.sdk.values.PCollectionTuple;
import org.apache.beam.sdk.values.TupleTag;
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.cache.CacheLoader;
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.util.concurrent.MoreExecutors;
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.util.concurrent.ThreadFactoryBuilder;
import org.joda.time.Duration;
import org.joda.time.Instant;
class SplittableProcessElementsEvaluatorFactory<InputT, OutputT, RestrictionT, PositionT>
implements TransformEvaluatorFactory {
private final ParDoEvaluatorFactory<KeyedWorkItem<byte[], KV<InputT, RestrictionT>>, OutputT>
delegateFactory;
private final ScheduledExecutorService ses;
private final EvaluationContext evaluationContext;
private final PipelineOptions options;
SplittableProcessElementsEvaluatorFactory(
EvaluationContext evaluationContext, PipelineOptions options) {
this.evaluationContext = evaluationContext;
this.options = options;
this.delegateFactory =
new ParDoEvaluatorFactory<>(
evaluationContext,
SplittableProcessElementsEvaluatorFactory
.<InputT, OutputT, RestrictionT>processFnRunnerFactory(),
new CacheLoader<AppliedPTransform<?, ?, ?>, DoFnLifecycleManager>() {
@Override
public DoFnLifecycleManager load(final AppliedPTransform<?, ?, ?> application) {
checkArgument(
ProcessElements.class.isInstance(application.getTransform()),
"No know extraction of the fn from " + application);
final ProcessElements<InputT, OutputT, RestrictionT, PositionT> transform =
(ProcessElements<InputT, OutputT, RestrictionT, PositionT>)
application.getTransform();
return DoFnLifecycleManager.of(transform.newProcessFn(transform.getFn()));
}
},
options);
this.ses =
Executors.newSingleThreadScheduledExecutor(
new ThreadFactoryBuilder()
.setThreadFactory(MoreExecutors.platformThreadFactory())
.setNameFormat(
"direct-splittable-process-element-checkpoint-executor_" + hashCode())
.build());
}
@Override
public <T> TransformEvaluator<T> forApplication(
AppliedPTransform<?, ?, ?> application, CommittedBundle<?> inputBundle) throws Exception {
@SuppressWarnings({"unchecked", "rawtypes"})
TransformEvaluator<T> evaluator =
(TransformEvaluator<T>)
createEvaluator((AppliedPTransform) application, (CommittedBundle) inputBundle);
return evaluator;
}
@Override
public void cleanup() throws Exception {
ses.shutdownNow(); // stop before cleaning
delegateFactory.cleanup();
}
/**
 * Creates a {@link TransformEvaluator} for a splittable {@code ProcessElements}
 * transform by delegating to the ParDo evaluator factory and then wiring the
 * resulting {@link ProcessFn} with state, timers, output, and a time/output
 * bounded element invoker.
 *
 * <p>The unchecked/raw casts are required to adapt the keyed-work-item view of
 * the input to the delegate factory's generic signature; they are safe by
 * construction of this factory (hence the suppression).
 *
 * @param application the applied ProcessElements transform
 * @param inputBundle the committed input bundle whose key scopes state/timers
 * @return an evaluator whose ProcessFn has been fully configured
 * @throws Exception if the delegate factory fails to create the evaluator
 */
@SuppressWarnings({"unchecked", "rawtypes"})
private TransformEvaluator<KeyedWorkItem<byte[], KV<InputT, RestrictionT>>> createEvaluator(
    AppliedPTransform<
            PCollection<KeyedWorkItem<byte[], KV<InputT, RestrictionT>>>,
            PCollectionTuple,
            ProcessElements<InputT, OutputT, RestrictionT, PositionT>>
        application,
    CommittedBundle<InputT> inputBundle)
    throws Exception {
  final ProcessElements<InputT, OutputT, RestrictionT, PositionT> transform =
      application.getTransform();
  // Delegate the heavy lifting (DoFn lifecycle, output management) to the
  // regular ParDo evaluator factory.
  final DoFnLifecycleManagerRemovingTransformEvaluator<
          KeyedWorkItem<byte[], KV<InputT, RestrictionT>>>
      evaluator =
          delegateFactory.createEvaluator(
              (AppliedPTransform) application,
              (PCollection<KeyedWorkItem<byte[], KV<InputT, RestrictionT>>>)
                  inputBundle.getPCollection(),
              inputBundle.getKey(),
              application.getTransform().getSideInputs(),
              application.getTransform().getMainOutputTag(),
              application.getTransform().getAdditionalOutputTags().getAll(),
              DoFnSchemaInformation.create(),
              Collections.emptyMap());
  final ParDoEvaluator<KeyedWorkItem<byte[], KV<InputT, RestrictionT>>> pde =
      evaluator.getParDoEvaluator();
  // Extract the ProcessFn the runner wraps so it can be configured below.
  final ProcessFn<InputT, OutputT, RestrictionT, PositionT> processFn =
      (ProcessFn<InputT, OutputT, RestrictionT, PositionT>)
          ProcessFnRunner.class.cast(pde.getFnRunner()).getFn();
  final DirectExecutionContext.DirectStepContext stepContext = pde.getStepContext();
  // State and timers are scoped per step context; the key argument is ignored here.
  processFn.setStateInternalsFactory(key -> stepContext.stateInternals());
  processFn.setTimerInternalsFactory(key -> stepContext.timerInternals());
  // Adapter that routes windowed outputs back through the ParDo evaluator's
  // output manager, for both the main and additional output tags.
  OutputWindowedValue<OutputT> outputWindowedValue =
      new OutputWindowedValue<OutputT>() {
        private final OutputManager outputManager = pde.getOutputManager();

        @Override
        public void outputWindowedValue(
            OutputT output,
            Instant timestamp,
            Collection<? extends BoundedWindow> windows,
            PaneInfo pane) {
          outputManager.output(
              transform.getMainOutputTag(), WindowedValue.of(output, timestamp, windows, pane));
        }

        @Override
        public <AdditionalOutputT> void outputWindowedValue(
            TupleTag<AdditionalOutputT> tag,
            AdditionalOutputT output,
            Instant timestamp,
            Collection<? extends BoundedWindow> windows,
            PaneInfo pane) {
          outputManager.output(tag, WindowedValue.of(output, timestamp, windows, pane));
        }
      };
  processFn.setProcessElementInvoker(
      new OutputAndTimeBoundedSplittableProcessElementInvoker<>(
          transform.getFn(),
          options,
          outputWindowedValue,
          evaluationContext.createSideInputReader(transform.getSideInputs()),
          ses,
          // Setting small values here to stimulate frequent checkpointing and better exercise
          // splittable DoFn's in that respect.
          100,
          Duration.standardSeconds(1)));
  return evaluator;
}
/**
 * Returns a {@link ParDoEvaluator.DoFnRunnerFactory} that builds a process-fn
 * runner for splittable DoFn execution.
 *
 * <p>The returned factory forwards every argument it receives unchanged to
 * {@code DoFnRunners.newProcessFnRunner}, after (raw) casting the supplied fn
 * to {@link ProcessFn} — the cast is safe because this factory is only ever
 * used with ProcessElements transforms.
 */
private static <InputT, OutputT, RestrictionT>
    ParDoEvaluator.DoFnRunnerFactory<KeyedWorkItem<byte[], KV<InputT, RestrictionT>>, OutputT>
        processFnRunnerFactory() {
  return (options,
      fn,
      sideInputs,
      sideInputReader,
      outputManager,
      mainOutputTag,
      additionalOutputTags,
      stepContext,
      inputCoder,
      outputCoders,
      windowingStrategy,
      doFnSchemaInformation,
      sideInputMapping) -> {
    ProcessFn<InputT, OutputT, RestrictionT, ?> processFn = (ProcessFn) fn;
    return DoFnRunners.newProcessFnRunner(
        processFn,
        options,
        sideInputs,
        sideInputReader,
        outputManager,
        mainOutputTag,
        additionalOutputTags,
        stepContext,
        inputCoder,
        outputCoders,
        windowingStrategy,
        doFnSchemaInformation,
        sideInputMapping);
  };
}
}
|
|
/*
* Copyright 2014 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
import com.google.javascript.jscomp.CompilerOptions.LanguageMode;
/**
* Test case for {@link Es6RenameVariablesInParamLists}.
*
* @author moz@google.com (Michael Zhou)
*/
public final class Es6RenameVariablesInParamListsTest extends CompilerTestCase {

  @Override
  public void setUp() {
    // The pass operates on ES6 default-parameter syntax, so the input language
    // must accept ES6.
    // NOTE(review): does not call super.setUp() — confirm the base class does
    // not require it for this suite.
    setAcceptedLanguage(LanguageMode.ECMASCRIPT6);
    runTypeCheckAfterProcessing = true;
  }

  @Override
  protected CompilerOptions getOptions() {
    CompilerOptions options = super.getOptions();
    // Output is lowered to ES3 after the rename pass runs.
    options.setLanguageOut(LanguageMode.ECMASCRIPT3);
    return options;
  }

  @Override
  public CompilerPass getProcessor(Compiler compiler) {
    return new Es6RenameVariablesInParamLists(compiler);
  }

  @Override
  protected int getNumRepetitions() {
    // Run the pass exactly once per test case.
    return 1;
  }

  /**
   * Variables declared in a function body that shadow a name referenced from a
   * default-parameter expression must be renamed (e.g. {@code x} -> {@code x$0}).
   */
  public void testRenameVar() {
    test("var x = 5; function f(y=x) { var x; }",
        "var x = 5; function f(y=x) { var x$0; }");

    test(
        LINE_JOINER.join(
            "function x() {}",
            "function f(y=(function y() { return x(); }())) {",
            "  var x; y++;",
            "}"),
        LINE_JOINER.join(
            "function x() {}",
            "function f(y=(function y() { return x(); }())) {",
            "  var x$0; y++;",
            "}"));

    // Block-scoped redeclarations inside the body keep their own name; only the
    // function-scoped shadow is renamed.
    test(
        LINE_JOINER.join(
            "function x() {}",
            "function f(y=(function y() { return x(); }())) {",
            "  var x;",
            "  { let x; x++; }",
            "  x++;",
            "}"),
        LINE_JOINER.join(
            "function x() {}",
            "function f(y=(function y() { return x(); }())) {",
            "  var x$0;",
            "  { let x; x++; }",
            "  x$0++;",
            "}"));

    test(
        LINE_JOINER.join(
            "function x() {}",
            "function f(y=(function y() { return x(); }())) {",
            "  var x; { x++ };",
            "}"),
        LINE_JOINER.join(
            "function x() {}",
            "function f(y=(function y() { return x(); }())) {",
            "  var x$0; { x$0++ };",
            "}"));

    // Multiple shadowed names each get a distinct suffix; inner functions that
    // declare their own x are untouched while references to the renamed y follow.
    test(
        LINE_JOINER.join(
            "function f(a = x, b = y) {",
            "  var y, x;",
            "  return function() { var x = () => y };",
            "}"),
        LINE_JOINER.join(
            "function f(a = x, b = y) {",
            "  var y$0, x$1;",
            "  return function() { var x = () => y$0 };",
            "}"));

    test(
        LINE_JOINER.join(
            "var x = 4;", "function f(a=x) { let x = 5; { let x = 99; } return a + x; }"),
        LINE_JOINER.join(
            "var x = 4;", "function f(a=x) { let x$0 = 5; { let x = 99; } return a + x$0; }"));
  }

  /** Function declarations that shadow a default-parameter reference are renamed too. */
  public void testRenameFunction() {
    test(
        LINE_JOINER.join(
            "function x() {}", "function f(y=x()) {", "  x();", "  function x() {}", "}"),
        LINE_JOINER.join(
            "function x() {}", "function f(y=x()) {", "  x$0();", "  function x$0() {}", "}"));
  }

  /**
   * Names assigned in the default-parameter expression without declaration are
   * globals; body declarations of the same name are renamed, but bodies that
   * only *reference* the global stay unchanged.
   */
  public void testGlobalDeclaration() {
    test(
        LINE_JOINER.join(
            "function x() {}",
            "function f(y=(function y() { w = 5; return w; }())) {",
            "  let x = w;",
            "  var w = 3;",
            "  return w;",
            "}"),
        LINE_JOINER.join(
            "function x() {}",
            "function f(y=(function y() { w = 5; return w; }())) {",
            "  let x = w$0;",
            "  var w$0 = 3;",
            "  return w$0;",
            "}"));

    // No body declaration of w -> nothing to rename.
    testSame(
        LINE_JOINER.join(
            "function x() {}",
            "function f(y=(function () { w = 5; return w; }())) {",
            "  w;",
            "  return w;",
            "}"));

    test(
        LINE_JOINER.join(
            "function x() {}",
            "function f(y=(function () { w = 5; return w; }())) {",
            "  w;",
            "  var w = 3;",
            "  return w;",
            "}"),
        LINE_JOINER.join(
            "function x() {}",
            "function f(y=(function () { w = 5; return w; }())) {",
            "  w$0;",
            "  var w$0 = 3;",
            "  return w$0;",
            "}"));

    test(
        LINE_JOINER.join(
            "function x() {}",
            "function f(y=(function () { w = 5; return w; }())) {",
            "  w;",
            "  let w = 3;",
            "  return w;",
            "}"),
        LINE_JOINER.join(
            "function x() {}",
            "function f(y=(function () { w = 5; return w; }())) {",
            "  w$0;",
            "  let w$0 = 3;",
            "  return w$0;",
            "}"));
  }

  /** Each default parameter contributes its own set of referenced names to protect. */
  public void testMultipleDefaultParams() {
    test(
        LINE_JOINER.join(
            "function x() {}",
            "var y = 1;",
            "function f(z=x, w=y) {",
            "  let x = y;",
            "  var y = 3;",
            "  return w;",
            "}"),
        LINE_JOINER.join(
            "function x() {}",
            "var y = 1;",
            "function f(z=x, w=y) {",
            "  let x$0 = y$1;",
            "  var y$1 = 3;",
            "  return w;",
            "}"));

    // `let y` in its own block is left alone; the hoisted `var y` is renamed.
    test(
        LINE_JOINER.join(
            "function x() {}",
            "var y = 1;",
            "function f(z=x, w=y) {",
            "  var x;",
            "  { let y; y++; }",
            "  { var y; y++; }",
            "  x++;",
            "}"),
        LINE_JOINER.join(
            "function x() {}",
            "var y = 1;",
            "function f(z=x, w=y) {",
            "  var x$0;",
            "  { let y; y++; }",
            "  { var y$1; y$1++; }",
            "  x$0++;",
            "}"));
  }

  /** Arrow functions with default parameters get the same treatment. */
  public void testArrow() {
    testSame("var x = true; var f = (a=x) => x;");

    test("var x = true; var f = (a=x) => { var x = false; return a; }",
        "var x = true; var f = (a=x) => { var x$0 = false; return a; }");
  }
}
|
|
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package com.eas.designer.explorer.server;
import com.eas.client.resourcepool.DatasourcesArgsConsumer;
import com.eas.designer.application.project.PlatypusProject;
import com.eas.designer.application.project.PlatypusProjectSettings;
import com.eas.designer.explorer.project.ProjectRunner;
import static com.eas.designer.explorer.project.ProjectRunner.getCommandLineStr;
import static com.eas.designer.explorer.project.ProjectRunner.setLogging;
import com.eas.server.PlatypusServer;
import com.eas.server.ServerMain;
import java.io.File;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Future;
import javax.swing.JComponent;
import javax.swing.JPanel;
import javax.swing.event.ChangeListener;
import org.netbeans.api.db.explorer.ConnectionManager;
import org.netbeans.api.db.explorer.DatabaseConnection;
import org.netbeans.api.extexecution.ExecutionDescriptor;
import org.netbeans.api.extexecution.ExecutionService;
import org.netbeans.api.extexecution.ExternalProcessBuilder;
import org.netbeans.spi.server.ServerInstanceImplementation;
import org.openide.nodes.Node;
import org.openide.util.ChangeSupport;
import org.openide.util.NbBundle;
import org.openide.windows.InputOutput;
/**
* Platypus standalone development server.
*
* @author vv
*/
public final class PlatypusServerInstance implements Server, ServerInstanceImplementation {

    /** Display name of the server kind. */
    private static final String PLATYPUS_SERVER_NAME = "Platypus Server"; // NOI18N
    /** Display name of this development-server instance. */
    private static final String PLATYPUS_SERVER_INSTANCE_NAME = "Platypus Server"; // NOI18N
    /** Wildcard address: listen on all local interfaces. */
    private static final String ANY_LOCAL_ADRESS = "0.0.0.0";// NOI18N
    private static final String ARGUMENT_SEPARATOR = ":";// NOI18N
    /** File name of the server executable inside the bin directory. */
    private static final String SERVER_APP_NAME = "Server.jar"; //NOI18N

    // Lazily created customizer panel; creation is guarded by synchronized(this)
    // in getCustomizer().
    private JPanel customizer;
    // Handle of the running server process. Written by start()/stop() and nulled
    // by the postExecution callback on the execution thread, so it must be
    // volatile for stop() to observe the latest value.
    private volatile Future<Integer> serverRunTask;
    private volatile ServerState serverState = ServerState.STOPPED;
    private PlatypusProject project;
    private final ChangeSupport changeSupport = new ChangeSupport(this);

    @Override
    public Node getFullNode() {
        return new PlatypusServerNode(this);
    }

    @Override
    public Node getBasicNode() {
        return new PlatypusServerNode(this);
    }

    /**
     * Returns the customizer panel, creating it on first use.
     */
    @Override
    public JComponent getCustomizer() {
        synchronized (this) {
            if (customizer == null) {
                customizer = new PlatypusServerCustomizer(this);
            }
            return customizer;
        }
    }

    @Override
    public String getDisplayName() {
        return PLATYPUS_SERVER_INSTANCE_NAME;
    }

    @Override
    public String getServerDisplayName() {
        return PLATYPUS_SERVER_NAME;
    }

    @Override
    public boolean isRemovable() {
        return false;
    }

    @Override
    public void remove() {
        //do not remove dev server
    }

    /** @return the project this server instance was started for, or null if never started */
    public PlatypusProject getProject() {
        return project;
    }

    @Override
    public ServerState getServerState() {
        return serverState;
    }

    /**
     * Updates the server state and notifies all registered change listeners.
     *
     * @param aServerState the new state
     */
    @Override
    public void setServerState(ServerState aServerState) {
        serverState = aServerState;
        changeSupport.fireChange();
    }

    public void addChangeListener(final ChangeListener listener) {
        changeSupport.addChangeListener(listener);
    }

    public void removeChangeListener(final ChangeListener listener) {
        changeSupport.removeChangeListener(listener);
    }

    /**
     * Builds the command line for the Platypus server and launches it as an
     * external process.
     *
     * @param aProject the project whose settings and sources configure the server
     * @param binDir directory containing the server executable
     * @param debug if true, JVM debug arguments are added
     * @return true (the launch is asynchronous; failures surface via the output window)
     */
    public boolean start(PlatypusProject aProject, File binDir, boolean debug) {
        project = aProject;
        assert project != null;
        setServerState(ServerState.STARTING);
        final InputOutput io = project.getOutputWindowIO();
        ExecutionDescriptor descriptor = new ExecutionDescriptor()
                .frontWindow(true)
                .controllable(true)
                .preExecution(() -> {
                })
                .postExecution(() -> {
                    // Runs when the external process terminates for any reason.
                    setServerState(ServerState.STOPPED);
                    serverRunTask = null;
                    io.getOut().println(NbBundle.getMessage(PlatypusServerInstance.class, "MSG_Server_Stopped"));//NOI18N
                    io.getOut().println();
                });
        List<String> arguments = new ArrayList<>();
        // JVM options configured for the server run, if any.
        if (project.getSettings().getRunServerVmOptions() != null && !project.getSettings().getRunServerVmOptions().isEmpty()) {
            ProjectRunner.addArguments(arguments, project.getSettings().getRunServerVmOptions());
            io.getOut().println(String.format(NbBundle.getMessage(PlatypusServerInstance.class, "MSG_VM_Run_Options"),//NOI18N
                    project.getSettings().getRunServerVmOptions()));
        }
        if (debug) {
            ProjectRunner.setDebugArguments(arguments, project.getSettings().getDebugServerPort());
        }
        // Report the level that setLogging() actually applies (the server log
        // level, not the client one).
        io.getOut().println(String.format(NbBundle.getMessage(ProjectRunner.class, "MSG_Logging_Level"), project.getSettings().getServerLogLevel()));//NOI18N
        setLogging(arguments, project.getSettings().getServerLogLevel());
        PlatypusProjectSettings pps = project.getSettings();
        arguments.add(ProjectRunner.OPTION_PREFIX + ProjectRunner.CLASSPATH_OPTION_NAME);
        String classPath = ProjectRunner.getExtendedClasspath(ProjectRunner.getApiClasspath(getExecutablePath(binDir)));
        arguments.add("\"" + classPath + "\"");
        arguments.add(ServerMain.class.getName());
        // Iterate through all datasources, registered in the designer.
        // Apply them as datasources in considered server.
        DatabaseConnection defaultDatabaseConnection = null;
        DatabaseConnection[] dataSources = ConnectionManager.getDefault().getConnections();
        for (DatabaseConnection connection : dataSources) {
            if (ProjectRunner.isConnectionValid(connection)) {
                // Null-safe comparison with the project's default datasource name.
                if (connection.getDisplayName() == null ? pps.getDefaultDataSourceName() == null : connection.getDisplayName().equals(pps.getDefaultDataSourceName())) {
                    defaultDatabaseConnection = connection;
                }
                arguments.add(ProjectRunner.OPTION_PREFIX + DatasourcesArgsConsumer.DB_RESOURCE_CONF_PARAM);
                arguments.add(connection.getDisplayName());// Hack because of netbeans
                arguments.add(ProjectRunner.OPTION_PREFIX + DatasourcesArgsConsumer.DB_URL_CONF_PARAM);
                arguments.add(connection.getDatabaseURL());
                arguments.add(ProjectRunner.OPTION_PREFIX + DatasourcesArgsConsumer.DB_USERNAME_CONF_PARAM);
                arguments.add(connection.getUser());
                if (connection.getPassword() != null && !connection.getPassword().isEmpty()) {
                    arguments.add(ProjectRunner.OPTION_PREFIX + DatasourcesArgsConsumer.DB_PASSWORD_CONF_PARAM);
                    arguments.add(connection.getPassword());
                }
                if (connection.getSchema() != null && !connection.getSchema().isEmpty()) {
                    arguments.add(ProjectRunner.OPTION_PREFIX + DatasourcesArgsConsumer.DB_SCHEMA_CONF_PARAM);
                    arguments.add(connection.getSchema());
                }
            } else {
                io.getErr().println(NbBundle.getMessage(ProjectRunner.class, "MSG_Invalid_Database", connection.getDisplayName()));
            }
        }
        if (defaultDatabaseConnection != null) {
            arguments.add(ProjectRunner.OPTION_PREFIX + ServerMain.DEF_DATASOURCE_CONF_PARAM);
            arguments.add(pps.getDefaultDataSourceName());
        } else if (pps.getDefaultDataSourceName() != null && !pps.getDefaultDataSourceName().isEmpty()) {
            // A default datasource was configured but no matching connection exists.
            io.getErr().println(NbBundle.getMessage(PlatypusServerInstance.class, "MSG_Missing_App_Database"));
        }
        arguments.add(ProjectRunner.OPTION_PREFIX + ServerMain.APP_ELEMENT_CONF_PARAM);
        arguments.add(PlatypusProjectSettings.START_JS_FILE_NAME);
        arguments.add(ProjectRunner.OPTION_PREFIX + ServerMain.APP_URL_CONF_PARAM);
        arguments.add(project.getProjectDirectory().toURI().toASCIIString());
        io.getOut().println(String.format(NbBundle.getMessage(PlatypusServerInstance.class, "MSG_App_Sources"),//NOI18N
                project.getProjectDirectory().toURI().toASCIIString()));
        // Only add interface/protocol arguments when the user has not already
        // specified them through the run options.
        if (!ProjectRunner.isSetByOption(ServerMain.IFACE_CONF_PARAM, project.getSettings().getRunClientOptions())) {
            arguments.add(ProjectRunner.OPTION_PREFIX + ServerMain.IFACE_CONF_PARAM);
            arguments.add(getListenInterfaceArgument(project.getSettings()));
            io.getOut().println(String.format(NbBundle.getMessage(PlatypusServerInstance.class, "MSG_Server_Interface"),//NOI18N
                    getListenInterfaceArgument(project.getSettings())));
        }
        if (!ProjectRunner.isSetByOption(ServerMain.PROTOCOLS_CONF_PARAM, project.getSettings().getRunClientOptions())) {
            arguments.add(ProjectRunner.OPTION_PREFIX + ServerMain.PROTOCOLS_CONF_PARAM);
            arguments.add(getProtocol(project.getSettings()));
            io.getOut().println(String.format(NbBundle.getMessage(PlatypusServerInstance.class, "MSG_Server_Protocol"), getProtocol(project.getSettings())));//NOI18N
        }
        if (project.getSettings().getRunClientOptions() != null && !project.getSettings().getRunClientOptions().isEmpty()) {
            ProjectRunner.addArguments(arguments, project.getSettings().getRunClientOptions());
            io.getOut().println(String.format(NbBundle.getMessage(PlatypusServerInstance.class, "MSG_Run_Options"),//NOI18N
                    project.getSettings().getRunClientOptions()));
        }
        ExternalProcessBuilder processBuilder = new ExternalProcessBuilder(ProjectRunner.JVM_RUN_COMMAND_NAME);
        for (String argument : arguments) {
            processBuilder = processBuilder.addArgument(argument);
        }
        ExecutionService service = ExecutionService.newService(processBuilder, descriptor, "Platypus Server");
        io.getOut().println(NbBundle.getMessage(ProjectRunner.class, "MSG_Command_Line") + getCommandLineStr(arguments));//NOI18N
        Future<Integer> runTask = service.run();
        serverRunTask = runTask;
        return true;
    }

    /**
     * Cancels the running server process, if any.
     */
    public void stop() {
        // Read the field once: the postExecution callback may null it concurrently.
        Future<Integer> runTask = serverRunTask;
        if (runTask != null) {
            runTask.cancel(true);
            serverRunTask = null;
        }
    }

    /**
     * Resolves the absolute path of the server executable.
     *
     * @param aBinDir directory expected to contain {@link #SERVER_APP_NAME}
     * @throws IllegalStateException if the executable is missing
     */
    private static String getExecutablePath(File aBinDir) {
        File clientAppExecutable = new File(aBinDir, SERVER_APP_NAME);
        if (!clientAppExecutable.exists()) {
            throw new IllegalStateException("Platypus Server executable does not exist.");
        }
        return clientAppExecutable.getAbsolutePath();
    }

    /** Builds the listen-interface argument, e.g. {@code 0.0.0.0:8080}. */
    private static String getListenInterfaceArgument(PlatypusProjectSettings settings) {
        return ANY_LOCAL_ADRESS + ARGUMENT_SEPARATOR + settings.getServerPort();
    }

    /** Builds the protocol mapping argument, e.g. {@code 8080:platypus}. */
    private static String getProtocol(PlatypusProjectSettings settings) {
        return settings.getServerPort() + ARGUMENT_SEPARATOR + PlatypusServer.DEFAULT_PROTOCOL;
    }
}
|
|
// Copyright (C) 2013 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.server.mail;
import com.google.common.base.Strings;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.google.gerrit.common.data.GroupDescription;
import com.google.gerrit.common.data.GroupDescriptions;
import com.google.gerrit.common.data.GroupReference;
import com.google.gerrit.reviewdb.client.Account;
import com.google.gerrit.reviewdb.client.AccountGroup;
import com.google.gerrit.reviewdb.client.AccountGroupMember;
import com.google.gerrit.reviewdb.client.AccountProjectWatch;
import com.google.gerrit.reviewdb.client.AccountProjectWatch.NotifyType;
import com.google.gerrit.reviewdb.client.Project;
import com.google.gerrit.reviewdb.server.ReviewDb;
import com.google.gerrit.server.CurrentUser;
import com.google.gerrit.server.IdentifiedUser;
import com.google.gerrit.server.git.NotifyConfig;
import com.google.gerrit.server.project.ProjectState;
import com.google.gerrit.server.query.Predicate;
import com.google.gerrit.server.query.QueryParseException;
import com.google.gerrit.server.query.change.ChangeData;
import com.google.gerrit.server.query.change.ChangeQueryBuilder;
import com.google.gerrit.server.query.change.SingleGroupUser;
import com.google.gwtorm.server.OrmException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
/**
 * Computes the set of users and e-mail addresses watching a project, combining
 * per-account project watches with the project hierarchy's notify configs.
 */
public class ProjectWatch {
  private static final Logger log = LoggerFactory.getLogger(ProjectWatch.class);

  protected final EmailArguments args;
  protected final ProjectState projectState;
  protected final Project.NameKey project;
  // Change used to evaluate watch filter expressions.
  protected final ChangeData changeData;

  public ProjectWatch(EmailArguments args, Project.NameKey project,
      ProjectState projectState, ChangeData changeData) {
    this.args = args;
    this.project = project;
    this.projectState = projectState;
    this.changeData = changeData;
  }

  /**
   * Returns all watchers that are relevant for the given notify type.
   *
   * <p>Order of precedence: explicit watches on this project first, then
   * watches on the "All-Projects" parent (only for accounts without a
   * project-specific watch), then notify configs from the project tree.
   */
  public final Watchers getWatchers(NotifyType type) throws OrmException {
    Watchers matching = new Watchers();
    Set<Account.Id> projectWatchers = new HashSet<Account.Id>();

    for (AccountProjectWatch w : args.db.get().accountProjectWatches()
        .byProject(project)) {
      if (w.isNotify(type)) {
        projectWatchers.add(w.getAccountId());
        add(matching, w);
      }
    }

    for (AccountProjectWatch w : args.db.get().accountProjectWatches()
        .byProject(args.allProjectsName)) {
      // A project-specific watch overrides the All-Projects watch.
      if (!projectWatchers.contains(w.getAccountId()) && w.isNotify(type)) {
        add(matching, w);
      }
    }

    for (ProjectState state : projectState.tree()) {
      for (NotifyConfig nc : state.getConfig().getNotifyConfigs()) {
        if (nc.isNotify(type)) {
          try {
            add(matching, nc, state.getProject().getNameKey());
          } catch (QueryParseException e) {
            // A broken filter in project config must not abort notification
            // delivery; log and continue with the remaining configs.
            log.warn(String.format(
                "Project %s has invalid notify %s filter \"%s\"",
                state.getProject().getName(), nc.getName(),
                nc.getFilter()), e);
          }
        }
      }
    }

    return matching;
  }

  /** Recipients grouped by the mail header (TO/CC/BCC) they should appear in. */
  public static class Watchers {
    // NOTE: this nested class shadows java.util.List inside Watchers.
    static class List {
      protected final Set<Account.Id> accounts = Sets.newHashSet();
      protected final Set<Address> emails = Sets.newHashSet();
    }
    protected final List to = new List();
    protected final List cc = new List();
    protected final List bcc = new List();

    List list(NotifyConfig.Header header) {
      switch (header) {
        case TO:
          return to;
        case CC:
          return cc;
        default:
        case BCC:
          return bcc;
      }
    }
  }

  /**
   * Adds recipients from a notify config whose filter matches the change:
   * group members (if the group is visible to the filter) and literal addresses.
   */
  private void add(Watchers matching, NotifyConfig nc, Project.NameKey project)
      throws OrmException, QueryParseException {
    for (GroupReference ref : nc.getGroups()) {
      CurrentUser user = new SingleGroupUser(args.capabilityControlFactory,
          ref.getUUID());
      if (filterMatch(user, nc.getFilter())) {
        deliverToMembers(matching.list(nc.getHeader()), ref.getUUID());
      }
    }

    if (!nc.getAddresses().isEmpty()) {
      // Plain addresses have no associated user; evaluate the filter anonymously.
      if (filterMatch(null, nc.getFilter())) {
        matching.list(nc.getHeader()).emails.addAll(nc.getAddresses());
      }
    }
  }

  /**
   * Expands a group (and its included groups, breadth is irrelevant — a simple
   * stack-based traversal with a seen-set prevents cycles) into the recipient
   * list. A group with its own e-mail address is added as that address instead
   * of being expanded.
   */
  private void deliverToMembers(
      Watchers.List matching,
      AccountGroup.UUID startUUID) throws OrmException {
    ReviewDb db = args.db.get();
    Set<AccountGroup.UUID> seen = Sets.newHashSet();
    List<AccountGroup.UUID> q = Lists.newArrayList();

    seen.add(startUUID);
    q.add(startUUID);
    while (!q.isEmpty()) {
      AccountGroup.UUID uuid = q.remove(q.size() - 1);
      GroupDescription.Basic group = args.groupBackend.get(uuid);
      if (!Strings.isNullOrEmpty(group.getEmailAddress())) {
        // If the group has an email address, do not expand membership.
        matching.emails.add(new Address(group.getEmailAddress()));
        continue;
      }

      AccountGroup ig = GroupDescriptions.toAccountGroup(group);
      if (ig == null) {
        // Non-internal groups cannot be expanded by the server.
        continue;
      }

      for (AccountGroupMember m : db.accountGroupMembers().byGroup(ig.getId())) {
        matching.accounts.add(m.getAccountId());
      }
      for (AccountGroup.UUID m : args.groupIncludes.membersOf(uuid)) {
        if (seen.add(m)) {
          q.add(m);
        }
      }
    }
  }

  /** Adds a single account watch to BCC when its filter matches the change. */
  private void add(Watchers matching, AccountProjectWatch w)
      throws OrmException {
    IdentifiedUser user =
        args.identifiedUserFactory.create(args.db, w.getAccountId());

    try {
      if (filterMatch(user, w.getFilter())) {
        matching.bcc.accounts.add(w.getAccountId());
      }
    } catch (QueryParseException e) {
      // Ignore broken filter expressions.
    }
  }

  /**
   * Evaluates a watch filter against the change.
   *
   * @param user the user for visibility checks, or null to skip the
   *        visibility predicate (filter is parsed with the anonymous user)
   * @param filter the query expression, or null for "match everything"
   * @return true if the change matches (a null filter with a null user
   *         matches unconditionally)
   */
  private boolean filterMatch(CurrentUser user, String filter)
      throws OrmException, QueryParseException {
    ChangeQueryBuilder qb;
    Predicate<ChangeData> p = null;

    if (user == null) {
      qb = args.queryBuilder.create(args.anonymousUser);
    } else {
      qb = args.queryBuilder.create(user);
      p = qb.is_visible();
    }

    if (filter != null) {
      Predicate<ChangeData> filterPredicate = qb.parse(filter);
      if (p == null) {
        p = filterPredicate;
      } else {
        p = Predicate.and(filterPredicate, p);
      }
    }
    return p == null || p.match(changeData);
  }
}
|
|
package org.jgroups.tests;
import org.jgroups.util.StackType;
import org.jgroups.util.Util;
import java.net.*;
import java.util.*;
/**
* Discovers all UDP-based members running on a certain mcast address
* @author Bela Ban
*/
public class Probe {
    // Socket used both to send probe requests and to receive responses.
    protected MulticastSocket mcast_sock;
    // Flipped to false by the timeout thread to end the receive loop.
    protected volatile boolean running=true;
    // Addresses of members that already answered; used to weed out duplicates.
    protected final Set<String>      senders=new HashSet<>();


    public Probe() {

    }

    /**
     * Sends the probe query to all given addresses (expanding unicast targets
     * to their physical member addresses first), then prints every response
     * received until the timeout elapses.
     *
     * @param addrs destination addresses (multicast or unicast)
     * @param bind_addr local interface to bind/send from, or null
     * @param port destination diagnostics port
     * @param ttl multicast TTL
     * @param timeout how long (ms) to listen for responses
     * @param query key[=value] tokens sent to the members
     * @param match only responses containing this substring are printed, null = all
     * @param weed_out_duplicates suppress repeated responses from the same member
     * @param passcode optional authentication passcode prepended as a digest
     */
    public void start(List<InetAddress> addrs, InetAddress bind_addr, int port, int ttl,
                      final long timeout, List<String> query, String match,
                      boolean weed_out_duplicates, String passcode) throws Exception {

        for(InetAddress addr: addrs) {
            boolean unicast_dest=addr != null && !addr.isMulticastAddress();
            if(unicast_dest) {
                // A unicast address is first asked for the cluster's physical
                // addresses, then each member is probed individually.
                Collection<InetAddress> targets=getPhysicalAddresses(addr, bind_addr, port, timeout);
                if(targets == null || targets.isEmpty()) {
                    System.err.println("Found no valid hosts - terminating");
                    return;
                }
                for(InetAddress target : targets)
                    sendRequest(target, bind_addr, port, ttl, query, passcode);
            }
            else
                sendRequest(addr, bind_addr, port, ttl, query, passcode);
        }

        // Watchdog: closing the socket unblocks receive() below after the timeout.
        new Thread(() -> {
            Util.sleep(timeout);
            mcast_sock.close();
            running=false;
        }).start();

        int matched=0, not_matched=0, count=0;
        System.out.println("\n");
        while(running) {
            byte[] buf=new byte[70000];
            DatagramPacket rsp=new DatagramPacket(buf, 0, buf.length);
            try {
                mcast_sock.receive(rsp);
            }
            catch(Throwable t) {
                // Expected when the watchdog closes the socket.
                System.out.println("\n");
                break;
            }

            byte[] data=rsp.getData();
            String response=new String(data, 0, rsp.getLength());
            if(weed_out_duplicates && checkDuplicateResponse(response))
                continue;

            count++;
            if(matches(response, match)) {
                matched++;
                System.out.printf("#%d (%d bytes):\n%s\n", count, rsp.getLength(), response);
            }
            else
                not_matched++;
        }
        System.out.printf("%d responses (%d matches, %d non matches)\n", count, matched, not_matched);
    }

    /**
     * Asks a single member (via unicast) for the physical addresses of all
     * cluster members, waiting at most {@code timeout} ms for a parsable reply.
     *
     * @return the parsed addresses, or null if no usable reply arrived in time
     */
    protected static Collection<InetAddress> getPhysicalAddresses(InetAddress addr, InetAddress bind_addr,
                                                                  int port, final long timeout) throws Exception {
        final DatagramSocket sock=new DatagramSocket(new InetSocketAddress(bind_addr, 0));
        byte[] payload="member-addrs".getBytes();
        DatagramPacket probe=new DatagramPacket(payload, 0, payload.length, addr, port);
        sock.send(probe);

        // Watchdog: closing the socket unblocks receive() below after the timeout.
        new Thread(() -> {
            Util.sleep(timeout);
            sock.close();
        }).start();

        long end_time=System.currentTimeMillis() + timeout;
        while(System.currentTimeMillis() < end_time) {
            byte[] buf=new byte[70000];
            DatagramPacket rsp=new DatagramPacket(buf, 0, buf.length);
            try {
                sock.receive(rsp);
            }
            catch(Throwable t) {
                break;
            }
            byte[] data=rsp.getData();
            String response=new String(data, 0, rsp.getLength());
            Collection<InetAddress> retval=parseAddresses(response);
            if(retval != null && !retval.isEmpty())
                return retval;
        }
        return null;
    }

    /**
     * Extracts the addresses from a "member-addrs=a:p,b:p,..." response line.
     * Port suffixes after the last ':' are stripped before address resolution.
     *
     * @return the parsed addresses; empty if the marker was not found
     */
    protected static Collection<InetAddress> parseAddresses(String input) throws Exception {
        final String ADDRS="member-addrs=";
        Collection<InetAddress> retval=new ArrayList<>();
        int start_index=-1, end_index=-1;
        if(input != null && (start_index=input.indexOf(ADDRS)) >= 0) {
            input=input.substring(start_index + ADDRS.length()).trim();
            end_index=input.indexOf('\n');
            if(end_index > 0)
                input=input.substring(0, end_index);
            List<String> rsps=Util.parseStringList(input,",");
            for(String tmp: rsps) {
                int index2=tmp.lastIndexOf(':');
                if(index2 != -1)
                    tmp=tmp.substring(0, index2);
                retval.add(InetAddress.getByName(tmp));
            }
        }
        return retval;
    }

    /**
     * Sends one probe datagram carrying the query tokens (space-separated),
     * optionally prefixed with an authentication digest built from the passcode.
     * Lazily creates the shared multicast socket on first use.
     */
    protected void sendRequest(InetAddress addr, InetAddress bind_addr, int port, int ttl,
                               List<String> query, String passcode) throws Exception {
        if(mcast_sock == null) {
            mcast_sock=new MulticastSocket();
            mcast_sock.setTimeToLive(ttl);
            if(bind_addr != null)
                mcast_sock.setInterface(bind_addr);
        }

        StringBuilder request=new StringBuilder();
        byte[] authenticationDigest = null;
        if(passcode != null){
            // Digest is salted with the current time and a random number.
            long t1 = (new Date()).getTime();
            double q1 = Math.random();
            authenticationDigest = Util.createAuthenticationDigest(passcode, t1, q1);
        }
        for(int i=0; i < query.size(); i++) {
            request.append(query.get(i)).append(" ");
        }
        byte[] queryPayload = request.toString().getBytes();
        byte[] payload = queryPayload;
        if (authenticationDigest != null) {
            // Payload layout: [digest][query bytes].
            payload = new byte[authenticationDigest.length + queryPayload.length];
            System.arraycopy(authenticationDigest, 0, payload, 0, authenticationDigest.length);
            System.arraycopy(queryPayload, 0, payload, authenticationDigest.length, queryPayload.length);
        }

        DatagramPacket probe=new DatagramPacket(payload, 0, payload.length, addr, port);
        mcast_sock.send(probe);
        // System.out.printf("-- sending probe request to %s:%d\n", addr, port);
    }

    /**
     * Returns true if a response from the same local_addr was already seen
     * (and records the sender otherwise).
     */
    private boolean checkDuplicateResponse(String response) {
        int index=response.indexOf("local_addr");
        if(index != -1) {
            // Skip past "local_addr" plus the separator character after it.
            String addr=parseAddress(response.substring(index+1 + "local_addr".length()));
            return senders.add(addr) == false;
        }

        return false;
    }

    /** Returns the first whitespace-delimited token of the response fragment. */
    private static String parseAddress(String response) {
        StringTokenizer st=new StringTokenizer(response);
        return st.nextToken();
    }

    /**
     * Substring match: null match pattern accepts everything, a null response
     * matches nothing.
     */
    private static boolean matches(String response, String match) {
        if(response == null)
            return false;
        if(match == null)
            return true;
        int index=response.indexOf(match);
        return index > -1;
    }


    /** Command-line entry point; see {@link #help()} for the accepted options. */
    public static void main(String[] args) {
        InetAddress  bind_addr=null;
        List<InetAddress> addrs=new ArrayList<>();
        int          port=0;
        int          ttl=32;
        long         timeout=500;
        final String DEFAULT_DIAG_ADDR="224.0.75.75";
        final String DEFAULT_DIAG_ADDR_IPv6="ff0e::0:75:75";
        final int    DEFAULT_DIAG_PORT=7500;
        List<String> query=new ArrayList<>();
        String       match=null;
        boolean      weed_out_duplicates=false;
        String       passcode=null;

        try {
            for(int i=0; i < args.length; i++) {
                if("-addr".equals(args[i])) {
                    addrs.add(InetAddress.getByName(args[++i]));
                    continue;
                }
                if("-bind_addr".equals(args[i])) {
                    bind_addr=InetAddress.getByName(args[++i]);
                    continue;
                }
                if("-port".equals(args[i])) {
                    port=Integer.parseInt(args[++i]);
                    continue;
                }
                if("-ttl".equals(args[i])) {
                    ttl=Integer.parseInt(args[++i]);
                    continue;
                }
                if("-timeout".equals(args[i])) {
                    timeout=Long.parseLong(args[++i]);
                    continue;
                }
                if("-match".equals(args[i])) {
                    match=args[++i];
                    continue;
                }
                if("-weed_out_duplicates".equals(args[i])) {
                    weed_out_duplicates=true;
                    continue;
                }
                if("-passcode".equals(args[i])) {
                    passcode=args[++i];
                    continue;
                }
                if("-cluster".equals(args[i])) {
                    String cluster=args[++i];
                    query.add("cluster=" + cluster);
                    continue;
                }
              /*  if("-node".equals(args[i])) {
                    String node=args[++i];
                    query.add("node=" + node);
                    continue;
                }*/
                if("-help".equals(args[i]) || "-h".equals(args[i]) || "--help".equals(args[i])) {
                    help();
                    return;
                }
                // Unknown tokens are passed through as raw query keys.
                query.add(args[i]);
            }
            Probe p=new Probe();
            if(addrs.isEmpty()) {
                // Default to the standard diagnostics multicast address for the
                // detected IP stack.
                StackType stack_type=Util.getIpStackType();
                boolean ipv6=stack_type == StackType.IPv6;
                InetAddress addr=InetAddress.getByName(ipv6? DEFAULT_DIAG_ADDR_IPv6 : DEFAULT_DIAG_ADDR);
                addrs.add(addr);
            }
            if(port == 0)
                port=DEFAULT_DIAG_PORT;
            p.start(addrs, bind_addr, port, ttl, timeout, query, match, weed_out_duplicates, passcode);
        }
        catch(Throwable t) {
            t.printStackTrace();
        }
    }

    /** Prints usage and examples to stdout. */
    static void help() {
        System.out.println("Probe [-help] [-addr <addr>] [-bind_addr <addr>] " +
                             "[-port <port>] [-ttl <ttl>] [-timeout <timeout>] [-passcode <code>] [-weed_out_duplicates] " +
                             "[-cluster regexp-pattern] [-match pattern] [key[=value]]*\n\n" +
                             "Examples:\n" +
                             "probe.sh keys // dumps all valid commands\n" +
                             "probe.sh jmx=NAKACK // dumps JMX info about all NAKACK protocols\n" +
                             "probe.sh op=STABLE.runMessageGarbageCollection // invokes the method in all STABLE protocols\n" +
                             "probe.sh jmx=UDP.oob,thread_pool // dumps all attrs of UDP starting with oob* or thread_pool*\n" +
                             "probe.sh jmx=FLUSH.bypass=true\n");
    }
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.runtime.metrics.util;
import org.apache.flink.annotation.VisibleForTesting;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.MetricOptions;
import org.apache.flink.metrics.Gauge;
import org.apache.flink.metrics.MetricGroup;
import org.apache.flink.runtime.clusterframework.BootstrapTools;
import org.apache.flink.runtime.clusterframework.types.ResourceID;
import org.apache.flink.runtime.metrics.MetricNames;
import org.apache.flink.runtime.metrics.MetricRegistry;
import org.apache.flink.runtime.metrics.groups.JobManagerMetricGroup;
import org.apache.flink.runtime.metrics.groups.TaskManagerMetricGroup;
import org.apache.flink.runtime.rpc.RpcService;
import org.apache.flink.runtime.rpc.akka.AkkaRpcServiceUtils;
import org.apache.flink.util.Preconditions;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.management.AttributeNotFoundException;
import javax.management.InstanceNotFoundException;
import javax.management.MBeanException;
import javax.management.MBeanServer;
import javax.management.MalformedObjectNameException;
import javax.management.ObjectName;
import javax.management.ReflectionException;
import java.lang.management.ClassLoadingMXBean;
import java.lang.management.GarbageCollectorMXBean;
import java.lang.management.ManagementFactory;
import java.lang.management.MemoryUsage;
import java.lang.management.ThreadMXBean;
import java.util.List;
import java.util.Optional;
import java.util.function.Supplier;
import static org.apache.flink.runtime.metrics.util.SystemResourcesMetricsInitializer.instantiateSystemMetrics;
/**
* Utility class to register pre-defined metric sets.
*/
public class MetricUtils {
    private static final Logger LOG = LoggerFactory.getLogger(MetricUtils.class);

    private static final String METRIC_GROUP_STATUS_NAME = "Status";
    private static final String METRICS_ACTOR_SYSTEM_NAME = "flink-metrics";

    static final String METRIC_GROUP_HEAP_NAME = "Heap";
    static final String METRIC_GROUP_NONHEAP_NAME = "NonHeap";

    /** Utility class, not meant to be instantiated. */
    private MetricUtils() {
    }

    /**
     * Creates the JobManager metric group and registers the standard JM status metrics on it.
     *
     * @param metricRegistry registry the group reports to
     * @param hostname hostname used in the group's scope
     * @param systemResourceProbeInterval if present, system resource metrics are also registered,
     *        probed at this interval
     * @return the initialized JobManager metric group
     */
    public static JobManagerMetricGroup instantiateJobManagerMetricGroup(
            final MetricRegistry metricRegistry,
            final String hostname,
            final Optional<Time> systemResourceProbeInterval) {
        final JobManagerMetricGroup jobManagerMetricGroup = new JobManagerMetricGroup(
            metricRegistry,
            hostname);

        MetricGroup statusGroup = jobManagerMetricGroup.addGroup(METRIC_GROUP_STATUS_NAME);

        // initialize the JM metrics
        instantiateStatusMetrics(statusGroup);

        if (systemResourceProbeInterval.isPresent()) {
            instantiateSystemMetrics(jobManagerMetricGroup, systemResourceProbeInterval.get());
        }
        return jobManagerMetricGroup;
    }

    /**
     * Creates the TaskManager metric group and registers the standard TM status metrics on it.
     *
     * @param metricRegistry registry the group reports to
     * @param hostName hostname used in the group's scope
     * @param resourceID resource id identifying this TaskManager
     * @param systemResourceProbeInterval if present, system resource metrics are also registered,
     *        probed at this interval
     * @return the TaskManager metric group together with its "Status" sub-group
     */
    public static Tuple2<TaskManagerMetricGroup, MetricGroup> instantiateTaskManagerMetricGroup(
            MetricRegistry metricRegistry,
            String hostName,
            ResourceID resourceID,
            Optional<Time> systemResourceProbeInterval) {
        final TaskManagerMetricGroup taskManagerMetricGroup = new TaskManagerMetricGroup(
            metricRegistry,
            hostName,
            resourceID.toString());

        MetricGroup statusGroup = taskManagerMetricGroup.addGroup(METRIC_GROUP_STATUS_NAME);

        // Initialize the TM metrics
        instantiateStatusMetrics(statusGroup);

        if (systemResourceProbeInterval.isPresent()) {
            instantiateSystemMetrics(taskManagerMetricGroup, systemResourceProbeInterval.get());
        }
        return Tuple2.of(taskManagerMetricGroup, statusGroup);
    }

    /**
     * Registers the JVM metrics (class loading, GC, memory, threads, CPU) under a "JVM"
     * sub-group of the given group.
     */
    public static void instantiateStatusMetrics(
            MetricGroup metricGroup) {
        MetricGroup jvm = metricGroup.addGroup("JVM");

        instantiateClassLoaderMetrics(jvm.addGroup("ClassLoader"));
        instantiateGarbageCollectorMetrics(jvm.addGroup("GarbageCollector"));
        instantiateMemoryMetrics(jvm.addGroup("Memory"));
        instantiateThreadMetrics(jvm.addGroup("Threads"));
        instantiateCPUMetrics(jvm.addGroup("CPU"));
    }

    /**
     * Starts the dedicated RPC service used by the metrics query service.
     *
     * @param configuration configuration providing the port range and thread priority
     * @param hostname hostname to bind the service to
     * @return the started RPC service
     * @throws Exception if the underlying actor system cannot be started
     */
    public static RpcService startMetricsRpcService(Configuration configuration, String hostname) throws Exception {
        final String portRange = configuration.getString(MetricOptions.QUERY_SERVICE_PORT);
        final int threadPriority = configuration.getInteger(MetricOptions.QUERY_SERVICE_THREAD_PRIORITY);

        return AkkaRpcServiceUtils.createRpcService(
            hostname,
            portRange,
            configuration,
            METRICS_ACTOR_SYSTEM_NAME,
            // a single dedicated thread is sufficient for the query service
            new BootstrapTools.FixedThreadPoolExecutorConfiguration(1, 1, threadPriority));
    }

    private static void instantiateClassLoaderMetrics(MetricGroup metrics) {
        final ClassLoadingMXBean mxBean = ManagementFactory.getClassLoadingMXBean();

        metrics.<Long, Gauge<Long>>gauge("ClassesLoaded", mxBean::getTotalLoadedClassCount);
        metrics.<Long, Gauge<Long>>gauge("ClassesUnloaded", mxBean::getUnloadedClassCount);
    }

    private static void instantiateGarbageCollectorMetrics(MetricGroup metrics) {
        // one sub-group per garbage collector reported by the JVM
        List<GarbageCollectorMXBean> garbageCollectors = ManagementFactory.getGarbageCollectorMXBeans();

        for (final GarbageCollectorMXBean garbageCollector: garbageCollectors) {
            MetricGroup gcGroup = metrics.addGroup(garbageCollector.getName());

            gcGroup.<Long, Gauge<Long>>gauge("Count", garbageCollector::getCollectionCount);
            gcGroup.<Long, Gauge<Long>>gauge("Time", garbageCollector::getCollectionTime);
        }
    }

    private static void instantiateMemoryMetrics(MetricGroup metrics) {
        instantiateHeapMemoryMetrics(metrics.addGroup(METRIC_GROUP_HEAP_NAME));
        instantiateNonHeapMemoryMetrics(metrics.addGroup(METRIC_GROUP_NONHEAP_NAME));

        final MBeanServer con = ManagementFactory.getPlatformMBeanServer();

        // the direct and mapped buffer pools are exposed via the platform MBean server
        instantiateBufferPoolMetrics(metrics, con, "direct", "Direct");
        instantiateBufferPoolMetrics(metrics, con, "mapped", "Mapped");
    }

    /**
     * Registers Count/MemoryUsed/TotalCapacity gauges for the given JVM buffer pool,
     * read via the platform MBean server. Registration is skipped with a warning if the
     * MBean object name cannot be constructed.
     *
     * @param metrics group to register the gauges under
     * @param con platform MBean server to read the attributes from
     * @param poolName name of the buffer pool ("direct" or "mapped")
     * @param groupName name of the metric sub-group to create
     */
    private static void instantiateBufferPoolMetrics(
            MetricGroup metrics,
            MBeanServer con,
            String poolName,
            String groupName) {
        final String bufferPoolName = "java.nio:type=BufferPool,name=" + poolName;

        try {
            final ObjectName bufferPoolObjectName = new ObjectName(bufferPoolName);

            MetricGroup group = metrics.addGroup(groupName);
            // -1 is reported if the attribute cannot be read at gauge time
            group.<Long, Gauge<Long>>gauge("Count", new AttributeGauge<>(con, bufferPoolObjectName, "Count", -1L));
            group.<Long, Gauge<Long>>gauge("MemoryUsed", new AttributeGauge<>(con, bufferPoolObjectName, "MemoryUsed", -1L));
            group.<Long, Gauge<Long>>gauge("TotalCapacity", new AttributeGauge<>(con, bufferPoolObjectName, "TotalCapacity", -1L));
        } catch (MalformedObjectNameException e) {
            LOG.warn("Could not create object name {}.", bufferPoolName, e);
        }
    }

    @VisibleForTesting
    static void instantiateHeapMemoryMetrics(final MetricGroup metricGroup) {
        instantiateMemoryUsageMetrics(metricGroup, () -> ManagementFactory.getMemoryMXBean().getHeapMemoryUsage());
    }

    @VisibleForTesting
    static void instantiateNonHeapMemoryMetrics(final MetricGroup metricGroup) {
        instantiateMemoryUsageMetrics(metricGroup, () -> ManagementFactory.getMemoryMXBean().getNonHeapMemoryUsage());
    }

    private static void instantiateMemoryUsageMetrics(final MetricGroup metricGroup, final Supplier<MemoryUsage> memoryUsageSupplier) {
        // the supplier is re-invoked on every gauge read so values stay current
        metricGroup.<Long, Gauge<Long>>gauge(MetricNames.MEMORY_USED, () -> memoryUsageSupplier.get().getUsed());
        metricGroup.<Long, Gauge<Long>>gauge(MetricNames.MEMORY_COMMITTED, () -> memoryUsageSupplier.get().getCommitted());
        metricGroup.<Long, Gauge<Long>>gauge(MetricNames.MEMORY_MAX, () -> memoryUsageSupplier.get().getMax());
    }

    private static void instantiateThreadMetrics(MetricGroup metrics) {
        final ThreadMXBean mxBean = ManagementFactory.getThreadMXBean();

        metrics.<Integer, Gauge<Integer>>gauge("Count", mxBean::getThreadCount);
    }

    private static void instantiateCPUMetrics(MetricGroup metrics) {
        try {
            // com.sun.management is not guaranteed to be available on every JVM
            final com.sun.management.OperatingSystemMXBean mxBean = (com.sun.management.OperatingSystemMXBean) ManagementFactory.getOperatingSystemMXBean();

            metrics.<Double, Gauge<Double>>gauge("Load", mxBean::getProcessCpuLoad);
            metrics.<Long, Gauge<Long>>gauge("Time", mxBean::getProcessCpuTime);
        } catch (Exception e) {
            LOG.warn("Cannot access com.sun.management.OperatingSystemMXBean.getProcessCpuLoad()" +
                " - CPU load metrics will not be available.", e);
        }
    }

    /**
     * Gauge that reads a single attribute of an MBean, falling back to a fixed error value
     * if the attribute cannot be read.
     */
    private static final class AttributeGauge<T> implements Gauge<T> {
        private final MBeanServer server;
        private final ObjectName objectName;
        private final String attributeName;
        private final T errorValue;

        private AttributeGauge(MBeanServer server, ObjectName objectName, String attributeName, T errorValue) {
            this.server = Preconditions.checkNotNull(server);
            this.objectName = Preconditions.checkNotNull(objectName);
            this.attributeName = Preconditions.checkNotNull(attributeName);
            this.errorValue = errorValue;
        }

        @SuppressWarnings("unchecked")
        @Override
        public T getValue() {
            try {
                return (T) server.getAttribute(objectName, attributeName);
            } catch (MBeanException | AttributeNotFoundException | InstanceNotFoundException | ReflectionException e) {
                LOG.warn("Could not read attribute {}.", attributeName, e);
                return errorValue;
            }
        }
    }
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.asterix.optimizer.rules;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.apache.asterix.lang.common.util.FunctionUtil;
import org.apache.asterix.metadata.declared.MetadataProvider;
import org.apache.asterix.om.base.AString;
import org.apache.asterix.om.constants.AsterixConstantValue;
import org.apache.asterix.om.functions.BuiltinFunctions;
import org.apache.asterix.om.types.ARecordType;
import org.apache.asterix.om.types.ATypeTag;
import org.apache.asterix.om.types.AUnionType;
import org.apache.asterix.om.types.IAType;
import org.apache.asterix.om.utils.ConstantExpressionUtil;
import org.apache.commons.lang3.mutable.Mutable;
import org.apache.commons.lang3.mutable.MutableObject;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
import org.apache.hyracks.algebricks.common.utils.Pair;
import org.apache.hyracks.algebricks.common.utils.Triple;
import org.apache.hyracks.algebricks.core.algebra.base.ILogicalExpression;
import org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator;
import org.apache.hyracks.algebricks.core.algebra.base.IOptimizationContext;
import org.apache.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
import org.apache.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
import org.apache.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
import org.apache.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
import org.apache.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
import org.apache.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
/**
 * This rule resolves references to undefined identifiers with the following priority:
 * 1. field-access
 * 2. datasets
 * based on the available type and metadata information.
 * <p>
 * Note that undefined variable references that are FROM/JOIN/UNNEST/Quantifier binding
 * expressions are resolved to datasets only; that resolution has already been performed
 * by the visitor referenced below.
 *
 * @see org.apache.asterix.lang.sqlpp.rewrites.visitor.VariableCheckAndRewriteVisitor
 */
public class ResolveVariableRule implements IAlgebraicRewriteRule {

    @Override
    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
            throws AlgebricksException {
        // All rewriting is performed in the post-visit pass; nothing to do here.
        return false;
    }

    @Override
    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
            throws AlgebricksException {
        ILogicalOperator op = opRef.getValue();
        // Operators without inputs (e.g., leaves) carry no expressions to resolve.
        if (op.getInputs().isEmpty()) {
            return false;
        }
        // Populates the latest type information, e.g., resolved path sugars.
        context.computeAndSetTypeEnvironmentForOperator(op);
        if (op.acceptExpressionTransform(
                exprRef -> rewriteExpressionReference(op, exprRef, new Triple<>(false, null, null), null, context))) {
            // Generates the up-to-date type information.
            context.computeAndSetTypeEnvironmentForOperator(op);
            return true;
        }
        return false;
    }

    // Recursively rewrites for an expression within an operator.
    // The Triple carries a candidate "dataverse.dataset" interpretation from the parent
    // field-access expression: (matched?, dataverse name, dataset name).
    private boolean rewriteExpressionReference(ILogicalOperator op, Mutable<ILogicalExpression> exprRef,
            Triple<Boolean, String, String> fullyQualifiedDatasetPathCandidateFromParent,
            Mutable<ILogicalExpression> parentFuncRef, IOptimizationContext context) throws AlgebricksException {
        ILogicalExpression expr = exprRef.getValue();
        if (expr.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
            return false;
        }
        boolean changed = false;
        AbstractFunctionCallExpression funcExpr = (AbstractFunctionCallExpression) expr;
        // Checks whether this call can be read as resolve("dataverse").dataset before descending.
        Triple<Boolean, String, String> fullyQualifiedDatasetPathCandidate = resolveFullyQualifiedPath(funcExpr,
                context);
        for (Mutable<ILogicalExpression> funcArgRef : funcExpr.getArguments()) {
            if (rewriteExpressionReference(op, funcArgRef, fullyQualifiedDatasetPathCandidate, exprRef, context)) {
                changed = true;
            }
        }
        // Cleans up extra scan-collections if there is.
        if (changed) {
            cleanupScanCollectionForDataset(funcExpr);
        }
        // Does the actual resolution.
        return changed || resolve(op, context, exprRef, fullyQualifiedDatasetPathCandidateFromParent, parentFuncRef);
    }

    // Resolves a "resolve" function call expression to a fully qualified variable/field-access path or
    // a dataset.
    private boolean resolve(ILogicalOperator op, IOptimizationContext context, Mutable<ILogicalExpression> exprRef,
            Triple<Boolean, String, String> fullyQualifiedDatasetPathCandidateFromParent,
            Mutable<ILogicalExpression> parentFuncRef) throws AlgebricksException {
        AbstractFunctionCallExpression funcExpr = (AbstractFunctionCallExpression) exprRef.getValue();
        // Only "resolve" calls are rewritten here; everything else is left untouched.
        if (funcExpr.getFunctionIdentifier() != BuiltinFunctions.RESOLVE) {
            return false;
        }
        ILogicalExpression arg = funcExpr.getArguments().get(0).getValue();
        String unresolvedVarName = extractConstantString(arg);
        return resolveInternal(exprRef, hasMatchedDatasetForVariableName(unresolvedVarName, context),
                findCandidatePaths(op, extractExprs(funcExpr.getArguments()), unresolvedVarName, context),
                unresolvedVarName, fullyQualifiedDatasetPathCandidateFromParent, parentFuncRef, context);
    }

    // Extracts all possible expressions from the arguments of the "resolve" function.
    private List<ILogicalExpression> extractExprs(List<Mutable<ILogicalExpression>> args) throws AlgebricksException {
        List<ILogicalExpression> exprs = new ArrayList<>();
        // The first arg is the name of the undefined variable, so it is skipped here.
        for (int index = 1; index < args.size(); ++index) {
            ILogicalExpression argExpr = args.get(index).getValue();
            exprs.add(argExpr);
        }
        return exprs;
    }

    // Resolves an undefined name to a dataset or a fully qualified variable/field-access path
    // based on the given information of dataset matches and candidate paths.
    private boolean resolveInternal(Mutable<ILogicalExpression> funcRef, boolean hasMatchedDataset,
            Collection<Pair<ILogicalExpression, List<String>>> varAccessCandidates, String unresolvedVarName,
            Triple<Boolean, String, String> fullyQualifiedDatasetPathCandidateFromParent,
            Mutable<ILogicalExpression> parentFuncRef, IOptimizationContext context) throws AlgebricksException {
        AbstractFunctionCallExpression func = (AbstractFunctionCallExpression) funcRef.getValue();
        int numVarCandidates = varAccessCandidates.size();

        // The resolution order: 1. field-access 2. datasets (standalone-name or fully-qualified)
        if (numVarCandidates > 0) {
            if (numVarCandidates == 1) {
                resolveAsFieldAccess(funcRef, varAccessCandidates.iterator().next());
            } else {
                // More than one possibilities.
                throw new AlgebricksException(
                        "Cannot resolve ambiguous alias reference for undefined identifier " + unresolvedVarName);
            }
        } else if (hasMatchedDataset) {
            // Rewrites the "resolve" function to a "dataset" function and only keep the dataset name argument.
            func.setFunctionInfo(FunctionUtil.getFunctionInfo(BuiltinFunctions.DATASET));
            Mutable<ILogicalExpression> datasetNameExpression = func.getArguments().get(0);
            func.getArguments().clear();
            func.getArguments().add(datasetNameExpression);
        } else if (fullyQualifiedDatasetPathCandidateFromParent.first) {
            // Rewrites the parent "field-access" function to a "dataset" function,
            // using the "dataverse.dataset" string carried in the Triple.
            AbstractFunctionCallExpression parentFunc = (AbstractFunctionCallExpression) parentFuncRef.getValue();
            parentFunc.setFunctionInfo(FunctionUtil.getFunctionInfo(BuiltinFunctions.DATASET));
            parentFunc.getArguments().clear();
            parentFunc.getArguments().add(
                    new MutableObject<>(new ConstantExpression(
                            new AsterixConstantValue(new AString(fullyQualifiedDatasetPathCandidateFromParent.second
                                    + "." + fullyQualifiedDatasetPathCandidateFromParent.third)))));
        } else {
            MetadataProvider metadataProvider = (MetadataProvider) context.getMetadataProvider();
            // Cannot find any resolution.
            throw new AlgebricksException("Cannot find dataset " + unresolvedVarName + " in dataverse "
                    + metadataProvider.getDefaultDataverseName() + " nor an alias with name " + unresolvedVarName);
        }
        return true;
    }

    // Resolves a "resolve" function call as a field access.
    private void resolveAsFieldAccess(Mutable<ILogicalExpression> funcRef,
            Pair<ILogicalExpression, List<String>> varAndPath) {
        // Rewrites to field-access-by-names, building one nested access per path step.
        ILogicalExpression expr = varAndPath.first;
        List<String> path = varAndPath.second;
        Mutable<ILogicalExpression> firstArgRef = new MutableObject<>(expr);
        ILogicalExpression newFunc = null;
        for (String fieldName : path) {
            List<Mutable<ILogicalExpression>> args = new ArrayList<>();
            args.add(firstArgRef);
            args.add(new MutableObject<>(new ConstantExpression(new AsterixConstantValue(new AString(fieldName)))));
            newFunc = new ScalarFunctionCallExpression(
                    FunctionUtil.getFunctionInfo(BuiltinFunctions.FIELD_ACCESS_BY_NAME), args);
            firstArgRef = new MutableObject<>(newFunc);
        }
        funcRef.setValue(newFunc);
    }

    // Finds all candidate fully qualified expression/field-access paths.
    private Set<Pair<ILogicalExpression, List<String>>> findCandidatePaths(ILogicalOperator op,
            Collection<ILogicalExpression> referenceExprs, String unresolvedVarName, IOptimizationContext context)
            throws AlgebricksException {
        Set<Pair<ILogicalExpression, List<String>>> candidates = new HashSet<>();
        // Types are taken from the operator's first input, i.e., the schema visible to this operator.
        IVariableTypeEnvironment env = context.getOutputTypeEnvironment(op.getInputs().get(0).getValue());
        for (ILogicalExpression referenceExpr : referenceExprs) {
            IAType type = (IAType) env.getType(referenceExpr);
            candidates.addAll(findCandidatePathsForExpr(unresolvedVarName, type, referenceExpr, new ArrayList<>()));
        }
        return candidates;
    }

    // Recursively finds candidate paths under an expression.
    private Set<Pair<ILogicalExpression, List<String>>> findCandidatePathsForExpr(String unresolvedVarName,
            IAType pathType, ILogicalExpression expr, List<String> parentPath) throws AlgebricksException {
        Set<Pair<ILogicalExpression, List<String>>> varAccessCandidates = new HashSet<>();
        IAType type = pathType;
        // Unwraps an optional (union with null/missing) type to its actual type.
        if (type.getTypeTag() == ATypeTag.UNION) {
            type = ((AUnionType) type).getActualType();
        }
        ATypeTag tag = type.getTypeTag();
        if (tag == ATypeTag.ANY) {
            // An open type may contain any field, so the name is a candidate here.
            List<String> path = new ArrayList<>(parentPath);
            path.add(unresolvedVarName);
            varAccessCandidates.add(new Pair<>(expr, path));
        }
        if (tag == ATypeTag.OBJECT) {
            ARecordType recordType = (ARecordType) type;
            if (recordType.canContainField(unresolvedVarName)) {
                // If the field name is possible.
                List<String> path = new ArrayList<>(parentPath);
                path.add(unresolvedVarName);
                varAccessCandidates.add(new Pair<>(expr, path));
            } else {
                // Recursively identify possible paths.
                String[] fieldNames = recordType.getFieldNames();
                IAType[] fieldTypes = recordType.getFieldTypes();
                for (int index = 0; index < fieldNames.length; ++index) {
                    List<String> path = new ArrayList<>(parentPath);
                    path.add(fieldNames[index]);
                    varAccessCandidates.addAll(findCandidatePathsForExpr(unresolvedVarName, fieldTypes[index], expr,
                            path));
                }
            }
        }
        return varAccessCandidates;
    }

    // Try to resolve the expression like resolve("x").foo as x.foo.
    // Returns (matched?, dataverse name, dataset name).
    private Triple<Boolean, String, String> resolveFullyQualifiedPath(AbstractFunctionCallExpression funcExpr,
            IOptimizationContext context) throws AlgebricksException {
        if (!funcExpr.getFunctionIdentifier().equals(BuiltinFunctions.FIELD_ACCESS_BY_NAME)) {
            return new Triple<>(false, null, null);
        }
        List<Mutable<ILogicalExpression>> args = funcExpr.getArguments();
        ILogicalExpression firstExpr = args.get(0).getValue();
        ILogicalExpression secondExpr = args.get(1).getValue();
        if (firstExpr.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
            return new Triple<>(false, null, null);
        }
        if (secondExpr.getExpressionTag() != LogicalExpressionTag.CONSTANT) {
            return new Triple<>(false, null, null);
        }
        AbstractFunctionCallExpression firstFuncExpr = (AbstractFunctionCallExpression) firstExpr;
        if (!firstFuncExpr.getFunctionIdentifier().equals(BuiltinFunctions.RESOLVE)) {
            return new Triple<>(false, null, null);
        }
        ILogicalExpression dataverseNameExpr = firstFuncExpr.getArguments().get(0).getValue();
        String dataverseName = extractConstantString(dataverseNameExpr);
        String datasetName = extractConstantString(secondExpr);
        return new Triple<>(hasMatchedDataverseDataset(dataverseName, datasetName, context), dataverseName,
                datasetName);
    }

    // Checks whether the dataverse name and dataset name match a dataset.
    private boolean hasMatchedDataverseDataset(String dataverseName, String datasetName, IOptimizationContext context)
            throws AlgebricksException {
        MetadataProvider mdp = (MetadataProvider) context.getMetadataProvider();
        return mdp.findDataset(dataverseName, datasetName) != null;
    }

    // Checks whether the name matches a dataset, either in the default dataverse
    // or as a two-part "dataverse.dataset" name.
    private boolean hasMatchedDatasetForVariableName(String varName, IOptimizationContext context)
            throws AlgebricksException {
        MetadataProvider mdp = (MetadataProvider) context.getMetadataProvider();
        if (mdp.findDataset(mdp.getDefaultDataverseName(), varName) != null) {
            return true;
        }
        if (varName.contains(".")) {
            String[] path = varName.split("\\.");
            // Only a two-part qualified name is accepted here.
            if (path.length != 2) {
                return false;
            }
            if (mdp.findDataset(path[0], path[1]) != null) {
                return true;
            }
        }
        return false;
    }

    // Cleans up scan collections on top of a "dataset" function call since "dataset"
    // is an unnest function.
    private void cleanupScanCollectionForDataset(AbstractFunctionCallExpression funcExpr) {
        if (funcExpr.getFunctionIdentifier() != BuiltinFunctions.SCAN_COLLECTION) {
            return;
        }
        ILogicalExpression arg = funcExpr.getArguments().get(0).getValue();
        if (arg.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
            return;
        }
        AbstractFunctionCallExpression argFuncExpr = (AbstractFunctionCallExpression) arg;
        if (argFuncExpr.getFunctionIdentifier() != BuiltinFunctions.DATASET) {
            return;
        }
        // Replaces the scan-collection call in place by the inner "dataset" call.
        funcExpr.setFunctionInfo(argFuncExpr.getFunctionInfo());
        funcExpr.getArguments().clear();
        funcExpr.getArguments().addAll(argFuncExpr.getArguments());
    }

    // Extracts the name of an undefined variable.
    private String extractConstantString(ILogicalExpression arg) throws AlgebricksException {
        final String str = ConstantExpressionUtil.getStringConstant(arg);
        if (str == null) {
            throw new AlgebricksException("The argument is expected to be a string constant value.");
        }
        return str;
    }
}
|
|
/*-
* See the file LICENSE for redistribution information.
*
* Copyright (c) 1997, 2010 Oracle and/or its affiliates. All rights reserved.
*
*/
package je.rep.quote;
import java.io.PrintStream;
import com.sleepycat.je.Durability;
import com.sleepycat.je.Environment;
import com.sleepycat.je.EnvironmentFailureException;
import com.sleepycat.je.LockConflictException;
import com.sleepycat.je.OperationFailureException;
import com.sleepycat.je.Transaction;
import com.sleepycat.je.TransactionConfig;
import com.sleepycat.je.Durability.ReplicaAckPolicy;
import com.sleepycat.je.Durability.SyncPolicy;
import com.sleepycat.je.rep.InsufficientAcksException;
import com.sleepycat.je.rep.InsufficientReplicasException;
import com.sleepycat.je.rep.NoConsistencyRequiredPolicy;
import com.sleepycat.je.rep.ReplicaConsistencyException;
import com.sleepycat.je.rep.ReplicaWriteException;
import com.sleepycat.je.rep.ReplicatedEnvironment;
/**
* Utility class to begin and commit/abort a transaction and handle exceptions
* according to this application's policies. The doTransactionWork method is
* abstract and must be implemented by callers. The transaction is run and
* doTransactionWork is called by the run() method of this class. The
* onReplicaWrite and onRetryFailure methods may optionally be overridden.
*/
public abstract class RunTransaction {

    /* The maximum number of times to retry the transaction. */
    private static final int TRANSACTION_RETRY_MAX = 10;

    /*
     * The number of seconds to wait between retries when a sufficient
     * number of replicas are not available for a transaction.
     */
    private static final int INSUFFICIENT_REPLICA_RETRY_SEC = 1;

    /* Amount of time to wait to let a replica catch up before retrying. */
    private static final int CONSISTENCY_RETRY_SEC = 1;

    /* Amount of time to wait after a lock conflict. */
    private static final int LOCK_CONFLICT_RETRY_SEC = 1;

    /* The replicated environment the transactions run against. */
    private final ReplicatedEnvironment env;

    /* Stream used for progress and diagnostic messages. */
    private final PrintStream out;

    /**
     * Creates the runner.
     *
     * @param repEnv the replicated environment to run transactions in
     * @param printStream where progress and retry messages are printed
     */
    RunTransaction(ReplicatedEnvironment repEnv, PrintStream printStream) {
        env = repEnv;
        out = printStream;
    }

    /**
     * Runs a transaction, calls the doTransactionWork method, and retries as
     * needed.
     * <p>
     * If the transaction is read only, it uses Durability.READ_ONLY_TXN for
     * the Transaction.  Since this Durability policy does not call for any
     * acknowledgments, it eliminates the possibility of a {@link
     * InsufficientReplicasException} being thrown from the call to {@link
     * Environment#beginTransaction} for a read only transaction on a Master,
     * which is an overly stringent requirement.  This makes the Master more
     * available for read operations.
     *
     * @param readOnly determines whether the transaction to be run is read
     * only.
     *
     * @throws InterruptedException if interrupted while sleeping between retries
     * @throws EnvironmentFailureException if the environment fails
     */
    public void run(boolean readOnly)
        throws InterruptedException, EnvironmentFailureException {

        OperationFailureException exception = null;
        boolean success = false;
        long sleepMillis = 0;
        TransactionConfig txnConfig = setupTxnConfig(readOnly);

        for (int i = 0; i < TRANSACTION_RETRY_MAX; i++) {
            /* Sleep before retrying. */
            if (sleepMillis != 0) {
                Thread.sleep(sleepMillis);
                sleepMillis = 0;
            }
            Transaction txn = null;
            try {
                txn = env.beginTransaction(null, txnConfig);
                doTransactionWork(txn); /* CALL APP-SPECIFIC CODE */
                txn.commit();
                /* 'success' prevents the finally block from aborting. */
                success = true;
                return;
            } catch (InsufficientReplicasException insufficientReplicas) {
                /*
                 * Retry the transaction.  Give Replicas a chance to contact
                 * this Master, in case they have not had a chance to do so
                 * following an election.
                 */
                exception = insufficientReplicas;
                out.println(insufficientReplicas.toString() +
                            "\n  Retrying ...");
                sleepMillis = INSUFFICIENT_REPLICA_RETRY_SEC * 1000;
                if (i > 1) {
                    /*
                     * As an example of a possible application choice,
                     * elect to execute this operation with lower durability.
                     * That makes the node more available, but puts the
                     * data at greater risk.
                     */
                    txnConfig = lowerDurability(txnConfig);
                }
                continue;
            } catch (InsufficientAcksException insufficientReplicas) {
                /*
                 * Transaction has been committed at this node. The other
                 * acknowledgments may be late in arriving, or may never arrive
                 * because the replica just went down.
                 */

                /*
                 * INSERT APP-SPECIFIC CODE HERE: For example, repeat
                 * idempotent changes to ensure they went through.
                 *
                 * Note that 'success' is false at this point, although some
                 * applications may consider the transaction to be complete.
                 */
                out.println(insufficientReplicas.toString());
                /* Nulled so the finally block does not abort a committed txn. */
                txn = null;
                return;
            } catch (ReplicaWriteException replicaWrite) {
                /*
                 * Attempted a modification while in the Replica state.
                 *
                 * CALL APP-SPECIFIC CODE HERE: Cannot accomplish the changes
                 * on this node, redirect the write to the new master and retry
                 * the transaction there.  This could be done by forwarding the
                 * request to the master here, or by returning an error to the
                 * requester and retrying the request at a higher level.
                 */
                onReplicaWrite(replicaWrite);
                return;
            } catch (LockConflictException lockConflict) {
                /*
                 * Retry the transaction.  Note that LockConflictException
                 * covers the HA LockPreemptedException.
                 */
                exception = lockConflict;
                out.println(lockConflict.toString() + "\n  Retrying ...");
                sleepMillis = LOCK_CONFLICT_RETRY_SEC * 1000;
                continue;
            } catch (ReplicaConsistencyException replicaConsistency) {
                /*
                 * Retry the transaction to see if the replica becomes
                 * consistent. If consistency couldn't be satisfied, we can
                 * choose to relax the timeout associated with the
                 * ReplicaConsistencyPolicy, or to do a read with
                 * NoConsistencyRequiredPolicy.
                 */
                exception = replicaConsistency;
                out.println(replicaConsistency.toString() +
                            "\n  Retrying ...");
                sleepMillis = CONSISTENCY_RETRY_SEC * 1000;
                continue;
            } finally {
                if (!success) {
                    if (txn != null) {
                        txn.abort();
                    }

                    /*
                     * INSERT APP-SPECIFIC CODE HERE: Perform any app-specific
                     * cleanup.
                     */
                }
            }
        }

        /*
         * CALL APP-SPECIFIC CODE HERE: Transaction failed, despite retries.
         */
        onRetryFailure(exception);
    }

    /**
     * Must be implemented to perform operations using the given Transaction.
     */
    public abstract void doTransactionWork(Transaction txn);

    /**
     * May be optionally overridden to handle a ReplicaWriteException.  After
     * this method is called, the run() method will return.  By
     * default, this method throws the ReplicaWriteException.
     */
    public void onReplicaWrite(ReplicaWriteException replicaWrite) {
        throw replicaWrite;
    }

    /**
     * May be optionally overridden to handle a failure after the
     * TRANSACTION_RETRY_MAX has been exceeded.  After this method is called,
     * the run() method will return.  By default, this method
     * prints the last exception.
     */
    public void onRetryFailure(OperationFailureException lastException) {
        out.println("Failed despite retries." +
                    ((lastException == null) ?
                      "" :
                      " Encountered exception:" + lastException));
    }

    /**
     * Reduce the Durability level so that we don't require any
     * acknowledgments from replicas.  An example of using lower durability
     * requirements.
     */
    private TransactionConfig lowerDurability(TransactionConfig txnConfig) {

        out.println("\nLowering durability, execute update although " +
                    "replicas not available. Update may not be durable.");
        TransactionConfig useTxnConfig = txnConfig;
        if (useTxnConfig == null) {
            useTxnConfig = new TransactionConfig();
        }
        useTxnConfig.setDurability(new Durability(SyncPolicy.WRITE_NO_SYNC,
                                                  SyncPolicy.NO_SYNC,
                                                  ReplicaAckPolicy.NONE));
        return useTxnConfig;
    }

    /**
     * Create an optimal transaction configuration.
     *
     * @param readOnly whether the transaction only reads data
     * @return the configuration to pass to beginTransaction, or null for defaults
     */
    private TransactionConfig setupTxnConfig(boolean readOnly) {

        if (!readOnly) {
            /*
             * A read/write transaction can just use the default transaction
             * configuration. A null value for the configuration param means
             * defaults should be used.
             */
            return null;
        }

        if (env.getState().isUnknown()) {
            /*
             * This node is not in touch with the replication group master and,
             * because of that, cannot fulfill any consistency checks. As an
             * example of a possible application choice, change the consistency
             * characteristics for this specific transaction and avoid a
             * ReplicaConsistencyException by lowering the consistency
             * requirement now.
             */
            out.println("\nLowering consistency, permit access of data " +
                        " currently on this node.");
            return new TransactionConfig().setConsistencyPolicy
                (NoConsistencyRequiredPolicy.NO_CONSISTENCY);
        }

        /*
         * We can optimize a read operation by specifying a lower
         * durability. Since Durability.READ_ONLY_TXN does not call for any
         * acknowledgments, it eliminates the possibility of a {@link
         * InsufficientReplicasException} being thrown from the call to {@link
         * Environment#beginTransaction} for a read only transaction on a
         * Master.
         */
        return new TransactionConfig().setDurability
            (Durability.READ_ONLY_TXN);
    }
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.felix.eventadmin.impl.handler;
import java.util.*;
import org.osgi.framework.BundleContext;
import org.osgi.framework.ServiceReference;
import org.osgi.service.event.Event;
import org.osgi.service.event.EventHandler;
import org.osgi.util.tracker.ServiceTracker;
/**
* The event handler tracker keeps track of all event handler services.
*
* @author <a href="mailto:dev@felix.apache.org">Felix Project Team</a>
*/
// NOTE(review): the three matching collections below are copy-on-write -
// put()/remove() are synchronized, build fresh collections and publish them
// through a volatile write, so getHandlers() can read them without locking.
public class EventHandlerTracker extends ServiceTracker {
	/** The proxies in this list match all events. */
	private volatile List matchingAllEvents;
	/** This is a map for exact topic matches. The key is the topic,
	 * the value is a list of proxies.
	 */
	private volatile Map matchingTopic;
	/** This is a map for wildcard topics. The key is the prefix of the topic,
	 * the value is a list of proxies
	 */
	private volatile Map matchingPrefixTopic;
	/** The context for the proxies.
	 * NOTE(review): written by update() and read by addingService() without
	 * synchronization - presumably update() is invoked before the tracker is
	 * opened; confirm against the caller. */
	private HandlerContext handlerContext;
	/**
	 * Creates the tracker for all {@link EventHandler} services.
	 * @param context The bundle context used to track the services.
	 */
	public EventHandlerTracker(final BundleContext context) {
		super(context, EventHandler.class.getName(), null);
		// we start with empty collections
		this.matchingAllEvents = new ArrayList();
		this.matchingTopic = new HashMap();
		this.matchingPrefixTopic = new HashMap();
	}
	/**
	 * Update the timeout configuration.
	 * @param ignoreTimeout Patterns naming handlers exempt from timeout
	 *        handling: a value ending in '.' matches classes directly in that
	 *        package, a value ending in '*' matches the package and all sub
	 *        packages, anything else is an exact class name. May be null or
	 *        empty for "no exemptions".
	 * @param requireTopic Whether event handlers must declare a topic.
	 */
	public void update(final String[] ignoreTimeout, final boolean requireTopic) {
		final Matcher[] ignoreTimeoutMatcher;
		if ( ignoreTimeout == null || ignoreTimeout.length == 0 )
		{
			ignoreTimeoutMatcher = null;
		}
		else
		{
			ignoreTimeoutMatcher = new Matcher[ignoreTimeout.length];
			for(int i=0;i<ignoreTimeout.length;i++)
			{
				String value = ignoreTimeout[i];
				if ( value != null )
				{
					value = value.trim();
				}
				// blank/null entries leave a null matcher slot in the array
				if ( value != null && value.length() > 0 )
				{
					if ( value.endsWith(".") )
					{
						// "pkg." - match classes directly inside the package
						ignoreTimeoutMatcher[i] = new PackageMatcher(value.substring(0, value.length() - 1));
					}
					else if ( value.endsWith("*") )
					{
						// "pkg*" - match the package and its sub packages
						ignoreTimeoutMatcher[i] = new SubPackageMatcher(value.substring(0, value.length() - 1));
					}
					else
					{
						// exact class name match
						ignoreTimeoutMatcher[i] = new ClassMatcher(value);
					}
				}
			}
		}
		this.handlerContext = new HandlerContext(this.context, ignoreTimeoutMatcher, requireTopic);
	}
	/**
	 * @see org.osgi.util.tracker.ServiceTracker#addingService(org.osgi.framework.ServiceReference)
	 */
	public Object addingService(final ServiceReference reference) {
		// Wrap the service in a proxy; register it for delivery only if its
		// configuration (as read by proxy.update()) is valid.
		final EventHandlerProxy proxy = new EventHandlerProxy(this.handlerContext, reference);
		if ( proxy.update() ) {
			this.put(proxy);
		}
		return proxy;
	}
	/**
	 * @see org.osgi.util.tracker.ServiceTracker#modifiedService(org.osgi.framework.ServiceReference, java.lang.Object)
	 */
	public void modifiedService(final ServiceReference reference, final Object service) {
		final EventHandlerProxy proxy = (EventHandlerProxy)service;
		// Re-read the service properties: unregister under the old topics,
		// then re-register under the new ones if still valid.
		this.remove(proxy);
		if ( proxy.update() ) {
			this.put(proxy);
		}
	}
	/**
	 * @see org.osgi.util.tracker.ServiceTracker#removedService(org.osgi.framework.ServiceReference, java.lang.Object)
	 */
	public void removedService(ServiceReference reference, Object service) {
		final EventHandlerProxy proxy = (EventHandlerProxy)service;
		this.remove(proxy);
		proxy.dispose();
	}
	/**
	 * Add the proxy to (or remove it from) the list stored under the given
	 * key. The list is always copied before mutation so that concurrent
	 * readers holding the old list are unaffected; an emptied list is removed
	 * from the map entirely.
	 */
	private void updateMap(final Map proxyListMap, final String key, final EventHandlerProxy proxy, final boolean add) {
		List proxies = (List)proxyListMap.get(key);
		if (proxies == null) {
			if ( !add )
			{
				// nothing registered under this key - nothing to remove
				return;
			}
			proxies = new ArrayList();
		} else {
			// copy-on-write: never mutate a published list
			proxies = new ArrayList(proxies);
		}
		if ( add )
		{
			proxies.add(proxy);
		}
		else
		{
			proxies.remove(proxy);
		}
		if ( proxies.size() == 0 )
		{
			proxyListMap.remove(key);
		}
		else
		{
			proxyListMap.put(key, proxies);
		}
	}
	/**
	 * Check the topics of the event handler and put it into the
	 * corresponding collections.
	 * We always create new collections - while this is "expensive"
	 * it allows us to read from them unsynced
	 */
	private synchronized void put(final EventHandlerProxy proxy) {
		final String[] topics = proxy.getTopics();
		if ( topics == null )
		{
			// no topics: the handler matches every event
			final List newMatchingAllEvents = new ArrayList(this.matchingAllEvents);
			newMatchingAllEvents.add(proxy);
			this.matchingAllEvents = newMatchingAllEvents;
		}
		else
		{
			Map newMatchingTopic = null;
			Map newMatchingPrefixTopic = null;
			for(int i = 0; i < topics.length; i++) {
				final String topic = topics[i];
				if ( topic.endsWith("/*") )
				{
					// prefix topic: we remove the /*
					if ( newMatchingPrefixTopic == null )
					{
						newMatchingPrefixTopic = new HashMap(this.matchingPrefixTopic);
					}
					final String prefix = topic.substring(0, topic.length() - 2);
					this.updateMap(newMatchingPrefixTopic, prefix, proxy, true);
				}
				else
				{
					// exact match
					if ( newMatchingTopic == null )
					{
						newMatchingTopic = new HashMap(this.matchingTopic);
					}
					this.updateMap(newMatchingTopic, topic, proxy, true);
				}
			}
			// publish the new maps only after they are fully populated so
			// unsynchronized readers never observe a partially built map
			if ( newMatchingTopic != null )
			{
				this.matchingTopic = newMatchingTopic;
			}
			if ( newMatchingPrefixTopic != null )
			{
				this.matchingPrefixTopic = newMatchingPrefixTopic;
			}
		}
	}
	/**
	 * Check the topics of the event handler and remove it from the
	 * corresponding collections.
	 * We always create new collections - while this is "expensive"
	 * it allows us to read from them unsynced
	 */
	private synchronized void remove(final EventHandlerProxy proxy) {
		final String[] topics = proxy.getTopics();
		if ( topics == null )
		{
			final List newMatchingAllEvents = new ArrayList(this.matchingAllEvents);
			newMatchingAllEvents.remove(proxy);
			this.matchingAllEvents = newMatchingAllEvents;
		} else {
			Map newMatchingTopic = null;
			Map newMatchingPrefixTopic = null;
			for(int i = 0; i < topics.length; i++) {
				final String topic = topics[i];
				if ( topic.endsWith("/*") )
				{
					// prefix topic: we remove the /*
					if ( newMatchingPrefixTopic == null )
					{
						newMatchingPrefixTopic = new HashMap(this.matchingPrefixTopic);
					}
					final String prefix = topic.substring(0, topic.length() - 2);
					this.updateMap(newMatchingPrefixTopic, prefix, proxy, false);
				}
				else
				{
					// exact match
					if ( newMatchingTopic == null )
					{
						newMatchingTopic = new HashMap(this.matchingTopic);
					}
					this.updateMap(newMatchingTopic, topic, proxy, false);
				}
			}
			// publish fully built maps last (see put() for the rationale)
			if ( newMatchingTopic != null )
			{
				this.matchingTopic = newMatchingTopic;
			}
			if ( newMatchingPrefixTopic != null )
			{
				this.matchingPrefixTopic = newMatchingPrefixTopic;
			}
		}
	}
	/**
	 * Get all handlers for this event
	 *
	 * @param event The event topic
	 * @return All handlers for the event
	 */
	public Collection getHandlers(final Event event) {
		final String topic = event.getTopic();
		final Set handlers = new HashSet();
		// Add all handlers matching everything
		handlers.addAll(this.matchingAllEvents);
		// Now check for prefix matches
		if ( !this.matchingPrefixTopic.isEmpty() )
		{
			// walk up the topic hierarchy: for "a/b/c" check prefixes "a/b", then "a"
			int pos = topic.lastIndexOf('/');
			while (pos != -1)
			{
				final String prefix = topic.substring(0, pos);
				List proxies = (List)this.matchingPrefixTopic.get(prefix);
				if (proxies != null)
				{
					handlers.addAll(proxies);
				}
				pos = prefix.lastIndexOf('/');
			}
		}
		// Add the handlers for matching topic names
		List proxies = (List)this.matchingTopic.get(topic);
		if (proxies != null) {
			handlers.addAll(proxies);
		}
		// now check permission and filters
		final Iterator i = handlers.iterator();
		while ( i.hasNext() ) {
			final EventHandlerProxy proxy = (EventHandlerProxy) i.next();
			if ( !proxy.canDeliver(event) ) {
				i.remove();
			}
		}
		return handlers;
	}
	/**
	 * The matcher interface for checking if timeout handling
	 * is disabled for the handler.
	 * Matching is based on the class name of the event handler.
	 */
	static interface Matcher
	{
		boolean match(String className);
	}
	/** Match a package. */
	private static final class PackageMatcher implements Matcher
	{
		private final String m_packageName;
		public PackageMatcher(final String name)
		{
			m_packageName = name;
		}
		public boolean match(String className)
		{
			// true only for classes whose package equals m_packageName exactly
			final int pos = className.lastIndexOf('.');
			return pos > -1 && className.substring(0, pos).equals(m_packageName);
		}
	}
	/** Match a package or sub package. */
	private static final class SubPackageMatcher implements Matcher
	{
		private final String m_packageName;
		public SubPackageMatcher(final String name)
		{
			// trailing dot ensures "a.b" does not match package "a.bc"
			m_packageName = name + '.';
		}
		public boolean match(String className)
		{
			final int pos = className.lastIndexOf('.');
			return pos > -1 && className.substring(0, pos + 1).startsWith(m_packageName);
		}
	}
	/** Match a class name. */
	private static final class ClassMatcher implements Matcher
	{
		private final String m_className;
		public ClassMatcher(final String name)
		{
			m_className = name;
		}
		public boolean match(String className)
		{
			return m_className.equals(className);
		}
	}
	/**
	 * The context object passed to the proxies.
	 */
	static final class HandlerContext
	{
		/** The bundle context. */
		public final BundleContext bundleContext;
		/** The matchers for ignore timeout handling. */
		public final Matcher[] ignoreTimeoutMatcher;
		/** Is a topic required. */
		public final boolean requireTopic;
		public HandlerContext(final BundleContext bundleContext,
				final Matcher[] ignoreTimeoutMatcher,
				final boolean requireTopic)
		{
			this.bundleContext = bundleContext;
			this.ignoreTimeoutMatcher = ignoreTimeoutMatcher;
			this.requireTopic = requireTopic;
		}
	}
}
|
|
/**
*/
package gluemodel.CIM.IEC61970.Informative.MarketOperations.impl;
import gluemodel.CIM.IEC61968.Common.impl.DocumentImpl;
import gluemodel.CIM.IEC61970.Informative.MarketOperations.Bid;
import gluemodel.CIM.IEC61970.Informative.MarketOperations.BidClearing;
import gluemodel.CIM.IEC61970.Informative.MarketOperations.Market;
import gluemodel.CIM.IEC61970.Informative.MarketOperations.MarketOperationsPackage;
import gluemodel.CIM.IEC61970.Informative.MarketOperations.ProductBid;
import java.util.Collection;
import java.util.Date;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.notify.NotificationChain;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;
import org.eclipse.emf.ecore.impl.ENotificationImpl;
import org.eclipse.emf.ecore.util.EObjectWithInverseResolvingEList;
import org.eclipse.emf.ecore.util.InternalEList;
/**
* <!-- begin-user-doc -->
* An implementation of the model object '<em><b>Bid</b></em>'.
* <!-- end-user-doc -->
* <p>
* The following features are implemented:
* </p>
* <ul>
* <li>{@link gluemodel.CIM.IEC61970.Informative.MarketOperations.impl.BidImpl#getStopTime <em>Stop Time</em>}</li>
* <li>{@link gluemodel.CIM.IEC61970.Informative.MarketOperations.impl.BidImpl#getMarketType <em>Market Type</em>}</li>
* <li>{@link gluemodel.CIM.IEC61970.Informative.MarketOperations.impl.BidImpl#getProductBids <em>Product Bids</em>}</li>
* <li>{@link gluemodel.CIM.IEC61970.Informative.MarketOperations.impl.BidImpl#getBidClearing <em>Bid Clearing</em>}</li>
* <li>{@link gluemodel.CIM.IEC61970.Informative.MarketOperations.impl.BidImpl#getStartTime <em>Start Time</em>}</li>
* <li>{@link gluemodel.CIM.IEC61970.Informative.MarketOperations.impl.BidImpl#getMarket <em>Market</em>}</li>
* </ul>
*
* @generated
*/
// NOTE(review): EMF-generated implementation class - members tagged @generated
// are rewritten by the code generator; hand written changes must go between
// the begin-user-doc/end-user-doc markers or the tag must be changed to
// "@generated NOT" to survive regeneration.
public class BidImpl extends DocumentImpl implements Bid {
	/**
	 * The default value of the '{@link #getStopTime() <em>Stop Time</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getStopTime()
	 * @generated
	 * @ordered
	 */
	protected static final Date STOP_TIME_EDEFAULT = null;
	/**
	 * The cached value of the '{@link #getStopTime() <em>Stop Time</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getStopTime()
	 * @generated
	 * @ordered
	 */
	protected Date stopTime = STOP_TIME_EDEFAULT;
	/**
	 * The default value of the '{@link #getMarketType() <em>Market Type</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getMarketType()
	 * @generated
	 * @ordered
	 */
	protected static final String MARKET_TYPE_EDEFAULT = null;
	/**
	 * The cached value of the '{@link #getMarketType() <em>Market Type</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getMarketType()
	 * @generated
	 * @ordered
	 */
	protected String marketType = MARKET_TYPE_EDEFAULT;
	/**
	 * The cached value of the '{@link #getProductBids() <em>Product Bids</em>}' reference list.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getProductBids()
	 * @generated
	 * @ordered
	 */
	protected EList<ProductBid> productBids;
	/**
	 * The cached value of the '{@link #getBidClearing() <em>Bid Clearing</em>}' reference.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getBidClearing()
	 * @generated
	 * @ordered
	 */
	protected BidClearing bidClearing;
	/**
	 * The default value of the '{@link #getStartTime() <em>Start Time</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getStartTime()
	 * @generated
	 * @ordered
	 */
	protected static final Date START_TIME_EDEFAULT = null;
	/**
	 * The cached value of the '{@link #getStartTime() <em>Start Time</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getStartTime()
	 * @generated
	 * @ordered
	 */
	protected Date startTime = START_TIME_EDEFAULT;
	/**
	 * The cached value of the '{@link #getMarket() <em>Market</em>}' reference.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getMarket()
	 * @generated
	 * @ordered
	 */
	protected Market market;
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected BidImpl() {
		super();
	}
	/**
	 * <!-- begin-user-doc -->
	 * Returns the static EMF metaclass describing this model object.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	protected EClass eStaticClass() {
		return MarketOperationsPackage.Literals.BID;
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public Date getStopTime() {
		return stopTime;
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setStopTime(Date newStopTime) {
		Date oldStopTime = stopTime;
		stopTime = newStopTime;
		// notify registered adapters only when someone is listening
		if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, MarketOperationsPackage.BID__STOP_TIME, oldStopTime, stopTime));
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public String getMarketType() {
		return marketType;
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setMarketType(String newMarketType) {
		String oldMarketType = marketType;
		marketType = newMarketType;
		if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, MarketOperationsPackage.BID__MARKET_TYPE, oldMarketType, marketType));
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public EList<ProductBid> getProductBids() {
		// lazily create the list; it maintains the inverse ProductBid.bid feature
		if (productBids == null) {
			productBids = new EObjectWithInverseResolvingEList<ProductBid>(ProductBid.class, this, MarketOperationsPackage.BID__PRODUCT_BIDS, MarketOperationsPackage.PRODUCT_BID__BID);
		}
		return productBids;
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public BidClearing getBidClearing() {
		// lazily resolve a cross-resource proxy; fire a RESOLVE notification
		// when resolution replaced the stored proxy with the real object
		if (bidClearing != null && bidClearing.eIsProxy()) {
			InternalEObject oldBidClearing = (InternalEObject)bidClearing;
			bidClearing = (BidClearing)eResolveProxy(oldBidClearing);
			if (bidClearing != oldBidClearing) {
				if (eNotificationRequired())
					eNotify(new ENotificationImpl(this, Notification.RESOLVE, MarketOperationsPackage.BID__BID_CLEARING, oldBidClearing, bidClearing));
			}
		}
		return bidClearing;
	}
	/**
	 * <!-- begin-user-doc -->
	 * Accessor that does not resolve proxies.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public BidClearing basicGetBidClearing() {
		return bidClearing;
	}
	/**
	 * <!-- begin-user-doc -->
	 * Sets the reference without touching the inverse side; notifications are
	 * chained into {@code msgs}.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public NotificationChain basicSetBidClearing(BidClearing newBidClearing, NotificationChain msgs) {
		BidClearing oldBidClearing = bidClearing;
		bidClearing = newBidClearing;
		if (eNotificationRequired()) {
			ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, MarketOperationsPackage.BID__BID_CLEARING, oldBidClearing, newBidClearing);
			if (msgs == null) msgs = notification; else msgs.add(notification);
		}
		return msgs;
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setBidClearing(BidClearing newBidClearing) {
		// maintain both ends of the bidirectional BidClearing.bid reference
		if (newBidClearing != bidClearing) {
			NotificationChain msgs = null;
			if (bidClearing != null)
				msgs = ((InternalEObject)bidClearing).eInverseRemove(this, MarketOperationsPackage.BID_CLEARING__BID, BidClearing.class, msgs);
			if (newBidClearing != null)
				msgs = ((InternalEObject)newBidClearing).eInverseAdd(this, MarketOperationsPackage.BID_CLEARING__BID, BidClearing.class, msgs);
			msgs = basicSetBidClearing(newBidClearing, msgs);
			if (msgs != null) msgs.dispatch();
		}
		else if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, MarketOperationsPackage.BID__BID_CLEARING, newBidClearing, newBidClearing));
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public Date getStartTime() {
		return startTime;
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setStartTime(Date newStartTime) {
		Date oldStartTime = startTime;
		startTime = newStartTime;
		if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, MarketOperationsPackage.BID__START_TIME, oldStartTime, startTime));
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public Market getMarket() {
		// same lazy proxy-resolution pattern as getBidClearing()
		if (market != null && market.eIsProxy()) {
			InternalEObject oldMarket = (InternalEObject)market;
			market = (Market)eResolveProxy(oldMarket);
			if (market != oldMarket) {
				if (eNotificationRequired())
					eNotify(new ENotificationImpl(this, Notification.RESOLVE, MarketOperationsPackage.BID__MARKET, oldMarket, market));
			}
		}
		return market;
	}
	/**
	 * <!-- begin-user-doc -->
	 * Accessor that does not resolve proxies.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public Market basicGetMarket() {
		return market;
	}
	/**
	 * <!-- begin-user-doc -->
	 * Sets the reference without touching the inverse side; notifications are
	 * chained into {@code msgs}.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public NotificationChain basicSetMarket(Market newMarket, NotificationChain msgs) {
		Market oldMarket = market;
		market = newMarket;
		if (eNotificationRequired()) {
			ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, MarketOperationsPackage.BID__MARKET, oldMarket, newMarket);
			if (msgs == null) msgs = notification; else msgs.add(notification);
		}
		return msgs;
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setMarket(Market newMarket) {
		// maintain both ends of the bidirectional Market.bids reference
		if (newMarket != market) {
			NotificationChain msgs = null;
			if (market != null)
				msgs = ((InternalEObject)market).eInverseRemove(this, MarketOperationsPackage.MARKET__BIDS, Market.class, msgs);
			if (newMarket != null)
				msgs = ((InternalEObject)newMarket).eInverseAdd(this, MarketOperationsPackage.MARKET__BIDS, Market.class, msgs);
			msgs = basicSetMarket(newMarket, msgs);
			if (msgs != null) msgs.dispatch();
		}
		else if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, MarketOperationsPackage.BID__MARKET, newMarket, newMarket));
	}
	/**
	 * <!-- begin-user-doc -->
	 * Framework hook: connects the inverse side when the opposite end of a
	 * bidirectional reference is added.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@SuppressWarnings("unchecked")
	@Override
	public NotificationChain eInverseAdd(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
		switch (featureID) {
			case MarketOperationsPackage.BID__PRODUCT_BIDS:
				return ((InternalEList<InternalEObject>)(InternalEList<?>)getProductBids()).basicAdd(otherEnd, msgs);
			case MarketOperationsPackage.BID__BID_CLEARING:
				// a bid may only reference one clearing: detach any previous one first
				if (bidClearing != null)
					msgs = ((InternalEObject)bidClearing).eInverseRemove(this, MarketOperationsPackage.BID_CLEARING__BID, BidClearing.class, msgs);
				return basicSetBidClearing((BidClearing)otherEnd, msgs);
			case MarketOperationsPackage.BID__MARKET:
				if (market != null)
					msgs = ((InternalEObject)market).eInverseRemove(this, MarketOperationsPackage.MARKET__BIDS, Market.class, msgs);
				return basicSetMarket((Market)otherEnd, msgs);
		}
		return super.eInverseAdd(otherEnd, featureID, msgs);
	}
	/**
	 * <!-- begin-user-doc -->
	 * Framework hook: disconnects the inverse side when the opposite end of a
	 * bidirectional reference is removed.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
		switch (featureID) {
			case MarketOperationsPackage.BID__PRODUCT_BIDS:
				return ((InternalEList<?>)getProductBids()).basicRemove(otherEnd, msgs);
			case MarketOperationsPackage.BID__BID_CLEARING:
				return basicSetBidClearing(null, msgs);
			case MarketOperationsPackage.BID__MARKET:
				return basicSetMarket(null, msgs);
		}
		return super.eInverseRemove(otherEnd, featureID, msgs);
	}
	/**
	 * <!-- begin-user-doc -->
	 * Reflective getter used by the EMF framework.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public Object eGet(int featureID, boolean resolve, boolean coreType) {
		switch (featureID) {
			case MarketOperationsPackage.BID__STOP_TIME:
				return getStopTime();
			case MarketOperationsPackage.BID__MARKET_TYPE:
				return getMarketType();
			case MarketOperationsPackage.BID__PRODUCT_BIDS:
				return getProductBids();
			case MarketOperationsPackage.BID__BID_CLEARING:
				if (resolve) return getBidClearing();
				return basicGetBidClearing();
			case MarketOperationsPackage.BID__START_TIME:
				return getStartTime();
			case MarketOperationsPackage.BID__MARKET:
				if (resolve) return getMarket();
				return basicGetMarket();
		}
		return super.eGet(featureID, resolve, coreType);
	}
	/**
	 * <!-- begin-user-doc -->
	 * Reflective setter used by the EMF framework.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@SuppressWarnings("unchecked")
	@Override
	public void eSet(int featureID, Object newValue) {
		switch (featureID) {
			case MarketOperationsPackage.BID__STOP_TIME:
				setStopTime((Date)newValue);
				return;
			case MarketOperationsPackage.BID__MARKET_TYPE:
				setMarketType((String)newValue);
				return;
			case MarketOperationsPackage.BID__PRODUCT_BIDS:
				getProductBids().clear();
				getProductBids().addAll((Collection<? extends ProductBid>)newValue);
				return;
			case MarketOperationsPackage.BID__BID_CLEARING:
				setBidClearing((BidClearing)newValue);
				return;
			case MarketOperationsPackage.BID__START_TIME:
				setStartTime((Date)newValue);
				return;
			case MarketOperationsPackage.BID__MARKET:
				setMarket((Market)newValue);
				return;
		}
		super.eSet(featureID, newValue);
	}
	/**
	 * <!-- begin-user-doc -->
	 * Reflective unset: restores each feature to its default value.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public void eUnset(int featureID) {
		switch (featureID) {
			case MarketOperationsPackage.BID__STOP_TIME:
				setStopTime(STOP_TIME_EDEFAULT);
				return;
			case MarketOperationsPackage.BID__MARKET_TYPE:
				setMarketType(MARKET_TYPE_EDEFAULT);
				return;
			case MarketOperationsPackage.BID__PRODUCT_BIDS:
				getProductBids().clear();
				return;
			case MarketOperationsPackage.BID__BID_CLEARING:
				setBidClearing((BidClearing)null);
				return;
			case MarketOperationsPackage.BID__START_TIME:
				setStartTime(START_TIME_EDEFAULT);
				return;
			case MarketOperationsPackage.BID__MARKET:
				setMarket((Market)null);
				return;
		}
		super.eUnset(featureID);
	}
	/**
	 * <!-- begin-user-doc -->
	 * Reflective check whether a feature differs from its default value.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public boolean eIsSet(int featureID) {
		switch (featureID) {
			case MarketOperationsPackage.BID__STOP_TIME:
				return STOP_TIME_EDEFAULT == null ? stopTime != null : !STOP_TIME_EDEFAULT.equals(stopTime);
			case MarketOperationsPackage.BID__MARKET_TYPE:
				return MARKET_TYPE_EDEFAULT == null ? marketType != null : !MARKET_TYPE_EDEFAULT.equals(marketType);
			case MarketOperationsPackage.BID__PRODUCT_BIDS:
				return productBids != null && !productBids.isEmpty();
			case MarketOperationsPackage.BID__BID_CLEARING:
				return bidClearing != null;
			case MarketOperationsPackage.BID__START_TIME:
				return START_TIME_EDEFAULT == null ? startTime != null : !START_TIME_EDEFAULT.equals(startTime);
			case MarketOperationsPackage.BID__MARKET:
				return market != null;
		}
		return super.eIsSet(featureID);
	}
	/**
	 * <!-- begin-user-doc -->
	 * Only plain attributes are printed; references are omitted.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public String toString() {
		if (eIsProxy()) return super.toString();
		StringBuffer result = new StringBuffer(super.toString());
		result.append(" (stopTime: ");
		result.append(stopTime);
		result.append(", marketType: ");
		result.append(marketType);
		result.append(", startTime: ");
		result.append(startTime);
		result.append(')');
		return result.toString();
	}
} //BidImpl
|
|
/**
* Copyright 2007-2016, Kaazing Corporation. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.mina.proxy;
import java.util.LinkedList;
import java.util.Queue;
import org.apache.mina.core.buffer.IoBuffer;
import org.apache.mina.core.filterchain.IoFilter.NextFilter;
import org.apache.mina.core.future.DefaultWriteFuture;
import org.apache.mina.core.future.WriteFuture;
import org.apache.mina.core.session.IoSession;
import org.apache.mina.core.write.DefaultWriteRequest;
import org.apache.mina.core.write.WriteRequest;
import org.apache.mina.proxy.filter.ProxyFilter;
import org.apache.mina.proxy.filter.ProxyHandshakeIoBuffer;
import org.apache.mina.proxy.session.ProxyIoSession;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* AbstractProxyLogicHandler.java - Helper class to handle proxy handshaking logic. Derived classes
* implement proxy type specific logic.
* <p>
* Based upon SSLHandler from mina-filter-ssl.
*
* @author <a href="http://mina.apache.org">Apache MINA Project</a>
* @since MINA 2.0.0-M3
*/
public abstract class AbstractProxyLogicHandler implements ProxyLogicHandler {
    private static final Logger LOGGER = LoggerFactory.getLogger(AbstractProxyLogicHandler.class);

    /** All the proxy authentication state attached to the session. */
    private ProxyIoSession proxyIoSession;

    /** Write requests received while the proxy handshake was still in progress. */
    private Queue<Event> writeRequestQueue = null;

    /** Whether the proxy handshake has finished; guarded by {@code this}. */
    private boolean handshakeComplete = false;

    /**
     * Creates a new {@link AbstractProxyLogicHandler}.
     *
     * @param proxyIoSession {@link ProxyIoSession} in use.
     */
    public AbstractProxyLogicHandler(ProxyIoSession proxyIoSession) {
        this.proxyIoSession = proxyIoSession;
    }

    /**
     * Returns the {@link ProxyFilter} driving this handler.
     */
    protected ProxyFilter getProxyFilter() {
        return proxyIoSession.getProxyFilter();
    }

    /**
     * Returns the {@link IoSession} being proxied.
     */
    protected IoSession getSession() {
        return proxyIoSession.getSession();
    }

    /**
     * Returns the {@link ProxyIoSession} object.
     */
    public ProxyIoSession getProxyIoSession() {
        return proxyIoSession;
    }

    /**
     * Writes data to the proxy server.
     *
     * @param nextFilter the next filter
     * @param data Data buffer to be written.
     * @return a future resolved once the buffer has been written
     */
    protected WriteFuture writeData(final NextFilter nextFilter,
            final IoBuffer data) {
        // Wrap the raw buffer so the filter chain treats it as handshake traffic.
        final ProxyHandshakeIoBuffer handshakeData = new ProxyHandshakeIoBuffer(data);
        LOGGER.debug(" session write: {}", handshakeData);

        final WriteFuture future = new DefaultWriteFuture(getSession());
        getProxyFilter().writeData(nextFilter, getSession(),
                new DefaultWriteRequest(handshakeData, future), true);

        return future;
    }

    /**
     * Returns <code>true</code> if handshaking is complete and
     * data can be sent through the proxy.
     */
    public synchronized boolean isHandshakeComplete() {
        return handshakeComplete;
    }

    /**
     * Signals that the handshake has finished.
     */
    protected final void setHandshakeComplete() {
        synchronized (this) {
            handshakeComplete = true;
        }

        final ProxyIoSession session = getProxyIoSession();
        // Notify the connector that the proxied connection is usable and wait
        // until the connected event has been fully processed.
        session.getConnector()
                .fireConnected(session.getSession())
                .awaitUninterruptibly();
        LOGGER.debug(" handshake completed");

        // Connected OK: replay everything held back during handshaking.
        try {
            session.getEventQueue().flushPendingSessionEvents();
            flushPendingWriteRequests();
        } catch (Exception ex) {
            LOGGER.error("Unable to flush pending write requests", ex);
        }
    }

    /**
     * Send any write requests which were queued whilst waiting for handshaking to complete.
     */
    protected synchronized void flushPendingWriteRequests() throws Exception {
        LOGGER.debug(" flushPendingWriteRequests()");

        if (writeRequestQueue == null) {
            return;
        }

        // Drain the queue in arrival order through the proxy filter.
        for (Event queued = writeRequestQueue.poll(); queued != null; queued = writeRequestQueue.poll()) {
            LOGGER.debug(" Flushing buffered write request: {}",
                    queued.data);
            getProxyFilter().filterWrite(queued.nextFilter,
                    getSession(), (WriteRequest) queued.data);
        }

        // Free queue
        writeRequestQueue = null;
    }

    /**
     * Enqueue a message to be written once handshaking is complete.
     */
    public synchronized void enqueueWriteRequest(final NextFilter nextFilter,
            final WriteRequest writeRequest) {
        if (writeRequestQueue == null) {
            writeRequestQueue = new LinkedList<>();
        }

        writeRequestQueue.offer(new Event(nextFilter, writeRequest));
    }

    /**
     * Closes the session.
     *
     * @param message the error message
     * @param t the exception which caused the session closing
     */
    protected void closeSession(final String message, final Throwable t) {
        if (t == null) {
            LOGGER.error(message);
        } else {
            LOGGER.error(message, t);
            // A handshake-time failure means authentication did not succeed.
            proxyIoSession.setAuthenticationFailed(true);
        }

        getSession().close(true);
    }

    /**
     * Closes the session.
     *
     * @param message the error message
     */
    protected void closeSession(final String message) {
        closeSession(message, null);
    }

    /**
     * Event wrapper class for enqueued events.
     */
    private static final class Event {
        private final NextFilter nextFilter;

        private final Object data;

        Event(final NextFilter nextFilter, final Object data) {
            this.nextFilter = nextFilter;
            this.data = data;
        }

        public Object getData() {
            return data;
        }

        public NextFilter getNextFilter() {
            return nextFilter;
        }
    }
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.runtime.taskexecutor;
import org.apache.flink.api.common.ExecutionConfig;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.common.time.Deadline;
import org.apache.flink.runtime.execution.Environment;
import org.apache.flink.runtime.execution.ExecutionState;
import org.apache.flink.runtime.executiongraph.AccessExecution;
import org.apache.flink.runtime.executiongraph.AccessExecutionGraph;
import org.apache.flink.runtime.executiongraph.ExecutionGraphTestUtils;
import org.apache.flink.runtime.io.network.partition.ResultPartitionType;
import org.apache.flink.runtime.jobgraph.DistributionPattern;
import org.apache.flink.runtime.jobgraph.JobGraph;
import org.apache.flink.runtime.jobgraph.JobVertex;
import org.apache.flink.runtime.jobmanager.scheduler.SlotSharingGroup;
import org.apache.flink.runtime.jobmaster.JobResult;
import org.apache.flink.runtime.jobmaster.TestingAbstractInvokables;
import org.apache.flink.runtime.minicluster.TestingMiniCluster;
import org.apache.flink.runtime.minicluster.TestingMiniClusterConfiguration;
import org.apache.flink.runtime.testutils.CommonTestUtils;
import org.apache.flink.util.TestLogger;
import org.apache.flink.util.function.SupplierWithException;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import java.io.IOException;
import java.time.Duration;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CountDownLatch;
import java.util.function.Predicate;
import java.util.function.Supplier;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertThat;
/**
* Integration tests for the {@link TaskExecutor}.
*/
public class TaskExecutorITCase extends TestLogger {
	/** Upper bound for waiting on cluster state transitions in these tests. */
	private static final Duration TESTING_TIMEOUT = Duration.ofMinutes(2L);
	/** Number of TaskManagers started by the mini cluster. */
	private static final int NUM_TMS = 2;
	/** Number of task slots offered by each TaskManager. */
	private static final int SLOTS_PER_TM = 2;
	/** Job parallelism chosen so that all available slots are occupied. */
	private static final int PARALLELISM = NUM_TMS * SLOTS_PER_TM;
	/** Cluster under test; created in setup() and shut down in teardown(). */
	private TestingMiniCluster miniCluster;
	@Before
	public void setup() throws Exception {
		// Start a fresh mini cluster with NUM_TMS TaskManagers of
		// SLOTS_PER_TM slots each. NOTE(review): the second constructor
		// argument is passed as null - presumably an optional supplier with a
		// default; confirm against the TestingMiniCluster constructor.
		miniCluster = new TestingMiniCluster(
			new TestingMiniClusterConfiguration.Builder()
				.setNumTaskManagers(NUM_TMS)
				.setNumSlotsPerTaskManager(SLOTS_PER_TM)
				.build(),
			null);
		miniCluster.start();
	}
	@After
	public void teardown() throws Exception {
		// Release cluster resources; guard against setup() having failed
		// before the cluster instance was created.
		if (miniCluster != null) {
			miniCluster.close();
		}
	}
	/**
	 * Tests that a job can be re-executed after the job has failed due
	 * to a TaskExecutor termination.
	 */
	@Test
	public void testJobReExecutionAfterTaskExecutorTermination() throws Exception {
		final JobGraph jobGraph = createJobGraph(PARALLELISM);
		final CompletableFuture<JobResult> jobResultFuture = submitJobAndWaitUntilRunning(jobGraph);
		// kill one TaskExecutor which should fail the job execution
		miniCluster.terminateTaskExecutor(0);
		// the job result must report the failure
		final JobResult jobResult = jobResultFuture.get();
		assertThat(jobResult.isSuccess(), is(false));
		// bring a replacement TaskExecutor up so the re-submitted job has slots
		miniCluster.startTaskExecutor();
		final JobGraph newJobGraph = createJobGraph(PARALLELISM);
		// release the blocking tasks so the new job can run to completion
		BlockingOperator.unblock();
		miniCluster.submitJob(newJobGraph).get();
		miniCluster.requestJobResult(newJobGraph.getJobID()).get();
	}
/**
* Tests that the job can recover from a failing {@link TaskExecutor}.
*/
@Test
public void testJobRecoveryWithFailingTaskExecutor() throws Exception {
final JobGraph jobGraph = createJobGraphWithRestartStrategy(PARALLELISM);
final CompletableFuture<JobResult> jobResultFuture = submitJobAndWaitUntilRunning(jobGraph);
// start an additional TaskExecutor
miniCluster.startTaskExecutor();
miniCluster.terminateTaskExecutor(0).get(); // this should fail the job
BlockingOperator.unblock();
assertThat(jobResultFuture.get().isSuccess(), is(true));
}
private CompletableFuture<JobResult> submitJobAndWaitUntilRunning(JobGraph jobGraph) throws Exception {
miniCluster.submitJob(jobGraph).get();
final CompletableFuture<JobResult> jobResultFuture = miniCluster.requestJobResult(jobGraph.getJobID());
assertThat(jobResultFuture.isDone(), is(false));
CommonTestUtils.waitUntilCondition(
jobIsRunning(() -> miniCluster.getExecutionGraph(jobGraph.getJobID())),
Deadline.fromNow(TESTING_TIMEOUT),
50L);
return jobResultFuture;
}
private SupplierWithException<Boolean, Exception> jobIsRunning(Supplier<CompletableFuture<? extends AccessExecutionGraph>> executionGraphFutureSupplier) {
final Predicate<AccessExecution> runningOrFinished = ExecutionGraphTestUtils.isInExecutionState(ExecutionState.RUNNING).or(ExecutionGraphTestUtils.isInExecutionState(ExecutionState.FINISHED));
final Predicate<AccessExecutionGraph> allExecutionsRunning = ExecutionGraphTestUtils.allExecutionsPredicate(runningOrFinished);
return () -> {
final AccessExecutionGraph executionGraph = executionGraphFutureSupplier.get().join();
return allExecutionsRunning.test(executionGraph);
};
}
private JobGraph createJobGraphWithRestartStrategy(int parallelism) throws IOException {
final JobGraph jobGraph = createJobGraph(parallelism);
final ExecutionConfig executionConfig = new ExecutionConfig();
executionConfig.setRestartStrategy(RestartStrategies.fixedDelayRestart(2, 0L));
jobGraph.setExecutionConfig(executionConfig);
return jobGraph;
}
private JobGraph createJobGraph(int parallelism) {
final JobVertex sender = new JobVertex("Sender");
sender.setParallelism(parallelism);
sender.setInvokableClass(TestingAbstractInvokables.Sender.class);
final JobVertex receiver = new JobVertex("Blocking receiver");
receiver.setParallelism(parallelism);
receiver.setInvokableClass(BlockingOperator.class);
BlockingOperator.reset();
receiver.connectNewDataSetAsInput(sender, DistributionPattern.POINTWISE, ResultPartitionType.PIPELINED);
final SlotSharingGroup slotSharingGroup = new SlotSharingGroup();
sender.setSlotSharingGroup(slotSharingGroup);
receiver.setSlotSharingGroup(slotSharingGroup);
return new JobGraph("Blocking test job with slot sharing", sender, receiver);
}
/**
* Blocking invokable which is controlled by a static field.
*/
public static class BlockingOperator extends TestingAbstractInvokables.Receiver {
private static CountDownLatch countDownLatch = new CountDownLatch(1);
public BlockingOperator(Environment environment) {
super(environment);
}
@Override
public void invoke() throws Exception {
countDownLatch.await();
super.invoke();
}
public static void unblock() {
countDownLatch.countDown();
}
public static void reset() {
countDownLatch = new CountDownLatch(1);
}
}
}
|
|
/**
* license (MIT) Copyright Nubisa Inc. 2014
*/
package jxm;
import com.google.gson.Gson;
import com.google.gson.internal.LinkedTreeMap;
import javax.crypto.*;
import javax.crypto.spec.*;
import java.security.*;
import java.lang.reflect.Method;
import java.util.*;
/**
* jxm.io Java Client
*/
public class Client {
private class Disconnector extends Thread {
Client dc;
public Disconnector(Client c) {
super(c.clientId + "-Disconnect");
dc = c;
}
@Override
public void run() {
try {
Thread.sleep(1);
} catch (Exception e) {
}
if (dc.Events != null)
dc.Events.OnClose(dc);
}
}
    // Application name; used as the URL path segment of the jx endpoint.
    private String applicationPath = "";
    // Local object answering server-initiated method calls (may be null).
    private Object classToCall;
    // Id assigned by the server during Connect(); null until connected.
    private String clientId = null;
    // Once true the instance can never reconnect (see goClose()).
    boolean closed = false;
    // Whether the server negotiated encrypted messaging during Connect().
    private boolean encrypted = false;
    /**
     * The ClientEvents object for listening of client's events.
     * @custom.example
     * <pre>
     * [code language="java" smarttabs="true"]
     * jxm.ClientEvents events = new ClientEvents(){
     * @Override public void OnErrorReceived(Client c, String Message) {
     * //Error received
     * }
     * @Override public void OnClientConnected(Client c) {
     * //Client is connected
     * }
     * @Override public void OnClientDisconnected(Client c) {
     * //Client is disconnected
     * }
     * @Override public void OnEventLog(Client c, String log, LogLevel level) {
     * //get the event log from here
     * }
     * };
     * //now we may define this listener into our Client instance
     * client.Events = event;
     * [/code]
     * </pre>
     */
    public ClientEvents Events = null;
    private boolean isConnected = false;
    private boolean isListenerActive = false;
    private boolean isSecure = false;
    // Long-poll/socket channel receiving server pushes.
    private PListen listener = null;
    // Method cache of classToCall, keyed by method name (built in Initialize()).
    private HashMap<String, Method> methodsOfCall = new HashMap<String, Method>();
    // Channel used for outgoing calls.
    private PListen send = null;
    private int socketPort = 8000;
    private String socketURL = null;
    @SuppressWarnings("rawtypes")
    private Class typeToCall;
/**
* Creates an instance of JXCore Java Client.
*
* @param localTarget The local object will be answering the calls from server. i.e new Test()
* @param appName Application Name
* @param appKey Secure Application Key
* @param url JXcore server URL i.e. sampledomain.com or 120.1.2.3
* @param port Server port
* @param secure Server SSL support
*
* @custom.example
* <pre>
* [code language="java" smarttabs="true"]
* // let's create a client instance
* Client client = new Client(new CustomMethods(), "channels",
* "NUBISA-STANDARD-KEY-CHANGE-THIS", "localhost", 8000, false);
* [/code]
* </pre>
*/
public Client(Object localTarget, String appName, String appKey, String url, int port, boolean secure) {
socketURL = url;
socketPort = port;
isSecure = secure;
applicationPath = appName;
applicationKey = PListen.getUID(false) + "|" + appName;
setSecureKey(appKey);
this.classToCall = localTarget;
if (localTarget != null && CustomMethodsBase.class.isAssignableFrom(localTarget.getClass())) {
((CustomMethodsBase)localTarget).SetClient(this);
}
}
    /**
     * Creates an instance of JXcore Java Client.
     *
     * @param localTarget The local object will be answering the calls from server. i.e new Test()
     * @param appName Application Name
     * @param appKey Secure Application Key
     * @param url JXcore server URL i.e. sampledomain.com or 120.1.2.3
     * @param port Server port
     * @param secure Server SSL support
     * @param resetUID Reset the unique instance id (session id)
     * @custom.example
     * <pre>
     * [code language="java" smarttabs="true"]
     * // let's create a client instance
     * Client client = new Client(new CustomMethods(), "channels",
     *     "NUBISA-STANDARD-KEY-CHANGE-THIS", "localhost", 8000, false, true);
     * [/code]
     * </pre>
     */
    public Client(Object localTarget, String appName, String appKey, String url, int port, boolean secure, boolean resetUID) {
        socketURL = url;
        socketPort = port;
        isSecure = secure;
        applicationPath = appName;
        // Session identity: "<instance-uid>|<appName>"; the uid is regenerated
        // when resetUID is true.
        applicationKey = PListen.getUID(resetUID) + "|" + appName;
        // Derive the sid token sent with every request (see setSecureKey/encrypt).
        setSecureKey(appKey);
        this.classToCall = localTarget;
        // Give CustomMethodsBase subclasses a back-reference to this client.
        if (localTarget != null && CustomMethodsBase.class.isAssignableFrom(localTarget.getClass())) {
            ((CustomMethodsBase)localTarget).SetClient(this);
        }
    }
    // Plain session identity ("<uid>|<appName>") used as the payload of the sid token.
    private String applicationKey;
    // AES-encrypted session token sent as "sid" with every request; null if encryption failed.
    private String securedKey = null;

    private void setSecureKey(String key){
        // Encrypt the session identity under the shared application key.
        securedKey = encrypt(key, applicationKey);
    }
public String encrypt(String key, String message){
try{
byte[] input = message.toString().getBytes("utf-8");
MessageDigest md = MessageDigest.getInstance("MD5");
byte[] thedigest = md.digest(key.getBytes("UTF-8"));
SecretKeySpec skc = new SecretKeySpec(thedigest, "AES");
Cipher cipher = Cipher.getInstance("AES/ECB/PKCS5Padding");
cipher.init(Cipher.ENCRYPT_MODE, skc);
byte[] cipherText = new byte[cipher.getOutputSize(input.length)];
int ctLength = cipher.update(input, 0, input.length, cipherText, 0);
ctLength += cipher.doFinal(cipherText, ctLength);
String str = Base64.encode(cipherText);
str = PListen.escape( str ).replace("+", "**43;");
return str;
}catch(Exception e){
if(Events!=null){Events.OnError(this, e.getMessage());}
return null;
}
}
/**
* Establishes the connection on a separate thread.
*/
public void AsyncConnect() {
Thread thread = new Thread() {
@Override
public void run() {
Connect();
}
};
thread.start();
}
/**
* Closes Client and disconnects from server.
*/
public void Close() {
goClose();
}
    /**
     * Closes Client and disconnects from server.
     */
    public void goClose() {
        if (closed)
            return;
        // Mark closed first so re-entrant calls (e.g. from Eval on a
        // "jxcore.Close" message) become no-ops.
        closed = true;
        fireLog("Closing connection", LogLevel.Informative);
        if (this.getIsConnected()) {
            listener.Dispose();
            send.exit = true;
            this.isConnected = false;
            if (Events != null) {
                // Deliver OnClose asynchronously so the caller is not blocked.
                new Disconnector(this).start();
            }
        }
    }
    /**
     * Subscribes the client to a group, or channel. From now on, messages sent to that group
     * by any other subscriber will be received by the client.
     * Also the client himself can send messages to this group - see jxcore.SendToGroup() method.
     * @param group Name of the group, to which the client is subscribing.
     * @param cb This is client's callback, which will be called after server will subscribe the client to the group.
     * @throws Exception
     * @custom.example
     * <pre>
     * [code language="java" smarttabs="true"]
     * try {
     *     client.Subscribe("programmers", new Callback() {
     *         @Override
     *         public void call(Object o) throws Exception {
     *             System.out.println("Subscribed to " + o.toString());
     *             client.SendToGroup("programmers", "clientMethod",
     *                 "Hello from client!");
     *         }
     *     });
     * } catch (Exception e) {
     *     System.out.println("Cannot subscribe.");
     * }
     * [/code]
     * </pre>
     */
    public void Subscribe(final String group, final Callback cb) throws Exception {
        if (group != null) {
            Map<String, Object> map = new HashMap<String, Object>();
            map.put("gr", group);
            // "en" is the current subscription key; the server rotates it on
            // every (un)subscribe and returns the new value under "key".
            map.put("en", enc);
            this.Call("nb.ssTo", map, new Callback() {
                @Override
                public void call(Object o, Integer err) throws Exception {
                    JSON js = (JSON) o;
                    if (err == 0) {
                        onSub(js.getValue("key").toString());
                        lastMessId = js.getValue("did").toString();
                    }
                    if (cb != null) {
                        // Report the group name and server error code to the caller.
                        cb.call(group, err);
                    }
                }
            });
        } else {
            Integer errCode = 6; /* must be non-empty string */
            if (cb != null) {
                cb.call(group, errCode);
            } else {
                throw new Exception("Error no " + errCode);
            }
        }
    }

    // Stores the rotated subscription key received from the server.
    private void onSub(String en){
        enc = en;
    }

    // Current subscription key; null until the first successful Subscribe.
    private String enc = null;
    /**
     * Unsubscribes the client from a group, or channel. From now on, messages sent to that group cannot be received by this client.
     * @param group {string} - Name of the group, from which the client is unsubscribing.
     * @param cb {function} - This is client's callback, which will be called after server will unsubscribe the client to the group.
     * @throws Exception
     * @custom.example
     * <pre>
     * [code language="java" smarttabs="true"]
     * try {
     *     client.Unsubscribe("programmers", new Callback() {
     *         @Override
     *         public void call(Object o) throws Exception {
     *             System.out.println("Unsubscribed from " + o.toString());
     *         }
     *     });
     * } catch (Exception e) {
     *     System.out.println("Cannot unsubscribe.");
     * }
     * [/code]
     * </pre>
     */
    public void Unsubscribe(final String group, final Callback cb) throws Exception {
        // Without a subscription key the client was never subscribed; nothing to do.
        if (enc == null) {
            return;
        }
        if (group != null) {
            Map<String, Object> map = new HashMap<String, Object>();
            map.put("gr", group);
            map.put("en", enc);
            this.Call("nb.unTo", map, new Callback() {
                @Override
                public void call(Object o, Integer err) throws Exception {
                    JSON js = (JSON) o;
                    if (err == 0) {
                        // Server rotates the subscription key on success.
                        onSub(js.getValue("key").toString());
                    }
                    if (cb != null) {
                        cb.call(group, err);
                    }
                }
            });
        } else {
            Integer errCode = 6; /* must be non-empty string */
            if (cb != null) {
                cb.call(group, errCode);
            } else {
                throw new Exception("Error no " + errCode);
            }
        }
    }
    /**
     * Sends message to all clients, that have already subscribed to the specific group.
     * @param groupName {string} - Name of the group, to which message should be sent.
     * @param methodName {string} - Client's custom method, which should be invoked of each of the group subscribers.
     * @param params {object} - The argument for that method.
     * @param cb Callback invoked when the server acknowledges the group send.
     * @custom.example
     * <pre>
     * [code language="java" smarttabs="true"]
     * // The "addText" method should be available on every client, which is subscribed to
     * // "programmers" group.
     * // While invoking the "addText" method at each client, the server will pass
     * // "Hello from client!" string as an argument.
     * cli.SendToGroup("programmers", "addText", "Hello from client!");
     * [/code]
     * </pre>
     */
    public void SendToGroup(String groupName, String methodName, Object params, Callback cb){
        Map<String, Object> map = new HashMap<String, Object>();
        map.put("gr", groupName);
        map.put("m", methodName);
        map.put("j", params);
        // "key" proves this client holds a valid subscription (see Subscribe()).
        map.put("key", enc);
        this.Call("nb.stGr", map, cb);
    }
/**
* Starts the client. Connects to the server.
* @return true/false based on the result.
* @custom.example
* <pre>
* [code language="java" smarttabs="true"]
* // we will try to connect now
* if (client.Connect()) {
* System.out.println("ready!");
* }
* [/code]
* </pre>
*/
public boolean Connect() {
if (isConnected) {
errorMessage("JXcore Client is already connected.");
return false;
}
if (closed) {
errorMessage("Once a Client is disconnected you may not use the same instance to reconnect back.");
return false;
}
fireLog("Connecting to server", LogLevel.Informative);
this.Initialize();
if(getSecuredKey() == null)
return false;
String connStr = socketURL.concat(":" + socketPort + "/" + applicationPath + "/jx?ms=connect&de=1&sid=" + getSecuredKey() + "&a");
if(!connStr.startsWith("http")) {
if (isSecure) {
connStr = "https://" + connStr;
} else {
connStr = "http://" + connStr;
}
}else{
if(connStr.startsWith("https"))
isSecure = true;
else
isSecure = false;
}
String str = send.downloadString(connStr);
boolean end = false;
if (str != null && str != "") {
String [] arr = str.split("\\|");
if(arr.length<2){
end = true;
}else{
clientId = arr[0];
try {
encrypted = Boolean.parseBoolean(arr[1]);
} catch (Exception e) {
errorMessage("Couldn't connect to server. more:" + e.getMessage());
return false;
}
isConnected = true;
}
}
else end = true;
if(end){
errorMessage("Couldn't connect to server. Check URL for service.");
return false;
}
fireLog("Connection script parsed. Starting to listen.", LogLevel.Informative);
// let listener start before OnClientConnected
boolean ret = Listen();
// if(Events!=null){
// Events.OnClientConnected(this);
// }
return ret;
}
public boolean getIsSecure(){
return isSecure;
}
private void errorMessage(String message) {
if (Events != null)
Events.OnError(this, message);
}
public final class JSON
{
LinkedTreeMap<String, Object> obj = null;
public JSON(String json, boolean isArray){
try{
if(!isArray){
obj = (LinkedTreeMap<String, Object>)aParser.fromJson(json, Object.class);
}else{
array = aParser.fromJson(json, Object[].class);
}
initialized = true;
}
catch(Exception e){
initialized = false;
if(Events != null){
Events.OnError(null, e.getMessage());
}
}
}
private boolean initialized = false;
public boolean isInitialized(){
return initialized;
}
private JSON(LinkedTreeMap<String, Object> o){
obj = o;
}
private Gson aParser = new Gson();
private Object [] array = null;
public void toArray(){
if(obj==null)
return;
array = obj.values().toArray();
}
public int size(){
if(array==null)
return 0;
return array.length;
}
public JSON getItem(int index){
if(array==null)
return null;
return new JSON((LinkedTreeMap<String, Object>)array[index]);
}
public boolean containsKey(String key){
if(obj==null)
return false;
return obj.containsKey(key);
}
public JSON getItem(String key){
if(obj==null)
return null;
return new JSON((LinkedTreeMap<String, Object>)obj.get(key));
}
public boolean isKeyObject(String key){
if(obj==null)
return false;
if(!obj.containsKey(key)){
return false;
}
return obj.get(key) instanceof LinkedTreeMap;
}
public Object getValue(String key){
if(obj == null)
return null;
return obj.get(key);
}
}
    // Id of the most recently processed server message.
    private String lastMessId = null;

    /**
     * Parses a batch of server messages and dispatches each one: protocol
     * control messages ("jxcore.*"), callback completions (numeric "i" index
     * into {@code callbacks}, negative ids routed to {@link #ssCall}), or
     * reflective invocations on the local target object.
     */
    private void Eval(String msg)
    {
        // Server may emit bare nulls; quote them so Gson keeps the key visible.
        msg = msg.replace(":null", ":'null'");
        fireLog("evaluating message:" + msg, LogLevel.Informative);
        // Wrap in [] so single messages and batches parse uniformly as an array.
        JSON json = new JSON("[" + msg + "]", true);
        int size = json.size();
        for(int i=0;i<size;i++){
            JSON js = json.getItem(i);
            // Track the delivery id ("i") for message ordering/resume.
            if(js.containsKey("i")){
                Object oi = js.getValue("i");
                if(oi!=null){
                    lastMessId = oi.toString();
                }
            }
            if(js.containsKey("o")){
                // "o" carries the actual operation payload.
                js = js.getItem("o");
                String methodName = null;
                String strIndex = null;
                if(js.containsKey("m")){
                    // "m": invoke a named method on the local target.
                    methodName = js.getValue("m").toString();
                }
                else if(js.containsKey("i")){
                    // "i" (inside "o"): complete a pending callback by index.
                    strIndex = js.getValue("i").toString();
                }
                else
                    continue;
                Object param = null;
                if(js.containsKey("p")){
                    if(js.isKeyObject("p")){
                        param = js.getItem("p");
                    }else
                    {
                        param = js.getValue("p");
                    }
                }
                if(methodName != null && (methodName.contains("jxcore.Listen") || methodName.contains("jxcore.Close"))){
                    // Protocol control: only Close has a client-side effect.
                    if(methodName.contains("jxcore.Close"))
                        this.goClose();
                }
                else if (strIndex!=null){
                    try{
                        // Index arrives as a float-formatted string (e.g. "3.0").
                        float fl = Float.valueOf(strIndex).floatValue();
                        int n = (int)fl;
                        if (n < 0) {
                            // Negative ids are reserved server-side calls.
                            ssCall(n, param);
                        } else {
                            Integer err = 0;
                            // Extract the server error code, if present, from the payload.
                            if (JSON.class.isAssignableFrom(param.getClass())) {
                                JSON p = (JSON)param;
                                Object nb_err = p.containsKey("nb_err") ? p.getValue("nb_err") : null;
                                if (nb_err != null) {
                                    float fl1 = Float.valueOf(nb_err.toString()).floatValue();
                                    err = (int)fl1;
                                }
                            }
                            // Callback indices are 1-based (see createJSON); slots
                            // are nulled after use rather than removed, so later
                            // indices stay valid.
                            if(callbacks.size()>n-1) {
                                callbacks.get(n-1).call(param, err);
                                callbacks.set(n-1, null);
                            }
                        }
                    }catch(Exception e){
                        errorMessage("CallbackInvoke at (" + strIndex + ") :" + e.getMessage());
                    }
                }
                else if (classToCall != null && methodsOfCall.containsKey(methodName)) {
                    try {
                        methodsOfCall.get(methodName).invoke(classToCall, param);
                    } catch (Exception e) {
                        errorMessage("MethodInvoke (" + methodName + ") :" + e.getMessage());
                    }
                } else {
                    fireLog("Method " + methodName + " wasn't exist on target object.", LogLevel.Critical);
                }
            }
        }
        json = null;
    }
// server-side call
private void ssCall(int id, Object param) {
if (id==-1) {
JSON js = (JSON)param;
Object key = js.getValue("key");
Object did = js.getValue("did");
if (key != null) {
onSub(key.toString());
}
if (did != null) {
lastMessId = did.toString();
}
if (Events != null) {
Events.OnSubscription(this,js.getValue("su").toString().toLowerCase() == "true", js.getValue("gr").toString());
}
}
}
/**
* Fires log event
*
* @param log
* @param level
*/
public void fireLog(String log, LogLevel level) {
if (Events != null) {
Events.OnEventLog(this, log, level);
}
}
/**
* Gets unique id of the client.
*/
public String GetClientId() {
return clientId;
}
public boolean getEncrypted() {
return encrypted;
}
public boolean getIsConnected() {
return isConnected;
}
public int getSocketPort() {
return socketPort;
}
public String getSocketURL() {
return socketURL;
}
public String getApplicationPath(){
return applicationPath;
}
    /**
     * Prepares the two transport channels (listen + send), wires their event
     * notifier to this client, and builds the reflective method cache for the
     * local target object. Called from Connect() before the handshake.
     */
    private void Initialize() {
        fireLog("Initializing Client", LogLevel.Informative);
        listener = new PListen("listen", this);
        send = new PListen("send", this);
        final Client dc = this;
        listener.notifier = new PEvents() {
            @Override
            public void ErrorReceived(String message) {
                errorMessage(message);
            }

            @Override
            public void MessageReceived(String message) {
                messageReceived(message);
            }

            @Override
            public void UpdateIsConnected(boolean connected) {
                isConnected = connected;
                fireLog("Connection state is updated to " + connected, LogLevel.Informative);
                if (Events != null) {
                    if (connected)
                        Events.OnConnect(dc);
                    else {
                        // OnClose is raised from goClose() when we closed
                        // deliberately; only report unexpected drops here.
                        if (!closed)
                            Events.OnClose(dc);
                    }
                }
            }
        };
        // Both channels share one notifier so errors surface identically.
        send.notifier = listener.notifier;
        isListenerActive = false;
        if (classToCall != null) {
            typeToCall = classToCall.getClass();
            // Cache public methods by name for Eval's reflective dispatch.
            // NOTE(review): overloaded names collide — the last one wins.
            Method[] methods = typeToCall.getMethods();
            int ln = methods.length;
            for (int i = 0; i < ln; i++) {
                Method method = methods[i];
                methodsOfCall.put(method.getName(), method);
            }
        }
    }
    /**
     * Starts the listener thread; returns false if it is already active.
     */
    private boolean Listen() {
        if (isListenerActive)
            return false;
        fireLog("Entering Listener Thread", LogLevel.Informative);
        isListenerActive = true;
        listener.start();
        return true;
    }

    // Entry point for pushes arriving on the listen channel.
    private void messageReceived(String message) {
        Eval(message);
    }

    /** @return the encrypted session token, or null if key encryption failed. */
    public String getSecuredKey(){
        return securedKey;
    }
private String createJSON(String methodName, Object params, Callback callback){
StringBuilder sb = new StringBuilder();
sb.append("{");
if(methodName != null){
sb.append("\"m\":\"" + methodName + "\"");
}
if(params!=null){
sb.append(",\"p\":" + new Gson().toJson(params));
}
if(callback!=null){
callbacks.add(callback);
sb.append(",\"i\":" + callbacks.size());
}
sb.append("}");
return sb.toString();
}
static List<Callback> callbacks = new ArrayList<Callback>();
    /**
     * Invokes specific custom method defined on the server-side.
     * @param methodName The name of custom method defined at the backend. It should contain also class definer name. i.e. MyClass.MyMethod.
     * @param params The argument for that method.
     * @param callback This is client's callback, which will be called after server completes invoking it's custom method. This parameter is optional.
     * @throws java.lang.UnsupportedOperationException
     * @custom.example
     * <pre>
     * [code language="java" smarttabs="true"]
     * // let's call the server-side method "serverMethod" from the client-side!
     * // in turn, as a response, the backend service will invoke
     * // client's local "callback" defined above!
     * client.Call("serverMethod", "Hello", callback);
     * [/code]
     * </pre>
     */
    public void Call(String methodName, Object params, Callback callback) throws java.lang.UnsupportedOperationException {
        // Silently dropped when not connected (callback is never invoked).
        if (!this.getIsConnected())
            return;
        String sb = createJSON(methodName, params, callback);
        if(getSecuredKey() == null)
            return;
        String connStr = socketURL.concat(":" + socketPort + "/" + applicationPath + "/jx?de=1&");
        if(isSecure){
            connStr = "https://" + connStr;
        }else{
            connStr = "http://" + connStr;
        }
        // "co" is a cache-busting timestamp appended to every call URL.
        connStr = connStr.concat("c=" + clientId + "&sid="+securedKey+"&co=" + ((Long) (new Date().getTime())).toString());
        String mess = PListen.CreateText(this, sb, false);
        synchronized (sendList){
            sendList.add(new SendQueue(methodName, connStr, mess));
        }
        // Spin up a drain thread only when none is running.
        // NOTE(review): the sendDone check-then-set below is not synchronized,
        // so two threads calling Call() concurrently could both start a drain
        // thread — confirm whether Call() is meant to be single-threaded.
        if(sendDone){
            sendDone = false;
            sendThread = new Thread() {
                @Override
                public void run() {
                    try{
                        while(true)
                        {
                            synchronized (sendList){
                                if(sendList.isEmpty()){
                                    sendDone = true;
                                    break;
                                }
                            }
                            sendFromQueue();
                        }
                    }finally{
                        // Guarantee a new drain thread can start even if
                        // sendFromQueue throws.
                        sendDone = true;
                    }
                }
            };
            sendThread.start();
        }
    }
    // Drain thread currently emptying sendList (see Call()).
    Thread sendThread = null;
    // True when no drain thread is running; guarded informally by Call().
    boolean sendDone = true;

    /** Immutable holder for one queued outgoing request. */
    private class SendQueue {
        public String methodName;
        public String connStr;
        public String mess;

        public SendQueue(String a, String b, String c) {
            methodName = a;
            connStr = b;
            mess = c;
        }
    }

    // FIFO of pending requests; always accessed under synchronized(sendList).
    private Queue<SendQueue> sendList = new ArrayDeque<SendQueue>();
    /**
     * Pops one request from the queue and transmits it — over the socket when
     * available, otherwise via HTTP — then evaluates any inline HTTP response.
     */
    private void sendFromQueue() {
        SendQueue q = null;
        synchronized (sendList){
            q = sendList.poll();
        }
        if(q==null){
            return;
        }
        String mess = q.mess;
        String connStr = q.connStr;
        String methodName = q.methodName;
        String result = null;
        if (!listener.socketEnabled()) {
            result = send.downloadString(connStr, "ms=" + mess);
        } else {
            // Socket path: the response arrives asynchronously through the
            // listener, so 'result' stays null and nothing is evaluated here.
            listener.socketSend(mess);
        }
        if (result != null) {
            result = result.trim();
            fireLog(result + " received for methodCall " + methodName, LogLevel.Informative);
            // Responses wrapped in /**/ are control scripts, not messages.
            if (result.startsWith("/**/")) {
                if (result.contains("jxcore.Closed()")) {
                    this.goClose();
                }
            } else
                Eval(result);
        }
    }
}
|
|
// Licensed to the Software Freedom Conservancy (SFC) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The SFC licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.openqa.selenium;
import static org.junit.Assert.assertEquals;
import static org.openqa.selenium.remote.CapabilityType.PROXY;
import static org.openqa.selenium.support.ui.ExpectedConditions.presenceOfElementLocated;
import static org.openqa.selenium.support.ui.ExpectedConditions.titleIs;
import static org.openqa.selenium.testing.Driver.HTMLUNIT;
import static org.openqa.selenium.testing.Driver.IE;
import static org.openqa.selenium.testing.Driver.MARIONETTE;
import static org.openqa.selenium.testing.Driver.PHANTOMJS;
import static org.openqa.selenium.testing.Driver.SAFARI;
import static org.openqa.selenium.testing.InProject.locate;
import com.google.common.base.Charsets;
import com.google.common.base.Joiner;
import com.google.common.base.Objects;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import com.google.common.io.Files;
import com.google.common.net.HostAndPort;
import com.google.common.net.HttpHeaders;
import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExternalResource;
import org.openqa.selenium.net.PortProber;
import org.openqa.selenium.net.UrlChecker;
import org.openqa.selenium.remote.DesiredCapabilities;
import org.openqa.selenium.support.ui.WebDriverWait;
import org.openqa.selenium.testing.Ignore;
import org.openqa.selenium.testing.JUnit4TestBase;
import org.openqa.selenium.testing.NeedsLocalEnvironment;
import org.openqa.selenium.testing.NotYetImplemented;
import org.openqa.selenium.testing.drivers.WebDriverBuilder;
import org.seleniumhq.jetty9.server.Handler;
import org.seleniumhq.jetty9.server.Request;
import org.seleniumhq.jetty9.server.Server;
import org.seleniumhq.jetty9.server.ServerConnector;
import org.seleniumhq.jetty9.server.handler.AbstractHandler;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.URL;
import java.net.URLEncoder;
import java.util.List;
import java.util.concurrent.TimeUnit;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
/**
* Tests that "Referer" headers are generated as expected under various conditions.
* Each test will perform the following steps in the browser:
* <ol>
* <li>navigate to page 1
* <li>click a link to page 2
* <li>click another link to page 3
* <li>click a link to go back to page 2
* <li>click a link to go forward to page 3
* </ol>
*
* <p>After performing the steps above, the test will check that the test server(s)
* recorded the expected HTTP requests. For each step, the tests expect:
* <ol>
* <li>a request for page1; no Referer header
* <li>a request for page2; Referer: $absolute-url-for-page1
* <li>a request for page3; Referer: $absolute-url-for-page2
* <li>no request
* <li>no request
* </ol>
*
* <p>Note: depending on the condition under test, the various pages may or may
* not be served by the same server.
*/
@Ignore(
value = {PHANTOMJS, SAFARI},
reason = "Opera/PhantomJS - not tested, " +
"Safari - not implemented")
public class ReferrerTest extends JUnit4TestBase {
  // Raw HTML for the three-page navigation chain; loaded once in readPages().
  private static String page1;
  private static String page2;
  private static String page3;

  // Per-test resources; JUnit rules start/stop them around each test.
  @Rule public CustomDriverFactory customDriverFactory = new CustomDriverFactory();
  @Rule public PacFileServerResource pacFileServer = new PacFileServerResource();
  @Rule public ProxyServer proxyServer = new ProxyServer();
  @Rule public TestServer testServer1 = new TestServer();
  @Rule public TestServer testServer2 = new TestServer();
  /** Loads the three test pages from the shared web fixtures once per class. */
  @BeforeClass
  public static void readPages() throws IOException {
    page1 = Files.toString(locate("common/src/web/proxy/page1.html"), Charsets.UTF_8);
    page2 = Files.toString(locate("common/src/web/proxy/page2.html"), Charsets.UTF_8);
    page3 = Files.toString(locate("common/src/web/proxy/page3.html"), Charsets.UTF_8);
  }
  /**
   * Tests navigation when all of the files are hosted on the same domain and the browser
   * does not have a proxy configured.
   */
  @Test
  @NotYetImplemented(HTMLUNIT)
  @NeedsLocalEnvironment
  public void basicHistoryNavigationWithoutAProxy() {
    testServer1.start();

    // Each page embeds the absolute URL of the next as its link target.
    String page1Url = buildPage1Url(testServer1, buildPage2Url(testServer1));
    String page2Url = buildPage2Url(testServer1, buildPage3Url(testServer1));
    String page3Url = buildPage3Url(testServer1);

    performNavigation(driver, page1Url);

    // Only three requests expected: back/forward navigation must come from
    // the browser cache and produce no additional server hits.
    assertEquals(
        ImmutableList.of(
            new HttpRequest(page1Url, null),
            new HttpRequest(page2Url, page1Url),
            new HttpRequest(page3Url, page2Url)),
        testServer1.getRequests());
  }
  /**
   * Tests navigation across multiple domains when the browser does not have a proxy configured.
   */
  @Test
  @NotYetImplemented(HTMLUNIT)
  @NeedsLocalEnvironment
  public void crossDomainHistoryNavigationWithoutAProxy() {
    testServer1.start();
    testServer2.start();

    // page2 lives on server2; page1 and page3 on server1.
    String page1Url = buildPage1Url(testServer1, buildPage2Url(testServer2));
    String page2Url = buildPage2Url(testServer2, buildPage3Url(testServer1));
    String page3Url = buildPage3Url(testServer1);

    performNavigation(driver, page1Url);

    // server1 sees pages 1 and 3; the page-3 Referer crosses domains.
    assertEquals(
        ImmutableList.of(
            new HttpRequest(page1Url, null),
            new HttpRequest(page3Url, page2Url)),
        testServer1.getRequests());

    // server2 sees only page 2, referred by page 1.
    assertEquals(
        ImmutableList.of(new HttpRequest(
            page2Url,
            page1Url)),
        testServer2.getRequests());
  }
  /**
   * Tests navigation when all of the files are hosted on the same domain and the browser is
   * configured to use a proxy that permits direct access to that domain.
   */
  @Test
  @NotYetImplemented(HTMLUNIT)
  @NeedsLocalEnvironment
  public void basicHistoryNavigationWithADirectProxy() {
    testServer1.start();

    // PAC file that sends every request straight to the origin server.
    pacFileServer.setPacFileContents(
        "function FindProxyForURL(url, host) { return 'DIRECT'; }");
    pacFileServer.start();
    WebDriver driver = customDriverFactory.createDriver(pacFileServer.getBaseUrl());

    String page1Url = buildPage1Url(testServer1, buildPage2Url(testServer1));
    String page2Url = buildPage2Url(testServer1, buildPage3Url(testServer1));
    String page3Url = buildPage3Url(testServer1);

    performNavigation(driver, page1Url);

    // Expect identical Referer behavior to the no-proxy case.
    assertEquals(
        ImmutableList.of(
            new HttpRequest(page1Url, null),
            new HttpRequest(page2Url, page1Url),
            new HttpRequest(page3Url, page2Url)),
        testServer1.getRequests());
  }
  /**
   * Tests navigation across multiple domains when the browser is configured to use a proxy that
   * permits direct access to those domains.
   */
  @Test
  @NotYetImplemented(HTMLUNIT)
  @NeedsLocalEnvironment
  public void crossDomainHistoryNavigationWithADirectProxy() {
    testServer1.start();
    testServer2.start();

    // PAC file that sends every request straight to the origin server.
    pacFileServer.setPacFileContents(
        "function FindProxyForURL(url, host) { return 'DIRECT'; }");
    pacFileServer.start();
    WebDriver driver = customDriverFactory.createDriver(pacFileServer.getBaseUrl());

    // page2 lives on server2; page1 and page3 on server1.
    String page1Url = buildPage1Url(testServer1, buildPage2Url(testServer2));
    String page2Url = buildPage2Url(testServer2, buildPage3Url(testServer1));
    String page3Url = buildPage3Url(testServer1);

    performNavigation(driver, page1Url);

    assertEquals(
        ImmutableList.of(
            new HttpRequest(page1Url, null),
            new HttpRequest(page3Url, page2Url)),
        testServer1.getRequests());

    assertEquals(
        ImmutableList.of(new HttpRequest(
            page2Url,
            page1Url)),
        testServer2.getRequests());
  }
  /**
   * Tests navigation across multiple domains when the browser is configured to use a proxy that
   * redirects the second domain to another host.
   */
  @Ignore(MARIONETTE)
  @NotYetImplemented(HTMLUNIT)
  @Test
  @NeedsLocalEnvironment
  public void crossDomainHistoryNavigationWithAProxiedHost() {
    testServer1.start();
    testServer2.start();

    // PAC routing: anything whose host contains "example" is proxied through
    // testServer2; everything else connects directly.
    pacFileServer.setPacFileContents(Joiner.on('\n').join(
        "function FindProxyForURL(url, host) {",
        "  if (host.indexOf('example') != -1) {",
        "    return 'PROXY " + testServer2.getHostAndPort() + "';",
        "  }",
        "  return 'DIRECT';",
        " }"));
    pacFileServer.start();
    WebDriver driver = customDriverFactory.createDriver(pacFileServer.getBaseUrl());

    // page2 is addressed on the fake www.example.com domain, so its request
    // is actually served by testServer2 via the proxy rule above.
    String page1Url = buildPage1Url(testServer1, "http://www.example.com" + buildPage2Url());
    String page2Url = buildPage2Url("http://www.example.com", buildPage3Url(testServer1));
    String page3Url = buildPage3Url(testServer1);

    performNavigation(driver, page1Url);

    assertEquals(
        ImmutableList.of(
            new HttpRequest(page1Url, null),
            new HttpRequest(page3Url, page2Url)),
        testServer1.getRequests());

    assertEquals(
        ImmutableList.of(
            new HttpRequest(page2Url, page1Url)),
        testServer2.getRequests());
  }
/**
* Tests navigation across multiple domains when the browser is configured to use a proxy that
* intercepts requests to a specific host (www.example.com) - all other requests are permitted
* to connect directly to the target server.
*/
  @Ignore(MARIONETTE)
  @NotYetImplemented(HTMLUNIT)
  @Test
  @NeedsLocalEnvironment
  public void crossDomainHistoryNavigationWhenProxyInterceptsHostRequests() {
    testServer1.start();
    proxyServer.start();
    // Route requests for the fake www.example.com host through proxyServer, which
    // records and serves them itself; everything else connects directly.
    proxyServer.setPacFileContents(Joiner.on('\n').join(
        "function FindProxyForURL(url, host) {",
        "  if (host.indexOf('example') != -1) {",
        "    return 'PROXY " + proxyServer.getHostAndPort() + "';",
        "  }",
        "  return 'DIRECT';",
        " }"));
    String page1Url = buildPage1Url(testServer1, "http://www.example.com" + buildPage2Url());
    String page2Url = buildPage2Url("http://www.example.com", buildPage3Url(testServer1));
    String page3Url = buildPage3Url(testServer1);
    // Unlike the pacFileServer tests, the PAC file here is fetched from the proxy itself.
    WebDriver driver = customDriverFactory.createDriver(proxyServer.getPacUrl());
    performNavigation(driver, page1Url);
    // Pages 1 and 3 are served directly by testServer1; page 2 is handled by the proxy.
    assertEquals(
        ImmutableList.of(
            new HttpRequest(page1Url, null),
            new HttpRequest(page3Url, page2Url)),
        testServer1.getRequests());
    assertEquals(
        ImmutableList.of(
            new HttpRequest(page2Url, page1Url)),
        proxyServer.getRequests());
  }
/**
* Tests navigation on a single domain where the browser is configured to use a proxy that
* intercepts requests for page 2.
*/
  @Ignore(
      value = {IE, MARIONETTE},
      reason = "IEDriver does not disable automatic proxy caching, causing this test to fail.",
      issues = 6629)
  @NotYetImplemented(HTMLUNIT)
  @Test
  @NeedsLocalEnvironment
  public void navigationWhenProxyInterceptsASpecificUrl() {
    testServer1.start();
    proxyServer.start();
    // All three pages nominally live on testServer1.
    String page1Url = buildPage1Url(testServer1, buildPage2Url(testServer1));
    String page2Url = buildPage2Url(testServer1, buildPage3Url(testServer1));
    String page3Url = buildPage3Url(testServer1);
    // Have our proxy intercept requests for page 2 (matched by URL, not by host).
    proxyServer.setPacFileContents(Joiner.on('\n').join(
        "function FindProxyForURL(url, host) {",
        "  if (url.indexOf('/page2.html?next') != -1) {",
        "    return 'PROXY " + proxyServer.getHostAndPort() + "';",
        "  }",
        "  return 'DIRECT';",
        " }"));
    WebDriver driver = customDriverFactory.createDriver(proxyServer.getPacUrl());
    performNavigation(driver, page1Url);
    // testServer1 sees pages 1 and 3; the proxy sees (and serves) page 2.
    assertEquals(
        ImmutableList.of(
            new HttpRequest(page1Url, null),
            new HttpRequest(page3Url, page2Url)),
        testServer1.getRequests());
    assertEquals(
        ImmutableList.of(new HttpRequest(
            page2Url,
            page1Url)),
        proxyServer.getRequests());
  }
private void performNavigation(WebDriver driver, String firstUrl) {
WebDriverWait wait = new WebDriverWait(driver, 5);
driver.get(firstUrl);
wait.until(titleIs("Page 1"));
wait.until(presenceOfElementLocated(By.id("next"))).click();
wait.until(titleIs("Page 2"));
wait.until(presenceOfElementLocated(By.id("next"))).click();
wait.until(titleIs("Page 3"));
wait.until(presenceOfElementLocated(By.id("back"))).click();
wait.until(titleIs("Page 2"));
wait.until(presenceOfElementLocated(By.id("forward"))).click();
wait.until(titleIs("Page 3"));
}
  /** Returns a server-relative URL for page 1 whose "next" link is {@code nextUrl}. */
  private static String buildPage1Url(String nextUrl) {
    return "/page1.html?next=" + encode(nextUrl);
  }
  /** Returns an absolute URL for page 1 on {@code server} whose "next" link is {@code nextUrl}. */
  private static String buildPage1Url(ServerResource server, String nextUrl) {
    return server.getBaseUrl() + "/page1.html?next=" + encode(nextUrl);
  }
  /** Returns a server-relative URL for page 2 whose "next" link is {@code nextUrl}. */
  private static String buildPage2Url(String nextUrl) {
    return "/page2.html?next=" + encode(nextUrl);
  }
  /** Returns an absolute URL for page 2 on the given base URL whose "next" link is {@code nextUrl}. */
  private static String buildPage2Url(String server, String nextUrl) {
    return server + "/page2.html?next=" + encode(nextUrl);
  }
  /** Returns an absolute URL for page 2 on {@code server} whose "next" link is {@code nextUrl}. */
  private static String buildPage2Url(ServerResource server, String nextUrl) {
    return server.getBaseUrl() + "/page2.html?next=" + encode(nextUrl);
  }
  /** Returns a server-relative URL for page 2 with no "next" link. */
  private static String buildPage2Url() {
    return "/page2.html"; // Nothing special here.
  }
  /** Returns an absolute URL for page 2 on {@code server} with no "next" link. */
  private static String buildPage2Url(ServerResource server) {
    return server.getBaseUrl() + "/page2.html"; // Nothing special here.
  }
  /** Returns a server-relative URL for page 3 (the last page; it has no "next" link). */
  private static String buildPage3Url() {
    return "/page3.html"; // Nothing special here.
  }
  /** Returns an absolute URL for page 3 on {@code server} (no "next" link). */
  private static String buildPage3Url(ServerResource server) {
    return server.getBaseUrl() + "/page3.html"; // Nothing special here.
  }
private static String encode(String url) {
try {
return URLEncoder.encode(url, Charsets.UTF_8.name());
} catch (UnsupportedEncodingException e) {
throw new RuntimeException("UTF-8 should always be supported!", e);
}
}
  /**
   * Manages a custom WebDriver implementation as an {@link ExternalResource} rule.
   */
  private static class CustomDriverFactory extends ExternalResource {
    WebDriver driver;
    /** Creates (and remembers, for cleanup in {@link #after()}) a driver using the given PAC URL. */
    WebDriver createDriver(String pacUrl) {
      Proxy proxy = new Proxy();
      proxy.setProxyAutoconfigUrl(pacUrl);
      DesiredCapabilities caps = new DesiredCapabilities();
      caps.setCapability(PROXY, proxy);
      return driver = new WebDriverBuilder().setDesiredCapabilities(caps).get();
    }
    @Override
    protected void after() {
      // Quit the browser when the test finishes, if one was created.
      if (driver != null) {
        driver.quit();
      }
    }
  }
  /**
   * An {@link ExternalResource} for a basic HTTP server; ensures the server is shutdown when a
   * test finishes.
   */
  private abstract static class ServerResource extends ExternalResource {
    protected final Server server;
    ServerResource() {
      // Bind a Jetty connector to a free local port, with a generous idle timeout so
      // slow browser startups do not drop connections.
      server = new Server();
      ServerConnector http = new ServerConnector(server);
      int port = PortProber.findFreePort();
      http.setPort(port);
      http.setIdleTimeout(500000);
      server.addConnector(http);
    }
    /** Installs the single handler that serves all requests. */
    void addHandler(Handler handler) {
      this.server.setHandler(handler);
    }
    HostAndPort getHostAndPort() {
      return HostAndPort.fromParts(server.getURI().getHost(), server.getURI().getPort());
    }
    String getBaseUrl() {
      return "http://" + getHostAndPort();
    }
    /** Starts the server and blocks (up to 10s) until it responds over HTTP. */
    void start() {
      try {
        server.start();
        new UrlChecker().waitUntilAvailable(10, TimeUnit.SECONDS, new URL(getBaseUrl()));
      } catch (Exception e) {
        throw new RuntimeException(e);
      }
    }
    @Override
    protected void after() {
      // Stop the server when the test finishes; wrap Jetty's checked exceptions.
      try {
        server.stop();
      } catch (Exception e) {
        throw new RuntimeException(e);
      }
    }
  }
  /** A server that answers every request with the currently configured PAC file. */
  private static class PacFileServerResource extends ServerResource {
    private String pacFileContents;
    PacFileServerResource() {
      addHandler(new AbstractHandler() {
        @Override
        public void handle(String s, Request baseRequest, HttpServletRequest request,
                           HttpServletResponse response) throws IOException, ServletException {
          // Every request, regardless of path, gets the current PAC file contents.
          response.setContentType("application/x-javascript-config; charset=us-ascii");
          response.setStatus(HttpServletResponse.SC_OK);
          response.getWriter().println(getPacFileContents());
          baseRequest.setHandled(true);
        }
      });
    }
    String getPacFileContents() {
      return pacFileContents;
    }
    void setPacFileContents(String content) {
      pacFileContents = content;
    }
  }
  /** A web server that serves the test pages and records every page request it receives. */
  private static class TestServer extends ServerResource {
    private final List<HttpRequest> requests;
    TestServer() {
      // Copy-on-write list: requests arrive on Jetty threads while the test thread reads.
      requests = Lists.newCopyOnWriteArrayList();
      addHandler(new PageRequestHandler(requests));
    }
    List<HttpRequest> getRequests() {
      return requests;
    }
  }
  /**
   * A server that plays the role of an HTTP proxy: it serves its own PAC file at /pac.js
   * and handles (and records) any other page request sent to it.
   */
  private static class ProxyServer extends ServerResource {
    private final List<HttpRequest> requests;
    private String pacFileContents;
    ProxyServer() {
      // Copy-on-write list: requests arrive on Jetty threads while the test thread reads.
      requests = Lists.newCopyOnWriteArrayList();
      addHandler(new PageRequestHandler(requests) {
        @Override
        public void handle(String s, Request baseRequest, HttpServletRequest request,
                           HttpServletResponse response) throws IOException, ServletException {
          // Serve the PAC file itself; any other path falls through to the page
          // handler, which records the request.
          if (request.getRequestURI().equals("/pac.js")) {
            response.setContentType("application/x-javascript-config; charset=us-ascii");
            response.setStatus(HttpServletResponse.SC_OK);
            response.getWriter().println(getPacFileContents());
            baseRequest.setHandled(true);
          } else {
            super.handle(s, baseRequest, request, response);
          }
        }
      });
    }
    String getPacUrl() {
      return getBaseUrl() + "/pac.js";
    }
    List<HttpRequest> getRequests() {
      return requests;
    }
    String getPacFileContents() {
      return pacFileContents;
    }
    void setPacFileContents(String content) {
      pacFileContents = content;
    }
  }
  /**
   * Serves the three test pages and records each page request (URL plus referrer).
   * The page contents come from page1/page2/page3 fields defined elsewhere in the
   * enclosing test class (not shown here).
   */
  private static class PageRequestHandler extends AbstractHandler {
    private final List<HttpRequest> requests;
    PageRequestHandler(List<HttpRequest> requests) {
      this.requests = requests;
    }
    @Override
    public void handle(String s, Request baseRequest, HttpServletRequest request,
                       HttpServletResponse response) throws IOException, ServletException {
      // Answer favicon requests with 204 so they don't pollute the recorded list.
      if (request.getRequestURI().endsWith("/favicon.ico")) {
        response.setStatus(204);
        baseRequest.setHandled(true);
        return;
      }
      // Don't record / requests so we can poll the server for availability in start().
      if (!"/".equals(request.getRequestURI())) {
        requests.add(new HttpRequest(
            request.getRequestURL() + (request.getQueryString() == null ? "" : "?" + request.getQueryString()),
            request.getHeader(HttpHeaders.REFERER)));
      }
      String responseHtml;
      if (request.getRequestURI().contains("/page1.html")) {
        responseHtml = page1;
      } else if (request.getRequestURI().contains("/page2.html")) {
        responseHtml = page2;
      } else {
        // Any other path is answered with page 3.
        responseHtml = page3;
      }
      response.setContentType("text/html; charset=utf-8");
      response.setStatus(HttpServletResponse.SC_OK);
      response.getWriter().println(responseHtml);
      baseRequest.setHandled(true);
    }
  }
/**
* Records basic information about a HTTP request.
*/
private static class HttpRequest {
private final String uri;
private final String referrer;
HttpRequest(String uri, String referrer) {
this.uri = uri;
this.referrer = referrer;
}
@Override
public int hashCode() {
return Objects.hashCode(uri, referrer);
}
@Override
public boolean equals(Object o) {
if (o instanceof HttpRequest) {
HttpRequest that = (HttpRequest) o;
return Objects.equal(this.uri, that.uri)
&& Objects.equal(this.referrer, that.referrer);
}
return false;
}
@Override
public String toString() {
return String.format("[uri=%s, referrer=%s]", uri, referrer);
}
}
}
|
|
package com.enginemobi.bssuite.domain;
import com.fasterxml.jackson.annotation.JsonIgnore;
import org.hibernate.annotations.Cache;
import org.hibernate.annotations.CacheConcurrencyStrategy;
import org.hibernate.validator.constraints.Email;
import org.springframework.data.elasticsearch.annotations.Document;
import javax.persistence.*;
import org.hibernate.annotations.Type;
import javax.validation.constraints.NotNull;
import javax.validation.constraints.Pattern;
import javax.validation.constraints.Size;
import java.io.Serializable;
import java.util.HashSet;
import java.util.Set;
import org.joda.time.DateTime;
/**
 * A user (application account), persisted to the {@code jhi_user} table and indexed in
 * Elasticsearch under the {@code user} index. Auditing columns (created/modified by and
 * date) are inherited from {@link AbstractAuditingEntity}.
 */
@Entity
@Table(name = "jhi_user")
@Cache(usage = CacheConcurrencyStrategy.NONSTRICT_READ_WRITE)
@Document(indexName="user")
public class User extends AbstractAuditingEntity implements Serializable {
    @Id
    @GeneratedValue(strategy = GenerationType.AUTO)
    private Long id;
    // Unique login name: lowercase alphanumerics only, or the literal "anonymousUser".
    @NotNull
    @Pattern(regexp = "^[a-z0-9]*$|(anonymousUser)")
    @Size(min = 1, max = 50)
    @Column(length = 50, unique = true, nullable = false)
    private String login;
    // Stored password hash; fixed 60-char length matches a bcrypt digest. Never serialized.
    @JsonIgnore
    @NotNull
    @Size(min = 60, max = 60)
    @Column(length = 60)
    private String password;
    @Size(max = 50)
    @Column(name = "first_name", length = 50)
    private String firstName;
    @Size(max = 50)
    @Column(name = "last_name", length = 50)
    private String lastName;
    @Email
    @Size(max = 100)
    @Column(length = 100, unique = true)
    private String email;
    // Whether the account has completed activation; defaults to not activated.
    @Column(nullable = false)
    private boolean activated = false;
    // ISO language key, e.g. "en" or "pt-br".
    @Size(min = 2, max = 5)
    @Column(name = "lang_key", length = 5)
    private String langKey;
    @Size(max = 20)
    @Column(name = "activation_key", length = 20)
    @JsonIgnore
    private String activationKey;
    // NOTE(review): unlike activationKey, this secret is NOT annotated with @JsonIgnore,
    // so it is exposed when the entity is serialized to JSON — confirm this is intended.
    @Size(max = 20)
    @Column(name = "reset_key", length = 20)
    private String resetKey;
    @Type(type = "org.jadira.usertype.dateandtime.joda.PersistentDateTime")
    @Column(name = "reset_date", nullable = true)
    private DateTime resetDate = null;
    @JsonIgnore
    @ManyToMany
    @JoinTable(
        name = "jhi_user_authority",
        joinColumns = {@JoinColumn(name = "user_id", referencedColumnName = "id")},
        inverseJoinColumns = {@JoinColumn(name = "authority_name", referencedColumnName = "name")})
    @Cache(usage = CacheConcurrencyStrategy.NONSTRICT_READ_WRITE)
    private Set<Authority> authorities = new HashSet<>();
    @JsonIgnore
    @OneToMany(cascade = CascadeType.ALL, orphanRemoval = true, mappedBy = "user")
    @Cache(usage = CacheConcurrencyStrategy.NONSTRICT_READ_WRITE)
    private Set<PersistentToken> persistentTokens = new HashSet<>();
    // ---- simple accessors ----
    public Long getId() {
        return id;
    }
    public void setId(Long id) {
        this.id = id;
    }
    public String getLogin() {
        return login;
    }
    public void setLogin(String login) {
        this.login = login;
    }
    public String getPassword() {
        return password;
    }
    public void setPassword(String password) {
        this.password = password;
    }
    public String getFirstName() {
        return firstName;
    }
    public void setFirstName(String firstName) {
        this.firstName = firstName;
    }
    public String getLastName() {
        return lastName;
    }
    public void setLastName(String lastName) {
        this.lastName = lastName;
    }
    public String getEmail() {
        return email;
    }
    public void setEmail(String email) {
        this.email = email;
    }
    public boolean getActivated() {
        return activated;
    }
    public void setActivated(boolean activated) {
        this.activated = activated;
    }
    public String getActivationKey() {
        return activationKey;
    }
    public void setActivationKey(String activationKey) {
        this.activationKey = activationKey;
    }
    public String getResetKey() {
        return resetKey;
    }
    public void setResetKey(String resetKey) {
        this.resetKey = resetKey;
    }
    public DateTime getResetDate() {
        return resetDate;
    }
    public void setResetDate(DateTime resetDate) {
        this.resetDate = resetDate;
    }
    public String getLangKey() {
        return langKey;
    }
    public void setLangKey(String langKey) {
        this.langKey = langKey;
    }
    public Set<Authority> getAuthorities() {
        return authorities;
    }
    public void setAuthorities(Set<Authority> authorities) {
        this.authorities = authorities;
    }
    public Set<PersistentToken> getPersistentTokens() {
        return persistentTokens;
    }
    public void setPersistentTokens(Set<PersistentToken> persistentTokens) {
        this.persistentTokens = persistentTokens;
    }
    /**
     * Equality is based solely on the login, which is unique and non-null per the
     * column constraints above; hashCode below is consistent with this.
     */
    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        User user = (User) o;
        if (!login.equals(user.login)) {
            return false;
        }
        return true;
    }
    @Override
    public int hashCode() {
        return login.hashCode();
    }
    @Override
    public String toString() {
        // Deliberately excludes password, resetKey and other sensitive/collection fields.
        return "User{" +
            "login='" + login + '\'' +
            ", firstName='" + firstName + '\'' +
            ", lastName='" + lastName + '\'' +
            ", email='" + email + '\'' +
            ", activated='" + activated + '\'' +
            ", langKey='" + langKey + '\'' +
            ", activationKey='" + activationKey + '\'' +
            "}";
    }
}
|
|
/*
* Copyright (c) 2003, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package sun.awt.X11;
import java.awt.Component;
import java.awt.peer.ComponentPeer;
import java.io.IOException;
import java.util.Iterator;
import sun.util.logging.PlatformLogger;
import sun.awt.AppContext;
import sun.awt.SunToolkit;
import sun.awt.dnd.SunDropTargetContextPeer;
import sun.awt.dnd.SunDropTargetEvent;
import sun.misc.Unsafe;
/**
 * The XDropTargetContextPeer is the class responsible for handling
 * the interaction between the XDnD/Motif DnD subsystem and Java drop targets.
 *
 * @since 1.5
 */
final class XDropTargetContextPeer extends SunDropTargetContextPeer {
    private static final PlatformLogger logger =
        PlatformLogger.getLogger("sun.awt.X11.xembed.xdnd.XDropTargetContextPeer");
    private static final Unsafe unsafe = XlibWrapper.unsafe;
    /*
     * A key to store a peer instance for an AppContext.
     */
    private static final Object DTCP_KEY = "DropTargetContextPeer";
    private XDropTargetContextPeer() {}
    /*
     * Returns the single peer for the given AppContext, lazily creating and
     * caching it under DTCP_KEY. Guarded by the DnD subsystem's global lock.
     */
    static XDropTargetContextPeer getPeer(AppContext appContext) {
        synchronized (_globalLock) {
            XDropTargetContextPeer peer =
                (XDropTargetContextPeer)appContext.get(DTCP_KEY);
            if (peer == null) {
                peer = new XDropTargetContextPeer();
                appContext.put(DTCP_KEY, peer);
            }
            return peer;
        }
    }
    static XDropTargetProtocolListener getXDropTargetProtocolListener() {
        return XDropTargetProtocolListenerImpl.getInstance();
    }
    /*
     * Sends the drop target's response back to the drag source (unless the event
     * was consumed) and, once the dispatcher is done with it, frees the native
     * memory backing the drag context.
     *
     * @param returnValue the drop action selected by the Java drop target.
     */
    protected void eventProcessed(SunDropTargetEvent e, int returnValue,
                                  boolean dispatcherDone) {
        /* The native context is the pointer to the XClientMessageEvent
           structure. */
        long ctxt = getNativeDragContext();
        /* If the event was not consumed, send a response to the source. */
        try {
            if (ctxt != 0 && !e.isConsumed()) {
                Iterator dropTargetProtocols =
                    XDragAndDropProtocols.getDropTargetProtocols();
                // The first protocol that recognizes this context sends the
                // response; the remaining protocols are skipped.
                while (dropTargetProtocols.hasNext()) {
                    XDropTargetProtocol dropTargetProtocol =
                        (XDropTargetProtocol)dropTargetProtocols.next();
                    if (dropTargetProtocol.sendResponse(ctxt, e.getID(),
                                                        returnValue)) {
                        break;
                    }
                }
            }
        } finally {
            // Free the native event copy exactly once, after dispatch completes.
            if (dispatcherDone && ctxt != 0) {
                unsafe.freeMemory(ctxt);
            }
        }
    }
    /*
     * Notifies the drag source that the drop completed and frees the native
     * drag context.
     */
    protected void doDropDone(boolean success, int dropAction,
                              boolean isLocal) {
        /* The native context is the pointer to the XClientMessageEvent
           structure. */
        long ctxt = getNativeDragContext();
        if (ctxt != 0) {
            try {
                Iterator dropTargetProtocols =
                    XDragAndDropProtocols.getDropTargetProtocols();
                while (dropTargetProtocols.hasNext()) {
                    XDropTargetProtocol dropTargetProtocol =
                        (XDropTargetProtocol)dropTargetProtocols.next();
                    if (dropTargetProtocol.sendDropDone(ctxt, success,
                                                        dropAction)) {
                        break;
                    }
                }
            } finally {
                // The native context is no longer needed once drop-done is sent.
                unsafe.freeMemory(ctxt);
            }
        }
    }
    /*
     * Asks each registered protocol in turn for the transfer data in the given
     * format; returns the first successful result, or null if no protocol
     * recognizes the current drag context.
     */
    protected Object getNativeData(long format)
      throws IOException {
        /* The native context is the pointer to the XClientMessageEvent
           structure. */
        long ctxt = getNativeDragContext();
        if (ctxt != 0) {
            Iterator dropTargetProtocols =
                XDragAndDropProtocols.getDropTargetProtocols();
            while (dropTargetProtocols.hasNext()) {
                XDropTargetProtocol dropTargetProtocol =
                    (XDropTargetProtocol)dropTargetProtocols.next();
                // getData throws IAE if ctxt is not for this protocol.
                try {
                    return dropTargetProtocol.getData(ctxt, format);
                } catch (IllegalArgumentException iae) {
                }
            }
        }
        return null;
    }
    // Intentionally empty: no per-drop cleanup is needed in this peer.
    private void cleanup() {
    }
    // For each message type below: give an XEmbed canvas a chance to forward the
    // event to its embedded child first; fall back to regular processing otherwise.
    protected void processEnterMessage(SunDropTargetEvent event) {
        if (!processSunDropTargetEvent(event)) {
            super.processEnterMessage(event);
        }
    }
    protected void processExitMessage(SunDropTargetEvent event) {
        if (!processSunDropTargetEvent(event)) {
            super.processExitMessage(event);
        }
    }
    protected void processMotionMessage(SunDropTargetEvent event,
                                        boolean operationChanged) {
        if (!processSunDropTargetEvent(event)) {
            super.processMotionMessage(event, operationChanged);
        }
    }
    protected void processDropMessage(SunDropTargetEvent event) {
        if (!processSunDropTargetEvent(event)) {
            super.processDropMessage(event);
        }
    }
    // If source is an XEmbedCanvasPeer, passes the event to it for processing and
    // return true if the event is forwarded to the XEmbed child.
    // Otherwise, does nothing and return false.
    private boolean processSunDropTargetEvent(SunDropTargetEvent event) {
        Object source = event.getSource();
        if (source instanceof Component) {
            ComponentPeer peer = ((Component)source).getPeer();
            if (peer instanceof XEmbedCanvasPeer) {
                XEmbedCanvasPeer xEmbedCanvasPeer = (XEmbedCanvasPeer)peer;
                /* The native context is the pointer to the XClientMessageEvent
                   structure. */
                long ctxt = getNativeDragContext();
                if (logger.isLoggable(PlatformLogger.Level.FINER)) {
                    logger.finer("        processing " + event + " ctxt=" + ctxt +
                                 " consumed=" + event.isConsumed());
                }
                /* If the event is not consumed, pass it to the
                   XEmbedCanvasPeer for processing. */
                if (!event.isConsumed()) {
                    // NOTE: ctxt can be zero at this point.
                    if (xEmbedCanvasPeer.processXEmbedDnDEvent(ctxt,
                                                               event.getID())) {
                        event.consume();
                        return true;
                    }
                }
            }
        }
        return false;
    }
    /*
     * Tries each protocol until one succeeds in forwarding the native event to
     * the embedded window.
     */
    public void forwardEventToEmbedded(long embedded, long ctxt,
                                       int eventID) {
        Iterator dropTargetProtocols =
            XDragAndDropProtocols.getDropTargetProtocols();
        while (dropTargetProtocols.hasNext()) {
            XDropTargetProtocol dropTargetProtocol =
                (XDropTargetProtocol)dropTargetProtocols.next();
            if (dropTargetProtocol.forwardEventToEmbedded(embedded, ctxt,
                                                          eventID)) {
                break;
            }
        }
    }
    /*
     * Singleton listener that routes native drop target notifications to the
     * peer of the target component's AppContext.
     */
    static final class XDropTargetProtocolListenerImpl
        implements XDropTargetProtocolListener {
        private final static XDropTargetProtocolListener theInstance =
            new XDropTargetProtocolListenerImpl();
        private XDropTargetProtocolListenerImpl() {}
        static XDropTargetProtocolListener getInstance() {
            return theInstance;
        }
        public void handleDropTargetNotification(XWindow xwindow, int x, int y,
                                                 int dropAction, int actions,
                                                 long[] formats, long nativeCtxt,
                                                 int eventID) {
            Object target = xwindow.getTarget();
            // The target of a drop site XWindow is always a Component.
            assert target instanceof Component;
            Component component = (Component)target;
            AppContext appContext = SunToolkit.targetToAppContext(target);
            // Every component is associated with some AppContext.
            assert appContext != null;
            XDropTargetContextPeer peer = XDropTargetContextPeer.getPeer(appContext);
            // Post asynchronously; the event is dispatched on the target's EDT.
            peer.postDropTargetEvent(component, x, y, dropAction, actions, formats,
                                     nativeCtxt, eventID,
                                     !SunDropTargetContextPeer.DISPATCH_SYNC);
        }
    }
}
|
|
// Copyright 2017 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.analysis;
import com.google.common.collect.ImmutableCollection;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.devtools.build.lib.actions.Artifact;
import com.google.devtools.build.lib.analysis.LocationExpander.Options;
import com.google.devtools.build.lib.analysis.stringtemplate.ExpansionException;
import com.google.devtools.build.lib.analysis.stringtemplate.TemplateContext;
import com.google.devtools.build.lib.analysis.stringtemplate.TemplateExpander;
import com.google.devtools.build.lib.cmdline.Label;
import com.google.devtools.build.lib.shell.ShellUtils;
import com.google.devtools.build.lib.syntax.Type;
import java.util.ArrayList;
import java.util.List;
import javax.annotation.Nullable;
/**
 * Expands "Make" variables and $(location) references embedded in attribute strings and
 * string lists, optionally shell-tokenizing the results. Errors are reported through the
 * associated {@link RuleContext} and leave the offending value unexpanded.
 */
public final class Expander {
  /** Whether an expanded string should be shell-tokenized before being collected. */
  private enum Tokenize {
    YES,
    NO
  }
  private final RuleContext ruleContext;
  private final TemplateContext templateContext;
  Expander(RuleContext ruleContext, TemplateContext templateContext) {
    this.ruleContext = ruleContext;
    this.templateContext = templateContext;
  }
  /**
   * Returns a copy of this expander that additionally resolves $(location) references via a
   * {@link LocationTemplateContext} configured with the given options.
   */
  public Expander withLocations(Options... options) {
    return new Expander(
        ruleContext, new LocationTemplateContext(templateContext, ruleContext, options));
  }
  /**
   * Returns a copy of this expander that resolves $(location) references, additionally allowing
   * references into the rule's data dependencies ({@link Options#ALLOW_DATA}).
   */
  public Expander withDataLocations() {
    return withLocations(Options.ALLOW_DATA);
  }
  /**
   * Returns a copy of this expander that resolves $(location) references to exec paths
   * ({@link Options#EXEC_PATHS}), allowing references into the rule's data dependencies
   * ({@link Options#ALLOW_DATA}).
   */
  public Expander withDataExecLocations() {
    return withLocations(Options.ALLOW_DATA, Options.EXEC_PATHS);
  }
  /**
   * Returns a copy of this expander that resolves $(location) references to exec paths using
   * the supplied label-to-artifacts map.
   */
  public Expander withExecLocations(ImmutableMap<Label, ImmutableCollection<Artifact>> locations) {
    return new Expander(
        ruleContext,
        new LocationTemplateContext(templateContext, ruleContext, locations, Options.EXEC_PATHS));
  }
  /**
   * Expands {@code value}, shell-tokenizes the expansion, and appends the tokens to
   * {@code result}. The attribute name is used only for error reporting.
   */
  public void tokenizeAndExpandMakeVars(
      List<String> result,
      String attributeName,
      String value) {
    expandValue(result, attributeName, value, Tokenize.YES);
  }
  /**
   * Expands a single value and appends it to {@code output} — either whole, or split into
   * shell tokens when {@code tokenize} is {@link Tokenize#YES}. Tokenization failures are
   * reported as attribute errors.
   */
  private void expandValue(
      List<String> output,
      String attributeName,
      String value,
      Tokenize tokenize) {
    String expanded = expand(attributeName, value);
    if (tokenize == Tokenize.NO) {
      output.add(expanded);
      return;
    }
    try {
      ShellUtils.tokenize(output, expanded);
    } catch (ShellUtils.TokenizationException e) {
      ruleContext.attributeError(attributeName, e.getMessage());
    }
  }
  /**
   * Expands all "Make" variable references in the named string attribute. Errors are reported
   * against the attribute and the value is returned unexpanded.
   *
   * @param attributeName the name of the attribute
   * @return the expanded attribute value.
   */
  public String expand(String attributeName) {
    return expand(attributeName, ruleContext.attributes().get(attributeName, Type.STRING));
  }
  /**
   * Expands all "Make" variable references in {@code expression}. On error, the problem is
   * reported (against the attribute if one is given, otherwise against the rule) and
   * {@code expression} is returned unchanged.
   *
   * @param attributeName the attribute {@code expression} comes from; used only for error
   *     reporting. May be null.
   * @param expression the string to expand.
   * @return the expansion of {@code expression}.
   */
  public String expand(@Nullable String attributeName, String expression) {
    try {
      return TemplateExpander.expand(expression, templateContext);
    } catch (ExpansionException e) {
      if (attributeName != null) {
        ruleContext.attributeError(attributeName, e.getMessage());
      } else {
        ruleContext.ruleError(e.getMessage());
      }
      return expression;
    }
  }
  /**
   * Expands every string in {@code values}, optionally tokenizing each expansion, and returns
   * the collected results. The attribute name is used only for error reporting.
   */
  private ImmutableList<String> expandAndTokenizeList(
      String attrName, List<String> values, Tokenize tokenize) {
    List<String> expanded = new ArrayList<>();
    for (String value : values) {
      expandValue(expanded, attrName, value, tokenize);
    }
    return ImmutableList.copyOf(expanded);
  }
  /**
   * Reads the named {@link Type#STRING_LIST} attribute, expands every entry, and returns the
   * result. Throws if the attribute is missing or has a different type.
   */
  public ImmutableList<String> list(String attrName) {
    return list(attrName, ruleContext.attributes().get(attrName, Type.STRING_LIST));
  }
  /**
   * Expands every string in {@code values} without tokenizing. The attribute name is used only
   * for error reporting.
   */
  public ImmutableList<String> list(String attrName, List<String> values) {
    return expandAndTokenizeList(attrName, values, Tokenize.NO);
  }
  /**
   * Reads the named {@link Type#STRING_LIST} attribute, expands and tokenizes every entry, and
   * returns the result. Throws if the attribute is missing or has a different type.
   */
  public ImmutableList<String> tokenized(String attrName) {
    return tokenized(attrName, ruleContext.attributes().get(attrName, Type.STRING_LIST));
  }
  /**
   * Expands and tokenizes every string in {@code values}. The attribute name is used only for
   * error reporting.
   */
  public ImmutableList<String> tokenized(String attrName, List<String> values) {
    return expandAndTokenizeList(attrName, values, Tokenize.YES);
  }
  /**
   * If {@code expression} consists of a single variable reference, returns its expansion;
   * otherwise returns null. Syntax errors are reported against the attribute and leave the
   * expression unchanged.
   *
   * @param attrName the attribute {@code expression} comes from; used for error reporting.
   * @param expression the string to expand.
   * @return the expansion of {@code expression}, or null.
   */
  @Nullable
  public String expandSingleMakeVariable(String attrName, String expression) {
    try {
      return TemplateExpander.expandSingleVariable(expression, templateContext);
    } catch (ExpansionException e) {
      ruleContext.attributeError(attrName, e.getMessage());
      return expression;
    }
  }
}
|
|
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.rds.model;
import java.io.Serializable;
import javax.annotation.Generated;
/**
* <p>
* A filter name and value pair that is used to return a more specific list of results from a describe operation.
* Filters can be used to match a set of resources by specific criteria, such as IDs. The filters supported by a
* describe operation are documented with the describe operation.
* </p>
* <note>
* <p>
* Currently, wildcards are not supported in filters.
* </p>
* </note>
* <p>
* The following actions can be filtered:
* </p>
* <ul>
* <li>
* <p>
* <code>DescribeDBClusterBacktracks</code>
* </p>
* </li>
* <li>
* <p>
* <code>DescribeDBClusterEndpoints</code>
* </p>
* </li>
* <li>
* <p>
* <code>DescribeDBClusters</code>
* </p>
* </li>
* <li>
* <p>
* <code>DescribeDBInstances</code>
* </p>
* </li>
* <li>
* <p>
* <code>DescribePendingMaintenanceActions</code>
* </p>
* </li>
* </ul>
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/rds-2014-10-31/Filter" target="_top">AWS API Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class Filter implements Serializable, Cloneable {
/**
* <p>
* The name of the filter. Filter names are case-sensitive.
* </p>
*/
private String name;
/**
* <p>
* One or more filter values. Filter values are case-sensitive.
* </p>
*/
private com.amazonaws.internal.SdkInternalList<String> values;
    /**
     * <p>
     * The name of the filter. Filter names are case-sensitive.
     * </p>
     *
     * @param name
     *        The name of the filter. Filter names are case-sensitive.
     */
    public void setName(String name) {
        this.name = name;
    }
    /**
     * <p>
     * The name of the filter. Filter names are case-sensitive.
     * </p>
     *
     * @return The name of the filter. Filter names are case-sensitive.
     */
    public String getName() {
        return this.name;
    }
    /**
     * <p>
     * The name of the filter. Filter names are case-sensitive.
     * </p>
     *
     * @param name
     *        The name of the filter. Filter names are case-sensitive.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public Filter withName(String name) {
        setName(name);
        return this;
    }
    /**
     * <p>
     * One or more filter values. Filter values are case-sensitive.
     * </p>
     *
     * @return One or more filter values. Filter values are case-sensitive.
     */
    public java.util.List<String> getValues() {
        // Lazily initialize so this getter never returns null.
        if (values == null) {
            values = new com.amazonaws.internal.SdkInternalList<String>();
        }
        return values;
    }
    /**
     * <p>
     * One or more filter values. Filter values are case-sensitive.
     * </p>
     *
     * @param values
     *        One or more filter values. Filter values are case-sensitive.
     */
    public void setValues(java.util.Collection<String> values) {
        if (values == null) {
            this.values = null;
            return;
        }
        // Defensive copy into the SDK's internal list type.
        this.values = new com.amazonaws.internal.SdkInternalList<String>(values);
    }
    /**
     * <p>
     * One or more filter values. Filter values are case-sensitive.
     * </p>
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setValues(java.util.Collection)} or {@link #withValues(java.util.Collection)} if you want to override the
     * existing values.
     * </p>
     *
     * @param values
     *        One or more filter values. Filter values are case-sensitive.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public Filter withValues(String... values) {
        // Initialize the backing list (presized) on first use, then append in order.
        if (this.values == null) {
            setValues(new com.amazonaws.internal.SdkInternalList<String>(values.length));
        }
        for (String ele : values) {
            this.values.add(ele);
        }
        return this;
    }
/**
* <p>
* One or more filter values. Filter values are case-sensitive.
* </p>
*
* @param values
* One or more filter values. Filter values are case-sensitive.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public Filter withValues(java.util.Collection<String> values) {
setValues(values);
return this;
}
/**
* Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
* redacted from this string using a placeholder value.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getName() != null)
sb.append("Name: ").append(getName()).append(",");
if (getValues() != null)
sb.append("Values: ").append(getValues());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof Filter == false)
return false;
Filter other = (Filter) obj;
if (other.getName() == null ^ this.getName() == null)
return false;
if (other.getName() != null && other.getName().equals(this.getName()) == false)
return false;
if (other.getValues() == null ^ this.getValues() == null)
return false;
if (other.getValues() != null && other.getValues().equals(this.getValues()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getName() == null) ? 0 : getName().hashCode());
hashCode = prime * hashCode + ((getValues() == null) ? 0 : getValues().hashCode());
return hashCode;
}
@Override
public Filter clone() {
try {
return (Filter) super.clone();
} catch (CloneNotSupportedException e) {
throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
}
}
}
|
|
package com.github.jnthnclt.os.lab.core.guts;
import com.github.jnthnclt.os.lab.base.BolBuffer;
import com.github.jnthnclt.os.lab.base.UIO;
import com.github.jnthnclt.os.lab.core.LABStats;
import com.github.jnthnclt.os.lab.core.api.exceptions.LABClosedException;
import com.github.jnthnclt.os.lab.core.api.exceptions.LABConcurrentSplitException;
import com.github.jnthnclt.os.lab.core.api.rawhide.Rawhide;
import com.github.jnthnclt.os.lab.core.guts.api.CommitIndex;
import com.github.jnthnclt.os.lab.core.guts.api.IndexFactory;
import com.github.jnthnclt.os.lab.core.guts.api.KeyToString;
import com.github.jnthnclt.os.lab.core.guts.api.MergerBuilder;
import com.github.jnthnclt.os.lab.core.guts.api.ReadIndex;
import com.github.jnthnclt.os.lab.core.guts.api.SplitterBuilder;
import com.github.jnthnclt.os.lab.log.LABLogger;
import com.github.jnthnclt.os.lab.log.LABLoggerFactory;
import com.google.common.collect.Lists;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.atomic.AtomicBoolean;
/**
* @author jonathan.colt
*/
/**
 * Maintains the ordered set of read-only index files for a LAB index and
 * coordinates the background compaction operations (merge and split) over them.
 * All mutations of {@link #merging} and {@link #indexes} happen while holding
 * {@link #indexesLock}; readers take a snapshot of the volatile references.
 */
public class CompactableIndexes {
    // Dedicated lock class so the monitor shows up by name in thread dumps.
    static private class IndexesLock {
    }
    private static final LABLogger LOG = LABLoggerFactory.getLogger();
    // newest to oldest
    private final LABStats stats;
    // Key comparison and entry semantics for this index.
    private final Rawhide rawhide;
    // Kept for context; not read anywhere in this class.
    private final byte[] labId;
    // Kept for context; not read anywhere in this class.
    private final LABFiles labFiles;
    private final IndexesLock indexesLock = new IndexesLock();
    // Parallel to 'indexes': true while slot i participates in a merge/split.
    private volatile boolean[] merging = new boolean[0]; // is volatile for reference changes not value changes.
    private volatile ReadOnlyIndex[] indexes = new ReadOnlyIndex[0]; // is volatile for reference changes not value changes.
    // Bumped under indexesLock on every change to 'indexes'; used by Splitter to detect concurrent appends.
    private volatile long version;
    // Set once this instance has been split away; further appends/txs are refused.
    private volatile boolean disposed = false;
    private volatile boolean closed = false;
    // Guards against more than one compactor task being handed out at a time.
    private final AtomicBoolean compacting = new AtomicBoolean();
    // Newest (timestamp, timestampVersion) pair across all indexes, per rawhide ordering.
    private volatile TimestampAndVersion maxTimestampAndVersion = TimestampAndVersion.NULL;

    /**
     * @param stats    global LAB statistics sink (debt/merge/split counters)
     * @param rawhide  entry codec and key comparison rules
     * @param labId    identifier of the owning LAB
     * @param labFiles file registry of the owning LAB
     */
    public CompactableIndexes(LABStats stats, Rawhide rawhide, byte[] labId, LABFiles labFiles) {
        this.stats = stats;
        this.rawhide = rawhide;
        this.labId = labId;
        this.labFiles = labFiles;
    }
    /**
     * Prepends a freshly written index as the newest member of the set
     * (slot 0 is newest).
     *
     * @param index the new read-only index
     * @return false if this instance was already disposed by a split (the caller
     *         must direct the append elsewhere), true otherwise
     */
    public boolean append(ReadOnlyIndex index) {
        int indexLengthChange;
        synchronized (indexesLock) {
            if (disposed) {
                return false;
            }
            // Grow both parallel arrays by one, placing the new index at slot 0.
            int length = indexes.length + 1;
            boolean[] prependToMerging = new boolean[length];
            prependToMerging[0] = false;
            System.arraycopy(merging, 0, prependToMerging, 1, merging.length);
            ReadOnlyIndex[] prependToIndexes = new ReadOnlyIndex[length];
            prependToIndexes[0] = index;
            System.arraycopy(indexes, 0, prependToIndexes, 1, indexes.length);
            merging = prependToMerging;
            indexLengthChange = prependToIndexes.length - indexes.length; // always +1 here
            indexes = prependToIndexes;
            refreshMaxTimestamp(prependToIndexes);
            version++;
        }
        stats.debt.add(indexLengthChange);
        return true;
    }
    /**
     * Recomputes {@link #maxTimestampAndVersion} as the newest
     * (maxTimestamp, maxTimestampVersion) pair across the given indexes' footers,
     * as judged by {@link Rawhide#isNewerThan}. All call sites in this class hold
     * indexesLock, so the scan sees a stable array.
     */
    private void refreshMaxTimestamp(ReadOnlyIndex[] concurrentReadableIndexs) {
        long maxTimestamp = -1;
        long maxTimestampVersion = -1;
        for (ReadOnlyIndex rawConcurrentReadableIndex : concurrentReadableIndexs) {
            Footer other = rawConcurrentReadableIndex.footer();
            if (rawhide.isNewerThan(other.maxTimestamp,
                other.maxTimestampVersion,
                maxTimestamp,
                maxTimestampVersion)) {
                maxTimestamp = other.maxTimestamp;
                maxTimestampVersion = other.maxTimestampVersion;
            }
        }
        maxTimestampAndVersion = new TimestampAndVersion(maxTimestamp, maxTimestampVersion);
    }
    /** @return the newest (timestamp, version) pair observed across all current indexes. */
    public TimestampAndVersion maxTimeStampAndVersion() {
        return maxTimestampAndVersion;
    }
public int debt() {
if (disposed) {
return 0;
}
int debt = (merging.length - 1);
return debt < 0 ? 0 : debt;
}
public long count() throws Exception {
long count = 0;
for (ReadOnlyIndex g : grab()) {
count += g.count();
}
return count;
}
    /**
     * Closes the readable side of every index and marks this instance closed.
     * Subsequent {@link #tx} calls will throw {@link LABClosedException}.
     */
    public void close() throws Exception {
        synchronized (indexesLock) {
            for (ReadOnlyIndex index : indexes) {
                index.closeReadable();
            }
            closed = true;
        }
    }
    /**
     * Destroys (deletes) every index and marks this instance closed.
     * Subsequent {@link #tx} calls will throw {@link LABClosedException}.
     */
    public void destroy() {
        synchronized (indexesLock) {
            for (ReadOnlyIndex index : indexes) {
                index.destroy();
            }
            closed = true;
        }
    }
    /**
     * Hands out at most one compaction task at a time. The returned task first
     * splits (if any size threshold is exceeded) and then merges (if there is
     * debt), updating the corresponding stats counters around each phase.
     *
     * @return a task to run on a compaction thread, or null when there is
     *         nothing to do, this instance is disposed, or another compaction
     *         is already in flight
     */
    public Callable<Void> compactor(
        LABStats stats,
        String rawhideName,
        long splittableIfKeysLargerThanBytes,
        long splittableIfValuesLargerThanBytes,
        long splittableIfLargerThanBytes,
        SplitterBuilder splitterBuilder,
        int minMergeDebt,
        boolean fsync,
        MergerBuilder mergerBuilder
    ) {
        if (disposed) {
            return null;
        }
        // Nothing to do: not splittable and no merge debt.
        if (!splittable(splittableIfKeysLargerThanBytes, splittableIfValuesLargerThanBytes,
            splittableIfLargerThanBytes) && debt() == 0) {
            return null;
        }
        // Only one outstanding compactor task at a time; cleared in the task's finally.
        if (!compacting.compareAndSet(false, true)) {
            return null;
        }
        return () -> {
            try {
                // Phase 1: split when any configured size threshold is exceeded.
                if (splittable(splittableIfKeysLargerThanBytes, splittableIfValuesLargerThanBytes,
                    splittableIfLargerThanBytes)) {
                    Callable<Void> splitter = splitterBuilder.buildSplitter(rawhideName, fsync, this::buildSplitter);
                    if (splitter != null) {
                        stats.spliting.incrementAndGet();
                        try {
                            splitter.call();
                            stats.split.incrementAndGet();
                        } finally {
                            stats.spliting.decrementAndGet();
                        }
                    }
                }
                // Phase 2: merge down the index count when there is debt.
                if (debt() > 0) {
                    Callable<Void> merger = mergerBuilder.build(rawhideName, minMergeDebt, fsync, this::buildMerger);
                    if (merger != null) {
                        stats.merging.incrementAndGet();
                        try {
                            merger.call();
                            stats.merged.incrementAndGet();
                        } finally {
                            stats.merging.decrementAndGet();
                        }
                    }
                }
                return null;
            } finally {
                compacting.set(false);
            }
        };
    }
    /**
     * Decides whether the combined indexes exceed any of the configured size
     * thresholds (a threshold of <= 0 disables that check). Never splittable when
     * disposed, empty, or when the whole set spans a single key.
     */
    private boolean splittable(
        long splittableIfKeysLargerThanBytes,
        long splittableIfValuesLargerThanBytes,
        long splittableIfLargerThanBytes) {
        ReadOnlyIndex[] splittable;
        synchronized (indexesLock) {
            if (disposed || indexes.length == 0) {
                return false;
            }
            splittable = indexes;
        }
        Comparator<byte[]> byteBufferKeyComparator = rawhide.getKeyComparator();
        byte[] minKey = null;
        byte[] maxKey = null;
        long worstCaseKeysSizeInBytes = 0;
        long worstCaseValuesSizeInBytes = 0;
        long worstCaseSizeInBytes = 0;
        for (ReadOnlyIndex aSplittable : splittable) {
            worstCaseKeysSizeInBytes += aSplittable.keysSizeInBytes();
            worstCaseValuesSizeInBytes += aSplittable.valuesSizeInBytes();
            worstCaseSizeInBytes += aSplittable.sizeInBytes();
            if (minKey == null) {
                minKey = aSplittable.minKey();
            } else {
                minKey = byteBufferKeyComparator.compare(minKey,
                    aSplittable.minKey()) < 0 ? minKey : aSplittable.minKey();
            }
            if (maxKey == null) {
                maxKey = aSplittable.maxKey();
            } else {
                // NOTE(review): this keeps the SMALLER of the two max keys (compare < 0 keeps
                // the current one), i.e. the minimum of the per-index max keys, not the true
                // overall max. Splitter.call() uses the same pattern, so it is at least
                // consistent — confirm whether this is intended before changing it.
                maxKey = byteBufferKeyComparator.compare(maxKey,
                    aSplittable.maxKey()) < 0 ? maxKey : aSplittable.maxKey();
            }
        }
        // A single-key range cannot be split regardless of size.
        if (Arrays.equals(minKey, maxKey)) {
            return false;
        }
        if (splittableIfLargerThanBytes > 0 && worstCaseSizeInBytes > splittableIfLargerThanBytes) {
            return true;
        }
        if (splittableIfKeysLargerThanBytes > 0 && worstCaseKeysSizeInBytes > splittableIfKeysLargerThanBytes) {
            return true;
        }
        return splittableIfValuesLargerThanBytes > 0 && worstCaseValuesSizeInBytes > splittableIfValuesLargerThanBytes;
    }
    /**
     * Captures a snapshot of all indexes for splitting, marking every slot as
     * merging so concurrent merges stay out of the way.
     *
     * @return a {@link Splitter} over the snapshot, or null if any index is
     *         already being merged (the split is abandoned for this round)
     */
    private Splitter buildSplitter(IndexFactory leftHalfIndexFactory,
        IndexFactory rightHalfIndexFactory,
        CommitIndex commitIndex,
        boolean fsync) throws Exception {
        // lock out merging if possible
        long allVersion;
        ReadOnlyIndex[] all;
        synchronized (indexesLock) {
            allVersion = version;
            for (boolean b : merging) {
                if (b) {
                    return null;
                }
            }
            Arrays.fill(merging, true);
            all = indexes;
        }
        return new Splitter(all, allVersion, leftHalfIndexFactory, rightHalfIndexFactory, commitIndex, fsync);
    }
    /**
     * Splits a snapshot of the indexes into a left and a right half around the
     * midpoint key, then repeatedly "catches up" any indexes appended while the
     * split was running (detected via the version counter) before committing the
     * combined result and disposing this {@link CompactableIndexes}.
     */
    public class Splitter implements Callable<Void> {
        // Snapshot of the indexes to split (taken under indexesLock by buildSplitter).
        private final ReadOnlyIndex[] all;
        // Version observed at snapshot time; a mismatch means new indexes arrived.
        private long allVersion;
        private final IndexFactory leftHalfIndexFactory;
        private final IndexFactory rightHalfIndexFactory;
        private final CommitIndex commitIndex;
        private final boolean fsync;

        public Splitter(ReadOnlyIndex[] all,
            long allVersion,
            IndexFactory leftHalfIndexFactory,
            IndexFactory rightHalfIndexFactory,
            CommitIndex commitIndex,
            boolean fsync) {
            this.all = all;
            this.allVersion = allVersion;
            this.leftHalfIndexFactory = leftHalfIndexFactory;
            this.rightHalfIndexFactory = rightHalfIndexFactory;
            this.commitIndex = commitIndex;
            this.fsync = fsync;
        }

        /**
         * Performs the split. On failure the merging flags are cleared and the
         * partially written halves are deleted; on success this instance's
         * outer class is disposed and the snapshot indexes are destroyed.
         */
        @Override
        public Void call() throws Exception {
            BolBuffer leftKeyBuffer = new BolBuffer();
            BolBuffer rightKeyBuffer = new BolBuffer();
            Comparator<byte[]> comparator = rawhide.getKeyComparator();
            while (true) {
                ReadIndex[] readers = new ReadIndex[all.length];
                try {
                    int splitLength = all.length;
                    long worstCaseCount = 0;
                    IndexRangeId join = null;
                    byte[] minKey = null;
                    byte[] maxKey = null;
                    // Acquire a reader per index and fold up the joined range id plus
                    // the overall key bounds used to pick the split midpoint.
                    for (int i = 0; i < all.length; i++) {
                        readers[i] = all[i].acquireReader();
                        worstCaseCount += readers[i].count();
                        IndexRangeId id = all[i].id();
                        if (join == null) {
                            join = new IndexRangeId(id.start, id.end, id.generation + 1);
                        } else {
                            join = join.join(id, Math.max(join.generation, id.generation + 1));
                        }
                        if (minKey == null) {
                            minKey = all[i].minKey();
                        } else {
                            minKey = comparator.compare(minKey, all[i].minKey()) < 0 ? minKey : all[i].minKey();
                        }
                        if (maxKey == null) {
                            maxKey = all[i].maxKey();
                        } else {
                            // NOTE(review): keeps the SMALLER of the two max keys (same pattern as
                            // splittable()); looks like a min-of-max rather than a true max — confirm.
                            maxKey = comparator.compare(maxKey, all[i].maxKey()) < 0 ? maxKey : all[i].maxKey();
                        }
                    }
                    if (Arrays.equals(minKey, maxKey)) {
                        // TODO how not to get here over an over again when a key is larger that split size in byte Cannot split a single key
                        LOG.warn("Trying to split a single key." + Arrays.toString(minKey));
                        return null;
                    } else {
                        BolBuffer entryKeyBuffer = new BolBuffer();
                        // Midpoint key between minKey and maxKey; entries < middle go left, >= middle go right.
                        byte[] middle = Lists.newArrayList(
                            UIO.iterateOnSplits(minKey, maxKey, true, 1, rawhide.getKeyComparator())).get(1);
                        BolBuffer bbMiddle = new BolBuffer(middle);
                        LABAppendableIndex leftAppendableIndex = null;
                        LABAppendableIndex rightAppendableIndex = null;
                        try {
                            leftAppendableIndex = leftHalfIndexFactory.createIndex(join, worstCaseCount - 1);
                            rightAppendableIndex = rightHalfIndexFactory.createIndex(join, worstCaseCount - 1);
                            LABAppendableIndex effectiveFinalRightAppenableIndex = rightAppendableIndex;
                            InterleaveStream feedInterleaver = new InterleaveStream(rawhide,
                                ActiveScan.indexToFeeds(readers, false, false, null, null, rawhide, null));
                            try {
                                LOG.debug("Splitting with a middle of:{}", Arrays.toString(middle));
                                // Stream every interleaved entry into the left or right half
                                // depending on which side of the midpoint its key falls.
                                leftAppendableIndex.append((leftStream) -> {
                                    return effectiveFinalRightAppenableIndex.append((rightStream) -> {
                                        BolBuffer rawEntry = new BolBuffer();
                                        while ((rawEntry = feedInterleaver.next(rawEntry, null)) != null) {
                                            int c = rawhide.compareKey(rawEntry, entryKeyBuffer,
                                                bbMiddle);
                                            if (c < 0) {
                                                if (!leftStream.stream(rawEntry)) {
                                                    return false;
                                                }
                                            } else if (!rightStream.stream(rawEntry)) {
                                                return false;
                                            }
                                        }
                                        return true;
                                    }, rightKeyBuffer);
                                }, leftKeyBuffer);
                            } finally {
                                feedInterleaver.close();
                            }
                            LOG.debug("Splitting is flushing for a middle of:{}", Arrays.toString(middle));
                            // Flush each half, discarding it entirely if it ended up empty.
                            if (leftAppendableIndex.getCount() > 0) {
                                leftAppendableIndex.closeAppendable(fsync);
                            } else {
                                leftAppendableIndex.delete();
                            }
                            if (rightAppendableIndex.getCount() > 0) {
                                rightAppendableIndex.closeAppendable(fsync);
                            } else {
                                rightAppendableIndex.delete();
                            }
                        } catch (Exception x) {
                            // Best-effort cleanup of the partially written halves, then rethrow.
                            try {
                                if (leftAppendableIndex != null) {
                                    leftAppendableIndex.close();
                                    leftAppendableIndex.delete();
                                }
                                if (rightAppendableIndex != null) {
                                    rightAppendableIndex.close();
                                    rightAppendableIndex.delete();
                                }
                            } catch (Exception xx) {
                                LOG.error("Failed while trying to cleanup after a failure.", xx);
                            }
                            throw x;
                        }
                        List<IndexRangeId> commitRanges = new ArrayList<>();
                        commitRanges.add(join);
                        LOG.debug("Splitting trying to catchup for a middle of:{}", Arrays.toString(middle));
                        CATCHUP_YOU_BABY_TOMATO:
                        while (true) {
                            ReadOnlyIndex[] catchupMergeSet;
                            synchronized (indexesLock) {
                                if (allVersion == version) {
                                    // No concurrent appends since our snapshot: commit and dispose.
                                    LOG.debug("Commiting split for a middle of:{}", Arrays.toString(middle));
                                    ReadOnlyIndex commit = commitIndex.commit(commitRanges);
                                    disposed = true;
                                    for (ReadOnlyIndex destroy : all) {
                                        destroy.destroy();
                                    }
                                    stats.debt.add(-indexes.length);
                                    indexes = new ReadOnlyIndex[0]; // TODO go handle null so that thread wait rety higher up
                                    refreshMaxTimestamp(indexes);
                                    version++;
                                    merging = new boolean[0];
                                    LOG.debug("All done splitting :) for a middle of:{}", Arrays.toString(middle));
                                    return null;
                                } else {
                                    // New indexes were appended while we split; they occupy the
                                    // leading slots (newest first) and must be split too.
                                    LOG.debug("Version has changed {} for a middle of:{}", allVersion,
                                        Arrays.toString(middle));
                                    int catchupLength = merging.length - splitLength;
                                    for (int i = 0; i < catchupLength; i++) {
                                        if (merging[i]) {
                                            LOG.debug("Waiting for merge flag to clear at {} for a middle of:{}", i,
                                                Arrays.toString(middle));
                                            LOG.debug("splitLength={} merge.length={} catchupLength={}", splitLength,
                                                merging.length, catchupLength);
                                            LOG.debug("merging:{}", Arrays.toString(merging));
                                            // NOTE(review): relies on another thread notifying on indexesLock;
                                            // no notify/notifyAll is visible in this class — confirm the wake-up path.
                                            indexesLock.wait();
                                            LOG.debug("Merge flag to cleared at {} for a middle of:{}", i,
                                                Arrays.toString(middle));
                                            continue CATCHUP_YOU_BABY_TOMATO;
                                        }
                                    }
                                    allVersion = version;
                                    catchupMergeSet = new ReadOnlyIndex[catchupLength];
                                    Arrays.fill(merging, 0, catchupLength, true);
                                    System.arraycopy(indexes, 0, catchupMergeSet, 0, catchupLength);
                                    splitLength = merging.length;
                                }
                            }
                            // Split each newly appended index around the same midpoint and
                            // queue it (newest first) for the final commit.
                            for (ReadOnlyIndex catchup : catchupMergeSet) {
                                IndexRangeId id = catchup.id();
                                LABAppendableIndex catchupLeftAppendableIndex = null;
                                LABAppendableIndex catchupRightAppendableIndex = null;
                                try {
                                    catchupLeftAppendableIndex = leftHalfIndexFactory.createIndex(id, catchup.count());
                                    catchupRightAppendableIndex = rightHalfIndexFactory.createIndex(id,
                                        catchup.count());
                                    LABAppendableIndex effectivelyFinalCatchupRightAppendableIndex = catchupRightAppendableIndex;
                                    ReadIndex catchupReader = catchup.acquireReader();
                                    try {
                                        InterleaveStream catchupFeedInterleaver = new InterleaveStream(rawhide,
                                            ActiveScan.indexToFeeds(new ReadIndex[]{catchup}, false, false, null,
                                                null, rawhide, null));
                                        try {
                                            LOG.debug("Doing a catchup split for a middle of:{}",
                                                Arrays.toString(middle));
                                            catchupLeftAppendableIndex.append((leftStream) -> {
                                                return effectivelyFinalCatchupRightAppendableIndex.append(
                                                    (rightStream) -> {
                                                        BolBuffer rawEntry = new BolBuffer();
                                                        while ((rawEntry = catchupFeedInterleaver.next(rawEntry,
                                                            null)) != null) {
                                                            if (rawhide.compareKey(
                                                                rawEntry,
                                                                entryKeyBuffer,
                                                                bbMiddle) < 0) {
                                                                if (!leftStream.stream(rawEntry)) {
                                                                    return false;
                                                                }
                                                            } else if (!rightStream.stream(rawEntry)) {
                                                                return false;
                                                            }
                                                        }
                                                        return true;
                                                    }, rightKeyBuffer);
                                            }, leftKeyBuffer);
                                        } finally {
                                            catchupFeedInterleaver.close();
                                        }
                                    } finally {
                                        catchupReader.release();
                                    }
                                    LOG.debug("Catchup splitting is flushing for a middle of:{}",
                                        Arrays.toString(middle));
                                    catchupLeftAppendableIndex.closeAppendable(fsync);
                                    catchupRightAppendableIndex.closeAppendable(fsync);
                                    commitRanges.add(0, id);
                                } catch (Exception x) {
                                    // Best-effort cleanup of the catchup halves, then rethrow.
                                    try {
                                        if (catchupLeftAppendableIndex != null) {
                                            catchupLeftAppendableIndex.close();
                                            catchupLeftAppendableIndex.delete();
                                        }
                                        if (catchupRightAppendableIndex != null) {
                                            catchupRightAppendableIndex.close();
                                            catchupRightAppendableIndex.delete();
                                        }
                                    } catch (Exception xx) {
                                        LOG.error("Failed while trying to cleanup after a failure.", xx);
                                    }
                                    throw x;
                                }
                            }
                        }
                    }
                } catch (Exception x) {
                    StringBuilder sb = new StringBuilder();
                    sb.append("[");
                    for (int i = 0; i < all.length; i++) {
                        if (i > 0) {
                            sb.append(", ");
                        }
                        sb.append(all[i].name());
                    }
                    sb.append("]");
                    LOG.error("Failed to split:" + allVersion + " for " + sb.toString(), x);
                    // Release the merge flags claimed by buildSplitter so others can proceed.
                    synchronized (indexesLock) {
                        Arrays.fill(merging, false);
                    }
                    throw x;
                } finally {
                    for (ReadIndex reader : readers) {
                        if (reader != null) {
                            reader.release();
                        }
                    }
                }
            }
        }
    }
    /**
     * Selects a contiguous run of indexes to merge (via
     * {@link TieredCompaction#hbaseSause}), marks them as merging, and wraps
     * them in a {@link Merger}.
     *
     * @return a Merger task, or null when there are fewer than two indexes or
     *         no eligible merge range was found
     */
    private Merger buildMerger(int minimumRun,
        boolean fsync,
        IndexFactory indexFactory,
        CommitIndex commitIndex) throws Exception {
        boolean[] mergingCopy;
        ReadOnlyIndex[] indexesCopy;
        ReadOnlyIndex[] mergeSet;
        MergeRange mergeRange;
        long[] counts;
        long[] sizes;
        long[] generations;
        synchronized (indexesLock) { // prevent others from trying to merge the same things
            if (indexes == null || indexes.length <= 1) {
                return null;
            }
            mergingCopy = merging;
            indexesCopy = indexes;
            counts = new long[indexesCopy.length];
            sizes = new long[indexesCopy.length];
            generations = new long[indexesCopy.length];
            for (int i = 0; i < counts.length; i++) {
                counts[i] = indexesCopy[i].count();
                generations[i] = indexesCopy[i].id().generation;
                sizes[i] = indexesCopy[i].sizeInBytes();
            }
            mergeRange = TieredCompaction.hbaseSause(minimumRun, mergingCopy, counts, sizes, generations);
            if (mergeRange == null) {
                return null;
            }
            mergeSet = new ReadOnlyIndex[mergeRange.length];
            System.arraycopy(indexesCopy, mergeRange.offset, mergeSet, 0, mergeRange.length);
            // Flag the chosen range as merging so splits and other merges skip it.
            boolean[] updateMerging = new boolean[merging.length];
            System.arraycopy(merging, 0, updateMerging, 0, merging.length);
            Arrays.fill(updateMerging, mergeRange.offset, mergeRange.offset + mergeRange.length, true);
            merging = updateMerging;
        }
        // Fold the chosen indexes into a single joined range id at the next generation.
        IndexRangeId join = null;
        for (ReadOnlyIndex m : mergeSet) {
            IndexRangeId id = m.id();
            if (join == null) {
                join = new IndexRangeId(id.start, id.end, mergeRange.generation + 1);
            } else {
                join = join.join(id, Math.max(join.generation, id.generation));
            }
        }
        return new Merger(counts, generations, mergeSet, join, indexFactory, commitIndex, fsync, mergeRange);
    }
    /**
     * Merges a contiguous run of indexes into one new index, commits it, and
     * splices it into {@link #indexes} in place of the merged run. On failure
     * the merge flags for the run are cleared so the range can be retried.
     */
    public class Merger implements Callable<Void> {
        private final long[] counts;
        private final long[] generations;
        // The contiguous run of indexes being merged (selected by buildMerger).
        private final ReadOnlyIndex[] mergeSet;
        private final IndexRangeId mergeRangeId;
        private final IndexFactory indexFactory;
        private final CommitIndex commitIndex;
        private final boolean fsync;
        private final MergeRange mergeRange;

        private Merger(
            long[] counts,
            long[] generations,
            ReadOnlyIndex[] mergeSet,
            IndexRangeId mergeRangeId,
            IndexFactory indexFactory,
            CommitIndex commitIndex,
            boolean fsync,
            MergeRange mergeRange) {
            this.mergeRange = mergeRange;
            this.counts = counts;
            this.generations = generations;
            this.mergeSet = mergeSet;
            this.mergeRangeId = mergeRangeId;
            this.indexFactory = indexFactory;
            this.commitIndex = commitIndex;
            this.fsync = fsync;
        }

        @Override
        public String toString() {
            return "Merger{" + "mergeRangeId=" + mergeRangeId + '}';
        }

        /**
         * Runs the merge; always returns null. Errors are logged and the merge
         * flags reset rather than propagated.
         */
        @Override
        public Void call() throws Exception {
            BolBuffer keyBuffer = new BolBuffer();
            ReadOnlyIndex index;
            ReadIndex[] readers = new ReadIndex[mergeSet.length];
            try {
                long startMerge = System.currentTimeMillis();
                long worstCaseCount = 0;
                for (int i = 0; i < mergeSet.length; i++) {
                    readers[i] = mergeSet[i].acquireReader();
                    worstCaseCount += readers[i].count();
                }
                LABAppendableIndex appendableIndex = null;
                try {
                    appendableIndex = indexFactory.createIndex(mergeRangeId, worstCaseCount);
                    // Stream the interleaved (already deduplicated) entries into the new index.
                    InterleaveStream feedInterleaver = new InterleaveStream(rawhide,
                        ActiveScan.indexToFeeds(readers, false, false, null, null, rawhide, null));
                    try {
                        appendableIndex.append((stream) -> {
                            BolBuffer rawEntry = new BolBuffer();
                            while ((rawEntry = feedInterleaver.next(rawEntry, null)) != null) {
                                if (!stream.stream(rawEntry)) {
                                    return false;
                                }
                            }
                            return true;
                        }, keyBuffer);
                    } finally {
                        feedInterleaver.close();
                    }
                    appendableIndex.closeAppendable(fsync);
                } catch (Exception x) {
                    // Best-effort cleanup of the partially written merge output, then rethrow.
                    try {
                        if (appendableIndex != null) {
                            appendableIndex.close();
                            appendableIndex.delete();
                        }
                    } catch (Exception xx) {
                        LOG.error("Failed while trying to cleanup after a failure.", xx);
                    }
                    throw x;
                }
                index = commitIndex.commit(Collections.singletonList(mergeRangeId));
                int indexLengthChange;
                synchronized (indexesLock) {
                    // Rebuild both parallel arrays, replacing the merged run with the
                    // single committed index at the position of the run's first member.
                    int newLength = (indexes.length - mergeSet.length) + 1;
                    boolean[] updateMerging = new boolean[newLength];
                    ReadOnlyIndex[] updateIndexes = new ReadOnlyIndex[newLength];
                    int ui = 0;
                    int mi = 0;
                    for (int i = 0; i < indexes.length; i++) {
                        if (mi < mergeSet.length && indexes[i] == mergeSet[mi]) {
                            if (mi == 0) {
                                updateMerging[ui] = false;
                                updateIndexes[ui] = index;
                                ui++;
                            }
                            mi++;
                        } else {
                            updateMerging[ui] = merging[i];
                            updateIndexes[ui] = indexes[i];
                            ui++;
                        }
                    }
                    merging = updateMerging;
                    indexLengthChange = updateIndexes.length - indexes.length;
                    indexes = updateIndexes;
                    refreshMaxTimestamp(updateIndexes);
                    version++;
                }
                stats.debt.add(indexLengthChange);
                LOG.debug("Merged: {} millis counts:{} gens:{} {}",
                    (System.currentTimeMillis() - startMerge),
                    TieredCompaction.range(counts, mergeRange.offset, mergeRange.length),
                    Arrays.toString(generations),
                    index.name()
                );
                // The merged inputs are no longer referenced; delete them.
                for (ReadOnlyIndex rawConcurrentReadableIndex : mergeSet) {
                    rawConcurrentReadableIndex.destroy();
                }
            } catch (Exception x) {
                StringBuilder sb = new StringBuilder();
                sb.append("[");
                for (int i = 0; i < mergeSet.length; i++) {
                    if (i > 0) {
                        sb.append(", ");
                    }
                    sb.append(mergeSet[i].name());
                }
                sb.append("]");
                LOG.error("Failed to merge range:" + mergeRangeId + " for " + sb.toString(), x);
                // Clear the merge flags for the failed run so it can be retried.
                synchronized (indexesLock) {
                    boolean[] updateMerging = new boolean[merging.length];
                    int mi = 0;
                    for (int i = 0; i < indexes.length; i++) {
                        if (mi < mergeSet.length && indexes[i] == mergeSet[mi]) {
                            updateMerging[i] = false;
                            mi++;
                        } else {
                            updateMerging[i] = merging[i];
                        }
                    }
                    merging = updateMerging;
                }
            } finally {
                for (ReadIndex reader : readers) {
                    if (reader != null) {
                        reader.release();
                    }
                }
            }
            return null;
        }
    }
    /**
     * Runs a read transaction against an acquired snapshot of all indexes.
     * If any reader cannot be acquired (index concurrently retired) the whole
     * acquisition restarts against a fresh snapshot; readers are always
     * released afterwards.
     *
     * @throws LABConcurrentSplitException if this instance was disposed by a split
     * @throws LABClosedException          if {@link #close()} / {@link #destroy()} was called
     */
    public boolean tx(int index,
        boolean pointFrom,
        byte[] fromKey,
        byte[] toKey,
        ReaderTx tx,
        boolean hydrateValues) throws Exception {
        ReadOnlyIndex[] stackIndexes;
        ReadIndex[] readIndexs;
        START_OVER:
        while (true) {
            synchronized (indexesLock) {
                if (disposed) {
                    throw new LABConcurrentSplitException();
                }
                if (closed) {
                    throw new LABClosedException("");
                }
                stackIndexes = indexes;
            }
            readIndexs = new ReadIndex[stackIndexes.length];
            try {
                for (int i = 0; i < readIndexs.length; i++) {
                    readIndexs[i] = stackIndexes[i].acquireReader();
                    if (readIndexs[i] == null) {
                        // Index went away between snapshot and acquire; retry from scratch.
                        releaseReaders(readIndexs);
                        continue START_OVER;
                    }
                }
            } catch (Exception x) {
                releaseReaders(readIndexs);
                throw x;
            }
            break;
        }
        try {
            return tx.tx(index, pointFrom, fromKey, toKey, readIndexs, hydrateValues);
        } finally {
            releaseReaders(readIndexs);
        }
    }
private void releaseReaders(ReadIndex[] readIndexs) {
for (int i = 0; i < readIndexs.length; i++) {
if (readIndexs[i] != null) {
readIndexs[i].release();
readIndexs[i] = null;
}
}
}
private ReadOnlyIndex[] grab() {
ReadOnlyIndex[] copy;
synchronized (indexesLock) {
copy = indexes;
}
return copy;
}
    /**
     * Debugging aid: prints "prefix minKey->maxKey" to stdout for every index
     * in a snapshot of the current set.
     */
    void auditRanges(String prefix, KeyToString keyToString) {
        ReadOnlyIndex[] copy;
        synchronized (indexesLock) {
            copy = indexes;
        }
        for (ReadOnlyIndex aCopy : copy) {
            System.out.println(
                prefix + keyToString.keyToString(aCopy.minKey()) + "->" + keyToString.keyToString(aCopy.maxKey()));
        }
    }
}
|
|
/**
* Copyright (c) 2000-2013 Liferay, Inc. All rights reserved.
*
* This library is free software; you can redistribute it and/or modify it under
* the terms of the GNU Lesser General Public License as published by the Free
* Software Foundation; either version 2.1 of the License, or (at your option)
* any later version.
*
* This library is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
* details.
*/
package com.liferay.docs.guestbook.service;
import com.liferay.portal.service.InvokableLocalService;
/**
* @author Rich Sezov
* @generated
*/
public class EntryLocalServiceClp implements EntryLocalService {
    /**
     * Wires this client-side proxy to the remote invokable service and records
     * the method-name / parameter-type-signature pairs used for reflective
     * dispatch by the generated methods below.
     * NOTE(review): the generated indices jump from 21 to 23 — slot 22 is unused.
     */
    public EntryLocalServiceClp(InvokableLocalService invokableLocalService) {
        _invokableLocalService = invokableLocalService;
        _methodName0 = "addEntry";
        _methodParameterTypes0 = new String[] {
                "com.liferay.docs.guestbook.model.Entry"
            };
        _methodName1 = "createEntry";
        _methodParameterTypes1 = new String[] { "long" };
        _methodName2 = "deleteEntry";
        _methodParameterTypes2 = new String[] { "long" };
        _methodName3 = "deleteEntry";
        _methodParameterTypes3 = new String[] {
                "com.liferay.docs.guestbook.model.Entry"
            };
        _methodName4 = "dynamicQuery";
        _methodParameterTypes4 = new String[] {  };
        _methodName5 = "dynamicQuery";
        _methodParameterTypes5 = new String[] {
                "com.liferay.portal.kernel.dao.orm.DynamicQuery"
            };
        _methodName6 = "dynamicQuery";
        _methodParameterTypes6 = new String[] {
                "com.liferay.portal.kernel.dao.orm.DynamicQuery", "int", "int"
            };
        _methodName7 = "dynamicQuery";
        _methodParameterTypes7 = new String[] {
                "com.liferay.portal.kernel.dao.orm.DynamicQuery", "int", "int",
                "com.liferay.portal.kernel.util.OrderByComparator"
            };
        _methodName8 = "dynamicQueryCount";
        _methodParameterTypes8 = new String[] {
                "com.liferay.portal.kernel.dao.orm.DynamicQuery"
            };
        _methodName9 = "dynamicQueryCount";
        _methodParameterTypes9 = new String[] {
                "com.liferay.portal.kernel.dao.orm.DynamicQuery",
                "com.liferay.portal.kernel.dao.orm.Projection"
            };
        _methodName10 = "fetchEntry";
        _methodParameterTypes10 = new String[] { "long" };
        _methodName11 = "fetchEntryByUuidAndCompanyId";
        _methodParameterTypes11 = new String[] { "java.lang.String", "long" };
        _methodName12 = "fetchEntryByUuidAndGroupId";
        _methodParameterTypes12 = new String[] { "java.lang.String", "long" };
        _methodName13 = "getEntry";
        _methodParameterTypes13 = new String[] { "long" };
        _methodName14 = "getPersistedModel";
        _methodParameterTypes14 = new String[] { "java.io.Serializable" };
        _methodName15 = "getEntryByUuidAndCompanyId";
        _methodParameterTypes15 = new String[] { "java.lang.String", "long" };
        _methodName16 = "getEntryByUuidAndGroupId";
        _methodParameterTypes16 = new String[] { "java.lang.String", "long" };
        _methodName17 = "getEntries";
        _methodParameterTypes17 = new String[] { "int", "int" };
        _methodName18 = "getEntriesCount";
        _methodParameterTypes18 = new String[] {  };
        _methodName19 = "updateEntry";
        _methodParameterTypes19 = new String[] {
                "com.liferay.docs.guestbook.model.Entry"
            };
        _methodName20 = "getBeanIdentifier";
        _methodParameterTypes20 = new String[] {  };
        _methodName21 = "setBeanIdentifier";
        _methodParameterTypes21 = new String[] { "java.lang.String" };
        _methodName23 = "getEntriesByG_G_N";
        _methodParameterTypes23 = new String[] {
                "long", "long", "java.lang.String"
            };
        _methodName24 = "getEntries";
        _methodParameterTypes24 = new String[] { "long", "long" };
        _methodName25 = "getEntries";
        _methodParameterTypes25 = new String[] {
                "long", "long", "int", "int", "int"
            };
        _methodName26 = "getEntriesCount";
        _methodParameterTypes26 = new String[] { "long", "long", "int" };
        _methodName27 = "deleteEntry";
        _methodParameterTypes27 = new String[] {
                "long", "com.liferay.portal.service.ServiceContext"
            };
        _methodName28 = "addEntry";
        _methodParameterTypes28 = new String[] {
                "long", "long", "java.lang.String", "java.lang.String",
                "java.lang.String", "com.liferay.portal.service.ServiceContext"
            };
        _methodName29 = "updateEntry";
        _methodParameterTypes29 = new String[] {
                "long", "long", "long", "java.lang.String", "java.lang.String",
                "java.lang.String", "com.liferay.portal.service.ServiceContext"
            };
        _methodName30 = "updateStatus";
        _methodParameterTypes30 = new String[] {
                "long", "long", "int",
                "com.liferay.portal.service.ServiceContext"
            };
    }
    /**
     * Remotely invokes {@code addEntry(Entry)} through the invokable service,
     * translating the entity across class loaders in both directions and
     * re-throwing the declared exception types.
     */
    @Override
    public com.liferay.docs.guestbook.model.Entry addEntry(
        com.liferay.docs.guestbook.model.Entry entry)
        throws com.liferay.portal.kernel.exception.SystemException {
        Object returnObj = null;
        try {
            returnObj = _invokableLocalService.invokeMethod(_methodName0,
                    _methodParameterTypes0,
                    new Object[] { ClpSerializer.translateInput(entry) });
        }
        catch (Throwable t) {
            t = ClpSerializer.translateThrowable(t);
            if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
                throw (com.liferay.portal.kernel.exception.SystemException)t;
            }
            if (t instanceof RuntimeException) {
                throw (RuntimeException)t;
            }
            else {
                throw new RuntimeException(t.getClass().getName() +
                    " is not a valid exception");
            }
        }
        return (com.liferay.docs.guestbook.model.Entry)ClpSerializer.translateOutput(returnObj);
    }
    /**
     * Remotely invokes {@code createEntry(long)} through the invokable service
     * and translates the returned entity back for this class loader.
     */
    @Override
    public com.liferay.docs.guestbook.model.Entry createEntry(long entryId) {
        Object returnObj = null;
        try {
            returnObj = _invokableLocalService.invokeMethod(_methodName1,
                    _methodParameterTypes1, new Object[] { entryId });
        }
        catch (Throwable t) {
            t = ClpSerializer.translateThrowable(t);
            if (t instanceof RuntimeException) {
                throw (RuntimeException)t;
            }
            else {
                throw new RuntimeException(t.getClass().getName() +
                    " is not a valid exception");
            }
        }
        return (com.liferay.docs.guestbook.model.Entry)ClpSerializer.translateOutput(returnObj);
    }
    /**
     * Remotely invokes {@code deleteEntry(long)} through the invokable service,
     * re-throwing the declared portal/system exception types.
     */
    @Override
    public com.liferay.docs.guestbook.model.Entry deleteEntry(long entryId)
        throws com.liferay.portal.kernel.exception.PortalException,
            com.liferay.portal.kernel.exception.SystemException {
        Object returnObj = null;
        try {
            returnObj = _invokableLocalService.invokeMethod(_methodName2,
                    _methodParameterTypes2, new Object[] { entryId });
        }
        catch (Throwable t) {
            t = ClpSerializer.translateThrowable(t);
            if (t instanceof com.liferay.portal.kernel.exception.PortalException) {
                throw (com.liferay.portal.kernel.exception.PortalException)t;
            }
            if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
                throw (com.liferay.portal.kernel.exception.SystemException)t;
            }
            if (t instanceof RuntimeException) {
                throw (RuntimeException)t;
            }
            else {
                throw new RuntimeException(t.getClass().getName() +
                    " is not a valid exception");
            }
        }
        return (com.liferay.docs.guestbook.model.Entry)ClpSerializer.translateOutput(returnObj);
    }
@Override
public com.liferay.docs.guestbook.model.Entry deleteEntry(
com.liferay.docs.guestbook.model.Entry entry)
throws com.liferay.portal.kernel.exception.SystemException {
Object returnObj = null;
try {
returnObj = _invokableLocalService.invokeMethod(_methodName3,
_methodParameterTypes3,
new Object[] { ClpSerializer.translateInput(entry) });
}
catch (Throwable t) {
t = ClpSerializer.translateThrowable(t);
if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
throw (com.liferay.portal.kernel.exception.SystemException)t;
}
if (t instanceof RuntimeException) {
throw (RuntimeException)t;
}
else {
throw new RuntimeException(t.getClass().getName() +
" is not a valid exception");
}
}
return (com.liferay.docs.guestbook.model.Entry)ClpSerializer.translateOutput(returnObj);
}
// Generated CLP delegates for the four standard dynamicQuery(...) overloads
// (method table slots 4-7): factory, unbounded query, ranged query, and
// ranged+ordered query.
// Creates an empty DynamicQuery for the Entry entity (slot 4).
@Override
public com.liferay.portal.kernel.dao.orm.DynamicQuery dynamicQuery() {
Object returnObj = null;
try {
returnObj = _invokableLocalService.invokeMethod(_methodName4,
_methodParameterTypes4, new Object[] {  });
}
catch (Throwable t) {
t = ClpSerializer.translateThrowable(t);
if (t instanceof RuntimeException) {
throw (RuntimeException)t;
}
else {
// Undeclared checked exceptions cannot propagate; wrap them.
throw new RuntimeException(t.getClass().getName() +
" is not a valid exception");
}
}
return (com.liferay.portal.kernel.dao.orm.DynamicQuery)ClpSerializer.translateOutput(returnObj);
}
// Executes the query and returns all matching rows (slot 5).
@Override
@SuppressWarnings("rawtypes")
public java.util.List dynamicQuery(
com.liferay.portal.kernel.dao.orm.DynamicQuery dynamicQuery)
throws com.liferay.portal.kernel.exception.SystemException {
Object returnObj = null;
try {
returnObj = _invokableLocalService.invokeMethod(_methodName5,
_methodParameterTypes5,
new Object[] { ClpSerializer.translateInput(dynamicQuery) });
}
catch (Throwable t) {
t = ClpSerializer.translateThrowable(t);
if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
throw (com.liferay.portal.kernel.exception.SystemException)t;
}
if (t instanceof RuntimeException) {
throw (RuntimeException)t;
}
else {
throw new RuntimeException(t.getClass().getName() +
" is not a valid exception");
}
}
return (java.util.List)ClpSerializer.translateOutput(returnObj);
}
// Executes the query and returns the [start, end) range of rows (slot 6).
@Override
@SuppressWarnings("rawtypes")
public java.util.List dynamicQuery(
com.liferay.portal.kernel.dao.orm.DynamicQuery dynamicQuery, int start,
int end) throws com.liferay.portal.kernel.exception.SystemException {
Object returnObj = null;
try {
returnObj = _invokableLocalService.invokeMethod(_methodName6,
_methodParameterTypes6,
new Object[] {
ClpSerializer.translateInput(dynamicQuery),
start,
end
});
}
catch (Throwable t) {
t = ClpSerializer.translateThrowable(t);
if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
throw (com.liferay.portal.kernel.exception.SystemException)t;
}
if (t instanceof RuntimeException) {
throw (RuntimeException)t;
}
else {
throw new RuntimeException(t.getClass().getName() +
" is not a valid exception");
}
}
return (java.util.List)ClpSerializer.translateOutput(returnObj);
}
// Executes the query with a range and an ordering comparator (slot 7).
@Override
@SuppressWarnings("rawtypes")
public java.util.List dynamicQuery(
com.liferay.portal.kernel.dao.orm.DynamicQuery dynamicQuery, int start,
int end,
com.liferay.portal.kernel.util.OrderByComparator orderByComparator)
throws com.liferay.portal.kernel.exception.SystemException {
Object returnObj = null;
try {
returnObj = _invokableLocalService.invokeMethod(_methodName7,
_methodParameterTypes7,
new Object[] {
ClpSerializer.translateInput(dynamicQuery),
start,
end,
ClpSerializer.translateInput(orderByComparator)
});
}
catch (Throwable t) {
t = ClpSerializer.translateThrowable(t);
if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
throw (com.liferay.portal.kernel.exception.SystemException)t;
}
if (t instanceof RuntimeException) {
throw (RuntimeException)t;
}
else {
throw new RuntimeException(t.getClass().getName() +
" is not a valid exception");
}
}
return (java.util.List)ClpSerializer.translateOutput(returnObj);
}
// Generated CLP delegates for dynamicQueryCount (slots 8-9). The remote call
// returns a boxed Long which is unwrapped to a primitive long here.
// Counts rows matching the query (slot 8).
@Override
public long dynamicQueryCount(
com.liferay.portal.kernel.dao.orm.DynamicQuery dynamicQuery)
throws com.liferay.portal.kernel.exception.SystemException {
Object returnObj = null;
try {
returnObj = _invokableLocalService.invokeMethod(_methodName8,
_methodParameterTypes8,
new Object[] { ClpSerializer.translateInput(dynamicQuery) });
}
catch (Throwable t) {
t = ClpSerializer.translateThrowable(t);
if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
throw (com.liferay.portal.kernel.exception.SystemException)t;
}
if (t instanceof RuntimeException) {
throw (RuntimeException)t;
}
else {
throw new RuntimeException(t.getClass().getName() +
" is not a valid exception");
}
}
return ((Long)returnObj).longValue();
}
// Counts rows matching the query using an explicit projection (slot 9).
@Override
public long dynamicQueryCount(
com.liferay.portal.kernel.dao.orm.DynamicQuery dynamicQuery,
com.liferay.portal.kernel.dao.orm.Projection projection)
throws com.liferay.portal.kernel.exception.SystemException {
Object returnObj = null;
try {
returnObj = _invokableLocalService.invokeMethod(_methodName9,
_methodParameterTypes9,
new Object[] {
ClpSerializer.translateInput(dynamicQuery),
ClpSerializer.translateInput(projection)
});
}
catch (Throwable t) {
t = ClpSerializer.translateThrowable(t);
if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
throw (com.liferay.portal.kernel.exception.SystemException)t;
}
if (t instanceof RuntimeException) {
throw (RuntimeException)t;
}
else {
throw new RuntimeException(t.getClass().getName() +
" is not a valid exception");
}
}
return ((Long)returnObj).longValue();
}
// Generated CLP delegates for the fetch* lookups (slots 10-12). Unlike the
// get* variants these declare no PortalException — presumably they return
// null when no matching Entry exists (standard ServiceBuilder convention;
// TODO confirm against the implementation class).
// Fetches an Entry by primary key (slot 10).
@Override
public com.liferay.docs.guestbook.model.Entry fetchEntry(long entryId)
throws com.liferay.portal.kernel.exception.SystemException {
Object returnObj = null;
try {
returnObj = _invokableLocalService.invokeMethod(_methodName10,
_methodParameterTypes10, new Object[] { entryId });
}
catch (Throwable t) {
t = ClpSerializer.translateThrowable(t);
if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
throw (com.liferay.portal.kernel.exception.SystemException)t;
}
if (t instanceof RuntimeException) {
throw (RuntimeException)t;
}
else {
throw new RuntimeException(t.getClass().getName() +
" is not a valid exception");
}
}
return (com.liferay.docs.guestbook.model.Entry)ClpSerializer.translateOutput(returnObj);
}
// Fetches an Entry by UUID within a company (slot 11).
@Override
public com.liferay.docs.guestbook.model.Entry fetchEntryByUuidAndCompanyId(
java.lang.String uuid, long companyId)
throws com.liferay.portal.kernel.exception.SystemException {
Object returnObj = null;
try {
returnObj = _invokableLocalService.invokeMethod(_methodName11,
_methodParameterTypes11,
new Object[] { ClpSerializer.translateInput(uuid), companyId });
}
catch (Throwable t) {
t = ClpSerializer.translateThrowable(t);
if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
throw (com.liferay.portal.kernel.exception.SystemException)t;
}
if (t instanceof RuntimeException) {
throw (RuntimeException)t;
}
else {
throw new RuntimeException(t.getClass().getName() +
" is not a valid exception");
}
}
return (com.liferay.docs.guestbook.model.Entry)ClpSerializer.translateOutput(returnObj);
}
// Fetches an Entry by UUID within a group (slot 12).
@Override
public com.liferay.docs.guestbook.model.Entry fetchEntryByUuidAndGroupId(
java.lang.String uuid, long groupId)
throws com.liferay.portal.kernel.exception.SystemException {
Object returnObj = null;
try {
returnObj = _invokableLocalService.invokeMethod(_methodName12,
_methodParameterTypes12,
new Object[] { ClpSerializer.translateInput(uuid), groupId });
}
catch (Throwable t) {
t = ClpSerializer.translateThrowable(t);
if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
throw (com.liferay.portal.kernel.exception.SystemException)t;
}
if (t instanceof RuntimeException) {
throw (RuntimeException)t;
}
else {
throw new RuntimeException(t.getClass().getName() +
" is not a valid exception");
}
}
return (com.liferay.docs.guestbook.model.Entry)ClpSerializer.translateOutput(returnObj);
}
// Generated CLP delegates for the get* lookups (slots 13-16). These declare
// PortalException, so the remote service signals a missing Entry by throwing
// rather than returning null.
// Returns the Entry with the given primary key (slot 13).
@Override
public com.liferay.docs.guestbook.model.Entry getEntry(long entryId)
throws com.liferay.portal.kernel.exception.PortalException,
com.liferay.portal.kernel.exception.SystemException {
Object returnObj = null;
try {
returnObj = _invokableLocalService.invokeMethod(_methodName13,
_methodParameterTypes13, new Object[] { entryId });
}
catch (Throwable t) {
t = ClpSerializer.translateThrowable(t);
if (t instanceof com.liferay.portal.kernel.exception.PortalException) {
throw (com.liferay.portal.kernel.exception.PortalException)t;
}
if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
throw (com.liferay.portal.kernel.exception.SystemException)t;
}
if (t instanceof RuntimeException) {
throw (RuntimeException)t;
}
else {
throw new RuntimeException(t.getClass().getName() +
" is not a valid exception");
}
}
return (com.liferay.docs.guestbook.model.Entry)ClpSerializer.translateOutput(returnObj);
}
// Returns the persisted model for an arbitrary serializable primary key
// (slot 14); part of Liferay's generic PersistedModelLocalService contract.
@Override
public com.liferay.portal.model.PersistedModel getPersistedModel(
java.io.Serializable primaryKeyObj)
throws com.liferay.portal.kernel.exception.PortalException,
com.liferay.portal.kernel.exception.SystemException {
Object returnObj = null;
try {
returnObj = _invokableLocalService.invokeMethod(_methodName14,
_methodParameterTypes14,
new Object[] { ClpSerializer.translateInput(primaryKeyObj) });
}
catch (Throwable t) {
t = ClpSerializer.translateThrowable(t);
if (t instanceof com.liferay.portal.kernel.exception.PortalException) {
throw (com.liferay.portal.kernel.exception.PortalException)t;
}
if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
throw (com.liferay.portal.kernel.exception.SystemException)t;
}
if (t instanceof RuntimeException) {
throw (RuntimeException)t;
}
else {
throw new RuntimeException(t.getClass().getName() +
" is not a valid exception");
}
}
return (com.liferay.portal.model.PersistedModel)ClpSerializer.translateOutput(returnObj);
}
// Returns the Entry matching the UUID within a company (slot 15).
@Override
public com.liferay.docs.guestbook.model.Entry getEntryByUuidAndCompanyId(
java.lang.String uuid, long companyId)
throws com.liferay.portal.kernel.exception.PortalException,
com.liferay.portal.kernel.exception.SystemException {
Object returnObj = null;
try {
returnObj = _invokableLocalService.invokeMethod(_methodName15,
_methodParameterTypes15,
new Object[] { ClpSerializer.translateInput(uuid), companyId });
}
catch (Throwable t) {
t = ClpSerializer.translateThrowable(t);
if (t instanceof com.liferay.portal.kernel.exception.PortalException) {
throw (com.liferay.portal.kernel.exception.PortalException)t;
}
if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
throw (com.liferay.portal.kernel.exception.SystemException)t;
}
if (t instanceof RuntimeException) {
throw (RuntimeException)t;
}
else {
throw new RuntimeException(t.getClass().getName() +
" is not a valid exception");
}
}
return (com.liferay.docs.guestbook.model.Entry)ClpSerializer.translateOutput(returnObj);
}
// Returns the Entry matching the UUID within a group (slot 16).
@Override
public com.liferay.docs.guestbook.model.Entry getEntryByUuidAndGroupId(
java.lang.String uuid, long groupId)
throws com.liferay.portal.kernel.exception.PortalException,
com.liferay.portal.kernel.exception.SystemException {
Object returnObj = null;
try {
returnObj = _invokableLocalService.invokeMethod(_methodName16,
_methodParameterTypes16,
new Object[] { ClpSerializer.translateInput(uuid), groupId });
}
catch (Throwable t) {
t = ClpSerializer.translateThrowable(t);
if (t instanceof com.liferay.portal.kernel.exception.PortalException) {
throw (com.liferay.portal.kernel.exception.PortalException)t;
}
if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
throw (com.liferay.portal.kernel.exception.SystemException)t;
}
if (t instanceof RuntimeException) {
throw (RuntimeException)t;
}
else {
throw new RuntimeException(t.getClass().getName() +
" is not a valid exception");
}
}
return (com.liferay.docs.guestbook.model.Entry)ClpSerializer.translateOutput(returnObj);
}
// Generated CLP delegates for the range listing, global count, and model
// update operations (slots 17-19).
// Returns the [start, end) range of all entries (slot 17).
@Override
public java.util.List<com.liferay.docs.guestbook.model.Entry> getEntries(
int start, int end)
throws com.liferay.portal.kernel.exception.SystemException {
Object returnObj = null;
try {
returnObj = _invokableLocalService.invokeMethod(_methodName17,
_methodParameterTypes17, new Object[] { start, end });
}
catch (Throwable t) {
t = ClpSerializer.translateThrowable(t);
if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
throw (com.liferay.portal.kernel.exception.SystemException)t;
}
if (t instanceof RuntimeException) {
throw (RuntimeException)t;
}
else {
throw new RuntimeException(t.getClass().getName() +
" is not a valid exception");
}
}
return (java.util.List<com.liferay.docs.guestbook.model.Entry>)ClpSerializer.translateOutput(returnObj);
}
// Returns the total number of entries (slot 18); remote result arrives as a
// boxed Integer and is unwrapped here.
@Override
public int getEntriesCount()
throws com.liferay.portal.kernel.exception.SystemException {
Object returnObj = null;
try {
returnObj = _invokableLocalService.invokeMethod(_methodName18,
_methodParameterTypes18, new Object[] {  });
}
catch (Throwable t) {
t = ClpSerializer.translateThrowable(t);
if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
throw (com.liferay.portal.kernel.exception.SystemException)t;
}
if (t instanceof RuntimeException) {
throw (RuntimeException)t;
}
else {
throw new RuntimeException(t.getClass().getName() +
" is not a valid exception");
}
}
return ((Integer)returnObj).intValue();
}
// Persists changes to the given Entry and returns the updated model (slot 19).
@Override
public com.liferay.docs.guestbook.model.Entry updateEntry(
com.liferay.docs.guestbook.model.Entry entry)
throws com.liferay.portal.kernel.exception.SystemException {
Object returnObj = null;
try {
returnObj = _invokableLocalService.invokeMethod(_methodName19,
_methodParameterTypes19,
new Object[] { ClpSerializer.translateInput(entry) });
}
catch (Throwable t) {
t = ClpSerializer.translateThrowable(t);
if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
throw (com.liferay.portal.kernel.exception.SystemException)t;
}
if (t instanceof RuntimeException) {
throw (RuntimeException)t;
}
else {
throw new RuntimeException(t.getClass().getName() +
" is not a valid exception");
}
}
return (com.liferay.docs.guestbook.model.Entry)ClpSerializer.translateOutput(returnObj);
}
// Generated CLP delegates for the Spring bean identifier accessors
// (slots 20-21) plus the Invokable entry point.
// Returns the Spring bean ID of the underlying service (slot 20).
@Override
public java.lang.String getBeanIdentifier() {
Object returnObj = null;
try {
returnObj = _invokableLocalService.invokeMethod(_methodName20,
_methodParameterTypes20, new Object[] {  });
}
catch (Throwable t) {
t = ClpSerializer.translateThrowable(t);
if (t instanceof RuntimeException) {
throw (RuntimeException)t;
}
else {
throw new RuntimeException(t.getClass().getName() +
" is not a valid exception");
}
}
return (java.lang.String)ClpSerializer.translateOutput(returnObj);
}
// Sets the Spring bean ID of the underlying service (slot 21); void, so no
// return translation is needed.
@Override
public void setBeanIdentifier(java.lang.String beanIdentifier) {
try {
_invokableLocalService.invokeMethod(_methodName21,
_methodParameterTypes21,
new Object[] { ClpSerializer.translateInput(beanIdentifier) });
}
catch (Throwable t) {
t = ClpSerializer.translateThrowable(t);
if (t instanceof RuntimeException) {
throw (RuntimeException)t;
}
else {
throw new RuntimeException(t.getClass().getName() +
" is not a valid exception");
}
}
}
// This class is only ever the *caller* side of the CLP bridge; inbound
// reflective invocation is not supported on it.
@Override
public java.lang.Object invokeMethod(java.lang.String name,
java.lang.String[] parameterTypes, java.lang.Object[] arguments)
throws java.lang.Throwable {
throw new UnsupportedOperationException();
}
// Generated CLP delegates for the application-specific finder methods
// (slots 23-26). Note the jump from slot 21 to 23 — slot 22 has no delegate
// in this class (presumably a method not exposed through the CLP interface;
// TODO confirm against the generated *LocalServiceClpInvoker).
// Returns entries in a group/guestbook whose name matches (slot 23).
@Override
public java.util.List<com.liferay.docs.guestbook.model.Entry> getEntriesByG_G_N(
long groupId, long guestbookId, java.lang.String name)
throws com.liferay.portal.kernel.exception.SystemException {
Object returnObj = null;
try {
returnObj = _invokableLocalService.invokeMethod(_methodName23,
_methodParameterTypes23,
new Object[] {
groupId,
guestbookId,
ClpSerializer.translateInput(name)
});
}
catch (Throwable t) {
t = ClpSerializer.translateThrowable(t);
if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
throw (com.liferay.portal.kernel.exception.SystemException)t;
}
if (t instanceof RuntimeException) {
throw (RuntimeException)t;
}
else {
throw new RuntimeException(t.getClass().getName() +
" is not a valid exception");
}
}
return (java.util.List<com.liferay.docs.guestbook.model.Entry>)ClpSerializer.translateOutput(returnObj);
}
// Returns all entries for a group/guestbook (slot 24).
@Override
public java.util.List<com.liferay.docs.guestbook.model.Entry> getEntries(
long groupId, long guestbookId)
throws com.liferay.portal.kernel.exception.SystemException {
Object returnObj = null;
try {
returnObj = _invokableLocalService.invokeMethod(_methodName24,
_methodParameterTypes24,
new Object[] { groupId, guestbookId });
}
catch (Throwable t) {
t = ClpSerializer.translateThrowable(t);
if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
throw (com.liferay.portal.kernel.exception.SystemException)t;
}
if (t instanceof RuntimeException) {
throw (RuntimeException)t;
}
else {
throw new RuntimeException(t.getClass().getName() +
" is not a valid exception");
}
}
return (java.util.List<com.liferay.docs.guestbook.model.Entry>)ClpSerializer.translateOutput(returnObj);
}
// Returns a page of entries for a group/guestbook filtered by workflow
// status (slot 25).
@Override
public java.util.List<com.liferay.docs.guestbook.model.Entry> getEntries(
long groupId, long guestbookId, int status, int start, int end)
throws com.liferay.portal.kernel.exception.SystemException {
Object returnObj = null;
try {
returnObj = _invokableLocalService.invokeMethod(_methodName25,
_methodParameterTypes25,
new Object[] { groupId, guestbookId, status, start, end });
}
catch (Throwable t) {
t = ClpSerializer.translateThrowable(t);
if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
throw (com.liferay.portal.kernel.exception.SystemException)t;
}
if (t instanceof RuntimeException) {
throw (RuntimeException)t;
}
else {
throw new RuntimeException(t.getClass().getName() +
" is not a valid exception");
}
}
return (java.util.List<com.liferay.docs.guestbook.model.Entry>)ClpSerializer.translateOutput(returnObj);
}
// Counts entries for a group/guestbook with the given status (slot 26).
@Override
public int getEntriesCount(long groupId, long guestbookId, int status)
throws com.liferay.portal.kernel.exception.SystemException {
Object returnObj = null;
try {
returnObj = _invokableLocalService.invokeMethod(_methodName26,
_methodParameterTypes26,
new Object[] { groupId, guestbookId, status });
}
catch (Throwable t) {
t = ClpSerializer.translateThrowable(t);
if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
throw (com.liferay.portal.kernel.exception.SystemException)t;
}
if (t instanceof RuntimeException) {
throw (RuntimeException)t;
}
else {
throw new RuntimeException(t.getClass().getName() +
" is not a valid exception");
}
}
return ((Integer)returnObj).intValue();
}
// Generated CLP delegates for the workflow-aware guestbook operations that
// take a ServiceContext (slots 27-30).
// Deletes an entry with service-context information (slot 27).
@Override
public com.liferay.docs.guestbook.model.Entry deleteEntry(long entryId,
com.liferay.portal.service.ServiceContext serviceContext)
throws com.liferay.portal.kernel.exception.PortalException,
com.liferay.portal.kernel.exception.SystemException {
Object returnObj = null;
try {
returnObj = _invokableLocalService.invokeMethod(_methodName27,
_methodParameterTypes27,
new Object[] {
entryId,
ClpSerializer.translateInput(serviceContext)
});
}
catch (Throwable t) {
t = ClpSerializer.translateThrowable(t);
if (t instanceof com.liferay.portal.kernel.exception.PortalException) {
throw (com.liferay.portal.kernel.exception.PortalException)t;
}
if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
throw (com.liferay.portal.kernel.exception.SystemException)t;
}
if (t instanceof RuntimeException) {
throw (RuntimeException)t;
}
else {
throw new RuntimeException(t.getClass().getName() +
" is not a valid exception");
}
}
return (com.liferay.docs.guestbook.model.Entry)ClpSerializer.translateOutput(returnObj);
}
// Adds a new guestbook entry on behalf of a user (slot 28); all reference
// arguments are translated before crossing the classloader boundary.
@Override
public com.liferay.docs.guestbook.model.Entry addEntry(long userId,
long guestbookId, java.lang.String name, java.lang.String email,
java.lang.String message,
com.liferay.portal.service.ServiceContext serviceContext)
throws com.liferay.portal.kernel.exception.PortalException,
com.liferay.portal.kernel.exception.SystemException {
Object returnObj = null;
try {
returnObj = _invokableLocalService.invokeMethod(_methodName28,
_methodParameterTypes28,
new Object[] {
userId,
guestbookId,
ClpSerializer.translateInput(name),
ClpSerializer.translateInput(email),
ClpSerializer.translateInput(message),
ClpSerializer.translateInput(serviceContext)
});
}
catch (Throwable t) {
t = ClpSerializer.translateThrowable(t);
if (t instanceof com.liferay.portal.kernel.exception.PortalException) {
throw (com.liferay.portal.kernel.exception.PortalException)t;
}
if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
throw (com.liferay.portal.kernel.exception.SystemException)t;
}
if (t instanceof RuntimeException) {
throw (RuntimeException)t;
}
else {
throw new RuntimeException(t.getClass().getName() +
" is not a valid exception");
}
}
return (com.liferay.docs.guestbook.model.Entry)ClpSerializer.translateOutput(returnObj);
}
// Updates an existing entry's fields (slot 29).
@Override
public com.liferay.docs.guestbook.model.Entry updateEntry(long userId,
long guestbookId, long entryId, java.lang.String name,
java.lang.String email, java.lang.String message,
com.liferay.portal.service.ServiceContext serviceContext)
throws com.liferay.portal.kernel.exception.PortalException,
com.liferay.portal.kernel.exception.SystemException {
Object returnObj = null;
try {
returnObj = _invokableLocalService.invokeMethod(_methodName29,
_methodParameterTypes29,
new Object[] {
userId,
guestbookId,
entryId,
ClpSerializer.translateInput(name),
ClpSerializer.translateInput(email),
ClpSerializer.translateInput(message),
ClpSerializer.translateInput(serviceContext)
});
}
catch (Throwable t) {
t = ClpSerializer.translateThrowable(t);
if (t instanceof com.liferay.portal.kernel.exception.PortalException) {
throw (com.liferay.portal.kernel.exception.PortalException)t;
}
if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
throw (com.liferay.portal.kernel.exception.SystemException)t;
}
if (t instanceof RuntimeException) {
throw (RuntimeException)t;
}
else {
throw new RuntimeException(t.getClass().getName() +
" is not a valid exception");
}
}
return (com.liferay.docs.guestbook.model.Entry)ClpSerializer.translateOutput(returnObj);
}
// Updates an entry's workflow status (slot 30).
@Override
public com.liferay.docs.guestbook.model.Entry updateStatus(long userId,
long entryId, int status,
com.liferay.portal.service.ServiceContext serviceContext)
throws com.liferay.portal.kernel.exception.PortalException,
com.liferay.portal.kernel.exception.SystemException {
Object returnObj = null;
try {
returnObj = _invokableLocalService.invokeMethod(_methodName30,
_methodParameterTypes30,
new Object[] {
userId,
entryId,
status,
ClpSerializer.translateInput(serviceContext)
});
}
catch (Throwable t) {
t = ClpSerializer.translateThrowable(t);
if (t instanceof com.liferay.portal.kernel.exception.PortalException) {
throw (com.liferay.portal.kernel.exception.PortalException)t;
}
if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
throw (com.liferay.portal.kernel.exception.SystemException)t;
}
if (t instanceof RuntimeException) {
throw (RuntimeException)t;
}
else {
throw new RuntimeException(t.getClass().getName() +
" is not a valid exception");
}
}
return (com.liferay.docs.guestbook.model.Entry)ClpSerializer.translateOutput(returnObj);
}
// Reflective dispatch table populated by the generator (presumably in the
// constructor, which is outside this view — TODO confirm): each
// _methodNameN/_methodParameterTypesN pair identifies one remote method slot
// used by the delegates above. Slot 22 is intentionally absent here, matching
// the jump from _methodName21 to _methodName23.
// Bridge to the real service implementation in the portal classloader.
private InvokableLocalService _invokableLocalService;
private String _methodName0;
private String[] _methodParameterTypes0;
private String _methodName1;
private String[] _methodParameterTypes1;
private String _methodName2;
private String[] _methodParameterTypes2;
private String _methodName3;
private String[] _methodParameterTypes3;
private String _methodName4;
private String[] _methodParameterTypes4;
private String _methodName5;
private String[] _methodParameterTypes5;
private String _methodName6;
private String[] _methodParameterTypes6;
private String _methodName7;
private String[] _methodParameterTypes7;
private String _methodName8;
private String[] _methodParameterTypes8;
private String _methodName9;
private String[] _methodParameterTypes9;
private String _methodName10;
private String[] _methodParameterTypes10;
private String _methodName11;
private String[] _methodParameterTypes11;
private String _methodName12;
private String[] _methodParameterTypes12;
private String _methodName13;
private String[] _methodParameterTypes13;
private String _methodName14;
private String[] _methodParameterTypes14;
private String _methodName15;
private String[] _methodParameterTypes15;
private String _methodName16;
private String[] _methodParameterTypes16;
private String _methodName17;
private String[] _methodParameterTypes17;
private String _methodName18;
private String[] _methodParameterTypes18;
private String _methodName19;
private String[] _methodParameterTypes19;
private String _methodName20;
private String[] _methodParameterTypes20;
private String _methodName21;
private String[] _methodParameterTypes21;
private String _methodName23;
private String[] _methodParameterTypes23;
private String _methodName24;
private String[] _methodParameterTypes24;
private String _methodName25;
private String[] _methodParameterTypes25;
private String _methodName26;
private String[] _methodParameterTypes26;
private String _methodName27;
private String[] _methodParameterTypes27;
private String _methodName28;
private String[] _methodParameterTypes28;
private String _methodName29;
private String[] _methodParameterTypes29;
private String _methodName30;
private String[] _methodParameterTypes30;
}
|
|
package ui.client;
import common.client.Func;
import jsinterop.annotations.JsOverlay;
import jsinterop.annotations.JsProperty;
import jsinterop.annotations.JsType;
import react.client.*;
import javax.inject.Inject;
import javax.inject.Singleton;
/**
 * GWT JsInterop wrapper for the Material-UI {@code Table} React component
 * ({@code $wnd.MaterialUi.Table}).
 *
 * <p>NOTE(review): the {@link Props} member names below form the JavaScript
 * binding contract ({@code @JsProperty} maps bean accessors to JS property
 * names) — renaming them would silently break the React props.
 */
@Singleton
public class Table extends ExternalComponent<Table.Props> {
    @Inject
    public Table() {
    }
    /** JSNI: resolves the React class from the global MaterialUi namespace. */
    @Override
    protected native ReactClass getReactClass() /*-{
        return $wnd.MaterialUi.Table;
    }-*/;
    /**
     * Native props object for the Table component. The commented list below
     * mirrors the upstream Material-UI prop reference; each prop is exposed
     * as a {@code @JsProperty} getter/setter pair plus a {@code @JsOverlay}
     * fluent setter.
     */
    @JsType(isNative = true)
    public interface Props extends BaseProps {
        //        boolean allRowsSelected;
        //        StyleProps bodyStyle;
        //        String className;
        //        boolean fixedFooter = true;
        //        boolean fixedHeader = true;
        //        StyleProps footerStyle;
        //        StyleProps headerStyle;
        //        String height; // defaults to inherit
        //        boolean multiSelectable;
        //        boolean selectable;
        //        StyleProps style;
        //        StyleProps wrapperStyle;
        //        MouseEventHandler onCellClick; // func - works in place of onClick
        //        MouseEventHandler onCellHover; // func
        //        MouseEventHandler onCellHoverExit; // func
        //        MouseEventHandler onRowHover; // func
        //        MouseEventHandler onRowHoverExit; // func
        //        Func.Run onRowSelection; // func
        @JsProperty
        boolean isAllRowsSelected();
        @JsProperty
        void setAllRowsSelected(boolean allRowsSelected);
        @JsProperty
        StyleProps getBodyStyle();
        @JsProperty
        void setBodyStyle(StyleProps bodyStyle);
        @JsProperty
        String getClassName();
        @JsProperty
        void setClassName(String className);
        @JsProperty
        boolean isFixedFooter();
        @JsProperty
        void setFixedFooter(boolean fixedFooter);
        @JsProperty
        boolean isFixedHeader();
        @JsProperty
        void setFixedHeader(boolean fixedHeader);
        @JsProperty
        StyleProps getFooterStyle();
        @JsProperty
        void setFooterStyle(StyleProps footerStyle);
        @JsProperty
        StyleProps getHeaderStyle();
        @JsProperty
        void setHeaderStyle(StyleProps headerStyle);
        @JsProperty
        String getHeight();
        @JsProperty
        void setHeight(String height);
        @JsProperty
        boolean isMultiSelectable();
        @JsProperty
        void setMultiSelectable(boolean multiSelectable);
        @JsProperty
        boolean isSelectable();
        @JsProperty
        void setSelectable(boolean selectable);
        @JsProperty
        StyleProps getStyle();
        @JsProperty
        void setStyle(StyleProps style);
        @JsProperty
        StyleProps getWrapperStyle();
        @JsProperty
        void setWrapperStyle(StyleProps wrapperStyle);
        @JsProperty
        MouseEventHandler getOnCellClick();
        @JsProperty
        void setOnCellClick(MouseEventHandler onCellClick);
        @JsProperty
        MouseEventHandler getOnCellHover();
        @JsProperty
        void setOnCellHover(MouseEventHandler onCellHover);
        @JsProperty
        MouseEventHandler getOnCellHoverExit();
        @JsProperty
        void setOnCellHoverExit(MouseEventHandler onCellHoverExit);
        @JsProperty
        MouseEventHandler getOnRowHover();
        @JsProperty
        void setOnRowHover(MouseEventHandler onRowHover);
        @JsProperty
        MouseEventHandler getOnRowHoverExit();
        @JsProperty
        void setOnRowHoverExit(MouseEventHandler onRowHoverExit);
        @JsProperty
        Func.Run getOnRowSelection();
        @JsProperty
        void setOnRowSelection(Func.Run onRowSelection);
        ////////////////////
        // fluent setters
        ////////////////////
        // Each @JsOverlay default delegates to the matching setter and
        // returns this Props for chaining; overlays run purely in Java and
        // add no members to the native JS object.
        @JsOverlay
        default Props allRowsSelected(final boolean allRowsSelected) {
            setAllRowsSelected(allRowsSelected);
            return this;
        }
        @JsOverlay
        default Props bodyStyle(final StyleProps bodyStyle) {
            setBodyStyle(bodyStyle);
            return this;
        }
        @JsOverlay
        default Props className(final String className) {
            setClassName(className);
            return this;
        }
        @JsOverlay
        default Props fixedFooter(final boolean fixedFooter) {
            setFixedFooter(fixedFooter);
            return this;
        }
        @JsOverlay
        default Props fixedHeader(final boolean fixedHeader) {
            setFixedHeader(fixedHeader);
            return this;
        }
        @JsOverlay
        default Props footerStyle(final StyleProps footerStyle) {
            setFooterStyle(footerStyle);
            return this;
        }
        @JsOverlay
        default Props headerStyle(final StyleProps headerStyle) {
            setHeaderStyle(headerStyle);
            return this;
        }
        @JsOverlay
        default Props height(final String height) {
            setHeight(height);
            return this;
        }
        @JsOverlay
        default Props multiSelectable(final boolean multiSelectable) {
            setMultiSelectable(multiSelectable);
            return this;
        }
        @JsOverlay
        default Props selectable(final boolean selectable) {
            setSelectable(selectable);
            return this;
        }
        @JsOverlay
        default Props style(final StyleProps style) {
            setStyle(style);
            return this;
        }
        @JsOverlay
        default Props wrapperStyle(final StyleProps wrapperStyle) {
            setWrapperStyle(wrapperStyle);
            return this;
        }
        @JsOverlay
        default Props onCellClick(final MouseEventHandler onCellClick) {
            setOnCellClick(onCellClick);
            return this;
        }
        @JsOverlay
        default Props onCellHover(final MouseEventHandler onCellHover) {
            setOnCellHover(onCellHover);
            return this;
        }
        @JsOverlay
        default Props onCellHoverExit(final MouseEventHandler onCellHoverExit) {
            setOnCellHoverExit(onCellHoverExit);
            return this;
        }
        @JsOverlay
        default Props onRowHover(final MouseEventHandler onRowHover) {
            setOnRowHover(onRowHover);
            return this;
        }
        @JsOverlay
        default Props onRowHoverExit(final MouseEventHandler onRowHoverExit) {
            setOnRowHoverExit(onRowHoverExit);
            return this;
        }
        @JsOverlay
        default Props onRowSelection(final Func.Run onRowSelection) {
            setOnRowSelection(onRowSelection);
            return this;
        }
    }
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.cache.distributed.dht;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.UUID;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.cluster.ClusterNode;
import org.apache.ignite.internal.IgniteInternalFuture;
import org.apache.ignite.internal.processors.affinity.AffinityTopologyVersion;
import org.apache.ignite.internal.processors.cache.CacheLockCandidates;
import org.apache.ignite.internal.processors.cache.CacheObject;
import org.apache.ignite.internal.processors.cache.GridCacheContext;
import org.apache.ignite.internal.processors.cache.GridCacheEntryRemovedException;
import org.apache.ignite.internal.processors.cache.GridCacheMultiTxFuture;
import org.apache.ignite.internal.processors.cache.GridCacheMvcc;
import org.apache.ignite.internal.processors.cache.GridCacheMvccCandidate;
import org.apache.ignite.internal.processors.cache.KeyCacheObject;
import org.apache.ignite.internal.processors.cache.distributed.GridDistributedCacheEntry;
import org.apache.ignite.internal.processors.cache.distributed.GridDistributedLockCancelledException;
import org.apache.ignite.internal.processors.cache.extras.GridCacheObsoleteEntryExtras;
import org.apache.ignite.internal.processors.cache.transactions.IgniteInternalTx;
import org.apache.ignite.internal.processors.cache.version.GridCacheVersion;
import org.apache.ignite.internal.util.lang.GridPlainRunnable;
import org.apache.ignite.internal.util.tostring.GridToStringInclude;
import org.apache.ignite.internal.util.typedef.C1;
import org.apache.ignite.internal.util.typedef.CI1;
import org.apache.ignite.internal.util.typedef.F;
import org.apache.ignite.internal.util.typedef.internal.S;
import org.apache.ignite.lang.IgniteBiTuple;
import org.apache.ignite.lang.IgniteClosure;
import org.jetbrains.annotations.Nullable;
/**
 * Replicated cache entry.
 */
@SuppressWarnings({"TooBroadScope", "NonPrivateFieldAccessedInSynchronizedContext"})
public class GridDhtCacheEntry extends GridDistributedCacheEntry {
    /** Size overhead. */
    private static final int DHT_SIZE_OVERHEAD = 16;

    /** Gets node value from reader ID. */
    private static final IgniteClosure<ReaderId, UUID> R2N = new C1<ReaderId, UUID>() {
        @Override public UUID apply(ReaderId e) {
            return e.nodeId();
        }
    };

    /** Reader clients. */
    @GridToStringInclude
    private volatile ReaderId[] rdrs = ReaderId.EMPTY_ARRAY;

    /** Local partition. */
    private final GridDhtLocalPartition locPart;

    /**
     * @param ctx Cache context.
     * @param topVer Topology version at the time of creation (if negative, then latest topology is assumed).
     * @param key Cache key.
     */
    public GridDhtCacheEntry(
        GridCacheContext ctx,
        AffinityTopologyVersion topVer,
        KeyCacheObject key
    ) {
        super(ctx, key);

        // Record this entry with partition.
        int p = cctx.affinity().partition(key);

        locPart = ctx.topology().localPartition(p, topVer, true);

        assert locPart != null : p;
    }

    /** {@inheritDoc} */
    @Override protected long nextPartitionCounter(AffinityTopologyVersion topVer,
        boolean primary,
        @Nullable Long primaryCntr) {
        return locPart.nextUpdateCounter(cctx.cacheId(), topVer, primary, primaryCntr);
    }

    /** {@inheritDoc} */
    @Override public int memorySize() throws IgniteCheckedException {
        int rdrsOverhead;

        // rdrs is replaced wholesale under this lock; read its length consistently.
        synchronized (this) {
            rdrsOverhead = ReaderId.READER_ID_SIZE * rdrs.length;
        }

        return super.memorySize() + DHT_SIZE_OVERHEAD + rdrsOverhead;
    }

    /** {@inheritDoc} */
    @Override public int partition() {
        return locPart.id();
    }

    /** {@inheritDoc} */
    @Override protected GridDhtLocalPartition localPartition() {
        return locPart;
    }

    /** {@inheritDoc} */
    @Override protected void onUpdateFinished(long cntr) {
        if (cctx.shared().database().persistenceEnabled())
            locPart.onUpdateReceived(cntr);
    }

    /** {@inheritDoc} */
    @Override public boolean isDht() {
        return true;
    }

    /** {@inheritDoc} */
    @Override public boolean partitionValid() {
        return locPart.valid();
    }

    /** {@inheritDoc} */
    @Override public void onMarkedObsolete() {
        assert !Thread.holdsLock(this);

        // Remove this entry from partition mapping.
        cctx.topology().onRemoved(this);
    }

    /**
     * @param nearVer Near version.
     * @param rmv If {@code true}, then add to removed list if not found.
     * @return Local candidate by near version.
     * @throws GridCacheEntryRemovedException If removed.
     */
    @Nullable synchronized GridCacheMvccCandidate localCandidateByNearVersion(GridCacheVersion nearVer,
        boolean rmv) throws GridCacheEntryRemovedException {
        checkObsolete();

        GridCacheMvcc mvcc = mvccExtras();

        if (mvcc != null) {
            for (GridCacheMvccCandidate c : mvcc.localCandidatesNoCopy(false)) {
                GridCacheVersion ver = c.otherVersion();

                if (ver != null && ver.equals(nearVer))
                    return c;
            }
        }

        if (rmv)
            addRemoved(nearVer);

        return null;
    }

    /**
     * Add local candidate.
     *
     * @param nearNodeId Near node ID.
     * @param nearVer Near version.
     * @param topVer Topology version.
     * @param threadId Owning thread ID.
     * @param ver Lock version.
     * @param serOrder Version for serializable transactions ordering.
     * @param timeout Timeout to acquire lock.
     * @param reenter Reentry flag.
     * @param tx Tx flag.
     * @param implicitSingle Implicit flag.
     * @param read Read lock flag.
     * @return New candidate.
     * @throws GridCacheEntryRemovedException If entry has been removed.
     * @throws GridDistributedLockCancelledException If lock was cancelled.
     */
    @Nullable GridCacheMvccCandidate addDhtLocal(
        UUID nearNodeId,
        GridCacheVersion nearVer,
        AffinityTopologyVersion topVer,
        long threadId,
        GridCacheVersion ver,
        @Nullable GridCacheVersion serOrder,
        long timeout,
        boolean reenter,
        boolean tx,
        boolean implicitSingle,
        boolean read)
        throws GridCacheEntryRemovedException, GridDistributedLockCancelledException {
        assert !reenter || serOrder == null;

        GridCacheMvccCandidate cand;
        CacheLockCandidates prev;
        CacheLockCandidates owner;

        CacheObject val;

        synchronized (this) {
            // Check removed locks prior to obsolete flag.
            checkRemoved(ver);
            checkRemoved(nearVer);

            checkObsolete();

            GridCacheMvcc mvcc = mvccExtras();

            if (mvcc == null) {
                mvcc = new GridCacheMvcc(cctx);

                mvccExtras(mvcc);
            }

            prev = mvcc.allOwners();

            boolean emptyBefore = mvcc.isEmpty();

            cand = mvcc.addLocal(
                this,
                nearNodeId,
                nearVer,
                threadId,
                ver,
                timeout,
                serOrder,
                reenter,
                tx,
                implicitSingle,
                /*dht-local*/true,
                read
            );

            if (cand == null)
                return null;

            cand.topologyVersion(topVer);

            owner = mvcc.allOwners();

            if (owner != null)
                cand.ownerVersion(owner.candidate(0).version());

            boolean emptyAfter = mvcc.isEmpty();

            checkCallbacks(emptyBefore, emptyAfter);

            val = this.val;

            if (mvcc.isEmpty())
                mvccExtras(null);
        }

        // Don't link reentries.
        if (!cand.reentry())
            // Link with other candidates in the same thread.
            cctx.mvcc().addNext(cctx, cand);

        checkOwnerChanged(prev, owner, val);

        return cand;
    }

    /** {@inheritDoc} */
    @Override public boolean tmLock(IgniteInternalTx tx,
        long timeout,
        @Nullable GridCacheVersion serOrder,
        GridCacheVersion serReadVer,
        boolean read
    ) throws GridCacheEntryRemovedException, GridDistributedLockCancelledException {
        if (tx.local()) {
            GridDhtTxLocalAdapter dhtTx = (GridDhtTxLocalAdapter)tx;

            // Null is returned if timeout is negative and there is other lock owner.
            return addDhtLocal(
                dhtTx.nearNodeId(),
                dhtTx.nearXidVersion(),
                tx.topologyVersion(),
                tx.threadId(),
                tx.xidVersion(),
                serOrder,
                timeout,
                /*reenter*/false,
                /*tx*/true,
                tx.implicitSingle(),
                read) != null;
        }

        try {
            addRemote(
                tx.nodeId(),
                tx.otherNodeId(),
                tx.threadId(),
                tx.xidVersion(),
                /*tx*/true,
                tx.implicit(),
                null);

            return true;
        }
        catch (GridDistributedLockCancelledException ignored) {
            if (log.isDebugEnabled())
                log.debug("Attempted to enter tx lock for cancelled ID (will ignore): " + tx);

            return false;
        }
    }

    /** {@inheritDoc} */
    @Override public GridCacheMvccCandidate removeLock() {
        GridCacheMvccCandidate ret = super.removeLock();

        locPart.onUnlock();

        return ret;
    }

    /** {@inheritDoc} */
    @Override public boolean removeLock(GridCacheVersion ver) throws GridCacheEntryRemovedException {
        boolean ret = super.removeLock(ver);

        locPart.onUnlock();

        return ret;
    }

    /** {@inheritDoc} */
    @Override public void onUnlock() {
        locPart.onUnlock();
    }

    /**
     * @param topVer Topology version (kept for API compatibility; the validity check below
     *     intentionally uses {@code AffinityTopologyVersion.NONE}).
     * @return Tuple with version and value of this entry, or {@code null} if entry is new.
     * @throws GridCacheEntryRemovedException If entry has been removed.
     */
    @SuppressWarnings({"NonPrivateFieldAccessedInSynchronizedContext"})
    @Nullable public synchronized IgniteBiTuple<GridCacheVersion, CacheObject> versionedValue(
        AffinityTopologyVersion topVer)
        throws GridCacheEntryRemovedException {
        if (isNew() || !valid(AffinityTopologyVersion.NONE) || deletedUnlocked())
            return null;
        else {
            CacheObject val0 = this.val;

            return F.t(ver, val0);
        }
    }

    /**
     * @return Readers.
     * @throws GridCacheEntryRemovedException If removed.
     */
    public Collection<UUID> readers() throws GridCacheEntryRemovedException {
        return F.viewReadOnly(checkReaders(), R2N);
    }

    /**
     * @param nodeId Node ID.
     * @return reader ID.
     */
    @Nullable public ReaderId readerId(UUID nodeId) {
        ReaderId[] rdrs = this.rdrs;

        for (ReaderId reader : rdrs) {
            if (reader.nodeId().equals(nodeId))
                return reader;
        }

        return null;
    }

    /**
     * @param nodeId Reader to add.
     * @param msgId Message ID.
     * @param topVer Topology version.
     * @return Future for all relevant transactions that were active at the time of adding reader,
     *      or {@code null} if reader was added
     * @throws GridCacheEntryRemovedException If entry was removed.
     */
    @SuppressWarnings("unchecked")
    @Nullable public IgniteInternalFuture<Boolean> addReader(UUID nodeId, long msgId, AffinityTopologyVersion topVer)
        throws GridCacheEntryRemovedException {
        // Don't add local node as reader.
        if (cctx.nodeId().equals(nodeId))
            return null;

        ClusterNode node = cctx.discovery().node(nodeId);

        if (node == null) {
            if (log.isDebugEnabled())
                log.debug("Ignoring near reader because node left the grid: " + nodeId);

            return null;
        }

        // If remote node has no near cache, don't add it.
        if (!cctx.discovery().cacheNearNode(node, cacheName())) {
            if (log.isDebugEnabled())
                log.debug("Ignoring near reader because near cache is disabled: " + nodeId);

            return null;
        }

        // If remote node is (primary?) or back up, don't add it as a reader.
        if (cctx.affinity().partitionBelongs(node, partition(), topVer)) {
            if (log.isDebugEnabled())
                log.debug("Ignoring near reader because remote node is affinity node [locNodeId=" + cctx.localNodeId()
                    + ", rmtNodeId=" + nodeId + ", key=" + key + ']');

            return null;
        }

        boolean ret = false;

        GridCacheMultiTxFuture txFut = null;

        Collection<GridCacheMvccCandidate> cands = null;

        ReaderId reader;

        synchronized (this) {
            checkObsolete();

            reader = readerId(nodeId);

            if (reader == null) {
                reader = new ReaderId(nodeId, msgId);

                ReaderId[] rdrs = Arrays.copyOf(this.rdrs, this.rdrs.length + 1);

                rdrs[rdrs.length - 1] = reader;

                // Seal.
                this.rdrs = rdrs;

                // No transactions in ATOMIC cache.
                if (!cctx.atomic()) {
                    txFut = reader.getOrCreateTxFuture(cctx);

                    cands = localCandidates();

                    ret = true;
                }
            }
            else {
                txFut = reader.txFuture();

                long id = reader.messageId();

                if (id < msgId)
                    reader.messageId(msgId);
            }
        }

        if (ret) {
            assert txFut != null;

            if (!F.isEmpty(cands)) {
                for (GridCacheMvccCandidate c : cands) {
                    IgniteInternalTx tx = cctx.tm().tx(c.version());

                    if (tx != null && tx.local())
                        txFut.addTx(tx);
                }
            }

            txFut.init();

            if (!txFut.isDone()) {
                final ReaderId reader0 = reader;

                txFut.listen(new CI1<IgniteInternalFuture<?>>() {
                    @Override public void apply(IgniteInternalFuture<?> f) {
                        cctx.kernalContext().closure().runLocalSafe(new GridPlainRunnable() {
                            @Override public void run() {
                                // Synchronize on the cache entry, not on the anonymous runnable:
                                // inside this inner class 'this' is the runnable instance, so
                                // 'synchronized (this)' would lock a throw-away object and give
                                // no mutual exclusion with other entry operations (compare the
                                // else-branch below, which locks the entry).
                                synchronized (GridDhtCacheEntry.this) {
                                    // Release memory.
                                    reader0.resetTxFuture();
                                }
                            }
                        });
                    }
                });
            }
            else {
                synchronized (this) {
                    // Release memory.
                    reader.resetTxFuture();
                }

                txFut = null;
            }
        }

        return txFut;
    }

    /**
     * @param nodeId Reader to remove.
     * @param msgId Message ID.
     * @return {@code True} if reader was removed as a result of this operation.
     * @throws GridCacheEntryRemovedException If entry was removed.
     */
    @SuppressWarnings("unchecked")
    public synchronized boolean removeReader(UUID nodeId, long msgId) throws GridCacheEntryRemovedException {
        checkObsolete();

        ReaderId[] rdrs = this.rdrs;

        int readerIdx = -1;

        for (int i = 0; i < rdrs.length; i++) {
            if (rdrs[i].nodeId().equals(nodeId)) {
                readerIdx = i;

                break;
            }
        }

        // Negative msgId (see clearReader) removes unconditionally.
        if (readerIdx == -1 || (rdrs[readerIdx].messageId() > msgId && msgId >= 0))
            return false;

        if (rdrs.length == 1)
            this.rdrs = ReaderId.EMPTY_ARRAY;
        else {
            ReaderId[] newRdrs = Arrays.copyOf(rdrs, rdrs.length - 1);

            System.arraycopy(rdrs, readerIdx + 1, newRdrs, readerIdx, rdrs.length - readerIdx - 1);

            // Seal.
            this.rdrs = newRdrs;
        }

        return true;
    }

    /**
     * Clears all readers (usually when partition becomes invalid and ready for eviction).
     */
    @SuppressWarnings("unchecked")
    @Override public synchronized void clearReaders() {
        rdrs = ReaderId.EMPTY_ARRAY;
    }

    /** {@inheritDoc} */
    @Override public synchronized void clearReader(UUID nodeId) throws GridCacheEntryRemovedException {
        removeReader(nodeId, -1);
    }

    /**
     * Marks entry as obsolete and, if possible or required, removes it
     * from swap storage.
     *
     * @param ver Obsolete version.
     * @param extras Obsolete entry extras attached when marking the entry obsolete.
     * @return {@code True} if entry was not being used, passed the filter and could be removed.
     * @throws IgniteCheckedException If failed to remove from swap.
     */
    public boolean clearInternal(
        GridCacheVersion ver,
        GridCacheObsoleteEntryExtras extras
    ) throws IgniteCheckedException {
        boolean rmv = false;

        try {
            synchronized (this) {
                // Call markObsolete0 to avoid recursive calls to clear if
                // we are clearing dht local partition (onMarkedObsolete should not be called).
                if (!markObsolete0(ver, false, extras)) {
                    if (log.isDebugEnabled())
                        log.debug("Entry could not be marked obsolete (it is still used or has readers): " + this);

                    return false;
                }

                rdrs = ReaderId.EMPTY_ARRAY;

                if (log.isDebugEnabled())
                    log.debug("Entry has been marked obsolete: " + this);

                if (log.isTraceEnabled()) {
                    log.trace("clearInternal [key=" + key +
                        ", entry=" + System.identityHashCode(this) +
                        ']');
                }

                removeValue();

                // Give to GC.
                update(null, 0L, 0L, ver, true);

                if (cctx.store().isLocal())
                    cctx.store().remove(null, key);

                rmv = true;

                return true;
            }
        }
        finally {
            if (rmv)
                cctx.cache().removeEntry(this); // Clear cache.
        }
    }

    /**
     * @return Collection of readers after check.
     * @throws GridCacheEntryRemovedException If removed.
     */
    public synchronized Collection<ReaderId> checkReaders() throws GridCacheEntryRemovedException {
        return checkReadersLocked();
    }

    /**
     * Prunes readers whose nodes have left the grid or no longer host the cache.
     *
     * @return Collection of readers after check.
     * @throws GridCacheEntryRemovedException If removed.
     */
    @SuppressWarnings({"unchecked", "ManualArrayToCollectionCopy"})
    protected Collection<ReaderId> checkReadersLocked() throws GridCacheEntryRemovedException {
        assert Thread.holdsLock(this);

        checkObsolete();

        ReaderId[] rdrs = this.rdrs;

        if (rdrs.length == 0)
            return Collections.emptySet();

        List<ReaderId> newRdrs = null;

        for (int i = 0; i < rdrs.length; i++) {
            ClusterNode node = cctx.discovery().getAlive(rdrs[i].nodeId());

            if (node == null || !cctx.discovery().cacheNode(node, cacheName())) {
                // Node has left and if new list has already been created, just skip.
                // Otherwise, create new list and add alive nodes.
                if (newRdrs == null) {
                    newRdrs = new ArrayList<>(rdrs.length);

                    for (int k = 0; k < i; k++)
                        newRdrs.add(rdrs[k]);
                }
            }
            // If node is still alive and no failed nodes
            // found yet, simply go to next iteration.
            else if (newRdrs != null)
                // Some of the nodes has left. Add to list.
                newRdrs.add(rdrs[i]);
        }

        if (newRdrs != null) {
            rdrs = newRdrs.toArray(new ReaderId[newRdrs.size()]);

            this.rdrs = rdrs;
        }

        return Arrays.asList(rdrs);
    }

    /** {@inheritDoc} */
    @Override protected synchronized boolean hasReaders() throws GridCacheEntryRemovedException {
        checkReadersLocked();

        return rdrs.length > 0;
    }

    /**
     * Sets mappings into entry.
     *
     * @param ver Version.
     * @param dhtNodeIds Mapped DHT nodes.
     * @param nearNodeIds Mapped near nodes.
     * @return Candidate, if one existed for the version, or {@code null} if candidate was not found.
     * @throws GridCacheEntryRemovedException If removed.
     */
    @Nullable public synchronized GridCacheMvccCandidate mappings(
        GridCacheVersion ver,
        Collection<ClusterNode> dhtNodeIds,
        Collection<ClusterNode> nearNodeIds
    ) throws GridCacheEntryRemovedException {
        checkObsolete();

        GridCacheMvcc mvcc = mvccExtras();

        GridCacheMvccCandidate cand = mvcc == null ? null : mvcc.candidate(ver);

        if (cand != null)
            cand.mappedNodeIds(dhtNodeIds, nearNodeIds);

        return cand;
    }

    /**
     * @param ver Version.
     * @param mappedNode Mapped node to remove.
     */
    public synchronized void removeMapping(GridCacheVersion ver, ClusterNode mappedNode) {
        GridCacheMvcc mvcc = mvccExtras();

        GridCacheMvccCandidate cand = mvcc == null ? null : mvcc.candidate(ver);

        if (cand != null)
            cand.removeMappedNode(mappedNode);
    }

    /**
     * @return Cache name.
     */
    protected final String cacheName() {
        return cctx.name();
    }

    /** {@inheritDoc} */
    @Override public synchronized String toString() {
        return S.toString(GridDhtCacheEntry.class, this, "super", super.toString());
    }

    /** {@inheritDoc} */
    @Override protected void incrementMapPublicSize() {
        locPart.incrementPublicSize(null, this);
    }

    /** {@inheritDoc} */
    @Override protected void decrementMapPublicSize() {
        locPart.decrementPublicSize(null, this);
    }

    /**
     * Reader ID.
     */
    private static class ReaderId {
        /** */
        private static final ReaderId[] EMPTY_ARRAY = new ReaderId[0];

        /** Reader ID size. */
        private static final int READER_ID_SIZE = 24;

        /** Node ID. */
        private UUID nodeId;

        /** Message ID. */
        private long msgId;

        /** Transaction future. */
        private GridCacheMultiTxFuture txFut;

        /**
         * @param nodeId Node ID.
         * @param msgId Message ID.
         */
        ReaderId(UUID nodeId, long msgId) {
            this.nodeId = nodeId;
            this.msgId = msgId;
        }

        /**
         * @return Node ID.
         */
        UUID nodeId() {
            return nodeId;
        }

        /**
         * @return Message ID.
         */
        long messageId() {
            return msgId;
        }

        /**
         * @param msgId Message ID.
         */
        void messageId(long msgId) {
            this.msgId = msgId;
        }

        /**
         * @param cctx Cache context.
         * @return Transaction future.
         */
        GridCacheMultiTxFuture getOrCreateTxFuture(GridCacheContext cctx) {
            if (txFut == null)
                txFut = new GridCacheMultiTxFuture<>(cctx);

            return txFut;
        }

        /**
         * @return Transaction future.
         */
        GridCacheMultiTxFuture txFuture() {
            return txFut;
        }

        /**
         * Sets multi-transaction future to {@code null}.
         *
         * @return Previous transaction future.
         */
        GridCacheMultiTxFuture resetTxFuture() {
            GridCacheMultiTxFuture txFut = this.txFut;

            this.txFut = null;

            return txFut;
        }

        /** {@inheritDoc} */
        @Override public boolean equals(Object o) {
            if (this == o)
                return true;

            if (!(o instanceof ReaderId))
                return false;

            ReaderId readerId = (ReaderId)o;

            return msgId == readerId.msgId && nodeId.equals(readerId.nodeId);
        }

        /** {@inheritDoc} */
        @Override public int hashCode() {
            int res = nodeId.hashCode();

            res = 31 * res + (int)(msgId ^ (msgId >>> 32));

            return res;
        }

        /** {@inheritDoc} */
        @Override public String toString() {
            return S.toString(ReaderId.class, this);
        }
    }
}
|
|
/*
* Floern, dev@floern.com, 2016, MIT Licence
*/
package com.floern.android.util;
import android.annotation.TargetApi;
import android.graphics.ColorFilter;
import android.graphics.MaskFilter;
import android.graphics.Paint;
import android.graphics.PathEffect;
import android.graphics.Shader;
import android.graphics.Typeface;
import android.graphics.Xfermode;
import android.os.Build;
import android.support.annotation.ColorInt;
import java.util.Locale;
/**
 * A utility class to create a <code>Paint</code> using the builder pattern.
 * Every setter delegates to the wrapped {@link Paint} and returns {@code this}
 * so calls can be chained; {@link #build()} returns the configured instance.
 *
 * @author Floern
 */
public class PaintBuilder {
    /** The Paint instance being configured; returned as-is by {@link #build()}. */
    private final Paint paint;

    /**
     * Create a new PaintBuilder based on a default Paint.
     */
    public PaintBuilder() {
        paint = new Paint();
    }

    /**
     * Wrap a PaintBuilder around an existing Paint instance.
     * Note: the source Paint is mutated directly, not copied.
     *
     * @param source a Paint
     */
    public PaintBuilder(Paint source) {
        paint = source;
    }

    /**
     * Set the paint's flags. Use the Flag enum to specify flag values.
     *
     * @param flags The new flag bits for the paint
     */
    public PaintBuilder setFlags(int flags) {
        paint.setFlags(flags);
        return this;
    }

    /**
     * Set the paint's hinting mode. May be either
     * {@link Paint#HINTING_OFF} or {@link Paint#HINTING_ON}.
     */
    public PaintBuilder setHinting(int mode) {
        paint.setHinting(mode);
        return this;
    }

    /**
     * Helper for setFlags(), setting or clearing the ANTI_ALIAS_FLAG bit
     * AntiAliasing smooths out the edges of what is being drawn, but it has
     * no impact on the interior of the shape. See setDither() and
     * setFilterBitmap() to affect how colors are treated.
     *
     * @param aa true to set the antialias bit in the flags, false to clear it
     */
    public PaintBuilder setAntiAlias(boolean aa) {
        paint.setAntiAlias(aa);
        return this;
    }

    /**
     * Helper for setFlags(), setting or clearing the DITHER_FLAG bit
     * Dithering affects how colors that are higher precision than the device
     * are down-sampled. No dithering is generally faster, but higher precision
     * colors are just truncated down (e.g. 8888 -> 565). Dithering tries to
     * distribute the error inherent in this process, to reduce the visual
     * artifacts.
     *
     * @param dither true to set the dithering bit in flags, false to clear it
     */
    public PaintBuilder setDither(boolean dither) {
        paint.setDither(dither);
        return this;
    }

    /**
     * Helper for setFlags(), setting or clearing the LINEAR_TEXT_FLAG bit
     *
     * @param linearText true to set the linearText bit in the paint's flags,
     *                   false to clear it.
     */
    public PaintBuilder setLinearText(boolean linearText) {
        paint.setLinearText(linearText);
        return this;
    }

    /**
     * Helper for setFlags(), setting or clearing the SUBPIXEL_TEXT_FLAG bit
     *
     * @param subpixelText true to set the subpixelText bit in the paint's
     *                     flags, false to clear it.
     */
    public PaintBuilder setSubpixelText(boolean subpixelText) {
        paint.setSubpixelText(subpixelText);
        return this;
    }

    /**
     * Helper for setFlags(), setting or clearing the UNDERLINE_TEXT_FLAG bit
     *
     * @param underlineText true to set the underlineText bit in the paint's
     *                      flags, false to clear it.
     */
    public PaintBuilder setUnderlineText(boolean underlineText) {
        paint.setUnderlineText(underlineText);
        return this;
    }

    /**
     * Helper for setFlags(), setting or clearing the STRIKE_THRU_TEXT_FLAG bit
     *
     * @param strikeThruText true to set the strikeThruText bit in the paint's
     *                       flags, false to clear it.
     */
    public PaintBuilder setStrikeThruText(boolean strikeThruText) {
        paint.setStrikeThruText(strikeThruText);
        return this;
    }

    /**
     * Helper for setFlags(), setting or clearing the FAKE_BOLD_TEXT_FLAG bit
     *
     * @param fakeBoldText true to set the fakeBoldText bit in the paint's
     *                     flags, false to clear it.
     */
    public PaintBuilder setFakeBoldText(boolean fakeBoldText) {
        paint.setFakeBoldText(fakeBoldText);
        return this;
    }

    /**
     * Helper for setFlags(), setting or clearing the FILTER_BITMAP_FLAG bit.
     * Filtering affects the sampling of bitmaps when they are transformed.
     * Filtering does not affect how the colors in the bitmap are converted into
     * device pixels. That is dependent on dithering and xfermodes.
     *
     * @param filter true to set the FILTER_BITMAP_FLAG bit in the paint's
     *               flags, false to clear it.
     */
    public PaintBuilder setFilterBitmap(boolean filter) {
        paint.setFilterBitmap(filter);
        return this;
    }

    /**
     * Set the paint's style, used for controlling how primitives'
     * geometries are interpreted (except for drawBitmap, which always assumes
     * Fill).
     *
     * @param style The new style to set in the paint
     */
    public PaintBuilder setStyle(Paint.Style style) {
        paint.setStyle(style);
        return this;
    }

    /**
     * Set the paint's color. Note that the color is an int containing alpha
     * as well as r,g,b. This 32bit value is not premultiplied, meaning that
     * its alpha can be any value, regardless of the values of r,g,b.
     * See the Color class for more details.
     *
     * @param color The new color (including alpha) to set in the paint.
     */
    public PaintBuilder setColor(@ColorInt int color) {
        paint.setColor(color);
        return this;
    }

    /**
     * Helper to setColor(), that only assigns the color's alpha value,
     * leaving its r,g,b values unchanged. Results are undefined if the alpha
     * value is outside of the range [0..255]
     *
     * @param a set the alpha component [0..255] of the paint's color.
     */
    public PaintBuilder setAlpha(int a) {
        paint.setAlpha(a);
        return this;
    }

    /**
     * Helper to setColor(), that takes a,r,g,b and constructs the color int
     *
     * @param a The new alpha component (0..255) of the paint's color.
     * @param r The new red component (0..255) of the paint's color.
     * @param g The new green component (0..255) of the paint's color.
     * @param b The new blue component (0..255) of the paint's color.
     */
    public PaintBuilder setARGB(int a, int r, int g, int b) {
        paint.setARGB(a, r, g, b);
        return this;
    }

    /**
     * Set the width for stroking.
     * Pass 0 to stroke in hairline mode.
     * Hairlines always draw a single pixel independent of the canvas's matrix.
     *
     * @param width set the paint's stroke width, used whenever the paint's
     *              style is Stroke or StrokeAndFill.
     */
    public PaintBuilder setStrokeWidth(float width) {
        paint.setStrokeWidth(width);
        return this;
    }

    /**
     * Set the paint's stroke miter value. This is used to control the behavior
     * of miter joins when the joins angle is sharp. This value must be >= 0.
     *
     * @param miter set the miter limit on the paint, used whenever the paint's
     *              style is Stroke or StrokeAndFill.
     */
    public PaintBuilder setStrokeMiter(float miter) {
        paint.setStrokeMiter(miter);
        return this;
    }

    /**
     * Set the paint's Cap.
     *
     * @param cap set the paint's line cap style, used whenever the paint's
     *            style is Stroke or StrokeAndFill.
     */
    public PaintBuilder setStrokeCap(Paint.Cap cap) {
        paint.setStrokeCap(cap);
        return this;
    }

    /**
     * Set the paint's Join.
     *
     * @param join set the paint's Join, used whenever the paint's style is
     *             Stroke or StrokeAndFill.
     */
    public PaintBuilder setStrokeJoin(Paint.Join join) {
        paint.setStrokeJoin(join);
        return this;
    }

    /**
     * Set or clear the shader object.
     * <p/>
     * Pass null to clear any previous shader.
     *
     * @param shader May be null. the new shader to be installed in the paint
     */
    public PaintBuilder setShader(Shader shader) {
        paint.setShader(shader);
        return this;
    }

    /**
     * Set or clear the paint's colorfilter.
     *
     * @param filter May be null. The new filter to be installed in the paint
     */
    public PaintBuilder setColorFilter(ColorFilter filter) {
        paint.setColorFilter(filter);
        return this;
    }

    /**
     * Set or clear the xfermode object.
     * <p/>
     * Pass null to clear any previous xfermode.
     *
     * @param xfermode May be null. The xfermode to be installed in the paint
     */
    public PaintBuilder setXfermode(Xfermode xfermode) {
        paint.setXfermode(xfermode);
        return this;
    }

    /**
     * Set or clear the patheffect object.
     * <p/>
     * Pass null to clear any previous patheffect.
     *
     * @param effect May be null. The patheffect to be installed in the paint
     */
    public PaintBuilder setPathEffect(PathEffect effect) {
        paint.setPathEffect(effect);
        return this;
    }

    /**
     * Set or clear the maskfilter object.
     * <p/>
     * Pass null to clear any previous maskfilter.
     *
     * @param maskfilter May be null. The maskfilter to be installed in the
     *                   paint
     */
    public PaintBuilder setMaskFilter(MaskFilter maskfilter) {
        paint.setMaskFilter(maskfilter);
        return this;
    }

    /**
     * Set or clear the typeface object.
     * <p/>
     * Pass null to clear any previous typeface.
     *
     * @param typeface May be null. The typeface to be installed in the paint
     */
    public PaintBuilder setTypeface(Typeface typeface) {
        paint.setTypeface(typeface);
        return this;
    }

    /**
     * This draws a shadow layer below the main layer, with the specified
     * offset and color, and blur radius. If radius is 0, then the shadow
     * layer is removed.
     * <p/>
     * Can be used to create a blurred shadow underneath text. Support for use
     * with other drawing operations is constrained to the software rendering
     * pipeline.
     * <p/>
     * The alpha of the shadow will be the paint's alpha if the shadow color is
     * opaque, or the alpha from the shadow color if not.
     */
    public PaintBuilder setShadowLayer(float radius, float dx, float dy, int shadowColor) {
        paint.setShadowLayer(radius, dx, dy, shadowColor);
        return this;
    }

    /**
     * Set the paint's text alignment. This controls how the
     * text is positioned relative to its origin. LEFT align means that all of
     * the text will be drawn to the right of its origin (i.e. the origin
     * specifies the LEFT edge of the text) and so on.
     *
     * @param align set the paint's Align value for drawing text.
     */
    public PaintBuilder setTextAlign(Paint.Align align) {
        paint.setTextAlign(align);
        return this;
    }

    /**
     * Set the text locale.
     * <p/>
     * The text locale affects how the text is drawn for some languages.
     * <p/>
     * For example, if the locale is {@link Locale#CHINESE} or {@link Locale#CHINA},
     * then the text renderer will prefer to draw text using a Chinese font. Likewise,
     * if the locale is {@link Locale#JAPANESE} or {@link Locale#JAPAN}, then the text
     * renderer will prefer to draw text using a Japanese font.
     * <p/>
     * This distinction is important because Chinese and Japanese text both use many
     * of the same Unicode code points but their appearance is subtly different for
     * each language.
     * <p/>
     * By default, the text locale is initialized to the system locale (as returned
     * by {@link Locale#getDefault}). This assumes that the text to be rendered will
     * most likely be in the user's preferred language.
     * <p/>
     * If the actual language of the text is known, then it can be provided to the
     * text renderer using this method. The text renderer may attempt to guess the
     * language script based on the contents of the text to be drawn independent of
     * the text locale here. Specifying the text locale just helps it do a better
     * job in certain ambiguous cases.
     *
     * @param locale the paint's locale value for drawing text, must not be null.
     */
    @TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR1)
    public PaintBuilder setTextLocale(Locale locale) {
        paint.setTextLocale(locale);
        return this;
    }

    /**
     * Set the paint's elegant height metrics flag. This setting selects font
     * variants that have not been compacted to fit Latin-based vertical
     * metrics, and also increases top and bottom bounds to provide more space.
     *
     * @param elegant set the paint's elegant metrics flag for drawing text.
     */
    @TargetApi(Build.VERSION_CODES.LOLLIPOP)
    public PaintBuilder setElegantTextHeight(boolean elegant) {
        paint.setElegantTextHeight(elegant);
        return this;
    }

    /**
     * Set the paint's text size. This value must be > 0
     *
     * @param textSize set the paint's text size.
     */
    public PaintBuilder setTextSize(float textSize) {
        paint.setTextSize(textSize);
        return this;
    }

    /**
     * Set the paint's horizontal scale factor for text. The default value
     * is 1.0. Values > 1.0 will stretch the text wider. Values < 1.0 will
     * stretch the text narrower.
     *
     * @param scaleX set the paint's scale in X for drawing/measuring text.
     */
    public PaintBuilder setTextScaleX(float scaleX) {
        paint.setTextScaleX(scaleX);
        return this;
    }

    /**
     * Set the paint's horizontal skew factor for text. The default value
     * is 0. For approximating oblique text, use values around -0.25.
     *
     * @param skewX set the paint's skew factor in X for drawing text.
     */
    public PaintBuilder setTextSkewX(float skewX) {
        paint.setTextSkewX(skewX);
        return this;
    }

    /**
     * Set the paint's letter-spacing for text. The default value
     * is 0. The value is in 'EM' units. Typical values for slight
     * expansion will be around 0.05. Negative values tighten text.
     *
     * @param letterSpacing set the paint's letter-spacing for drawing text.
     */
    @TargetApi(Build.VERSION_CODES.LOLLIPOP)
    public PaintBuilder setLetterSpacing(float letterSpacing) {
        paint.setLetterSpacing(letterSpacing);
        return this;
    }

    /**
     * Set font feature settings.
     * <p/>
     * The format is the same as the CSS font-feature-settings attribute:
     * http://dev.w3.org/csswg/css-fonts/#propdef-font-feature-settings
     *
     * @param settings the font feature settings string to use, may be null.
     */
    @TargetApi(Build.VERSION_CODES.LOLLIPOP)
    public PaintBuilder setFontFeatureSettings(String settings) {
        paint.setFontFeatureSettings(settings);
        return this;
    }

    /**
     * Get the Paint.
     *
     * @return built Paint instance
     */
    public Paint build() {
        return paint;
    }
}
|
|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.veracloud.jton.internal;
import java.io.ObjectStreamException;
import java.io.Serializable;
import java.util.AbstractMap;
import java.util.AbstractSet;
import java.util.Comparator;
import java.util.ConcurrentModificationException;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.NoSuchElementException;
import java.util.Set;
/**
* A map of comparable keys to values. Unlike {@code TreeMap}, this class uses
* insertion order for iteration order. Comparison order is only used as an
* optimization for efficient insertion and removal.
*
* <p>This implementation was derived from Android 4.1's TreeMap class.
*/
public final class LinkedTreeMap<K, V> extends AbstractMap<K, V> implements Serializable {
  @SuppressWarnings({ "unchecked", "rawtypes" }) // to avoid Comparable<Comparable<Comparable<...>>>
  private static final Comparator<Comparable> NATURAL_ORDER = new Comparator<Comparable>() {
    public int compare(Comparable a, Comparable b) {
      return a.compareTo(b);
    }
  };
  // Governs tree placement only; iteration order is the doubly-linked list below.
  Comparator<? super K> comparator;
  // Root of the AVL tree; null when the map is empty.
  Node<K, V> root;
  int size = 0;
  // Bumped on every structural change; iterators use it to fail fast.
  int modCount = 0;
  // Used to preserve iteration order
  final Node<K, V> header = new Node<K, V>();
  /**
   * Create a natural order, empty tree map whose keys must be mutually
   * comparable and non-null.
   */
  @SuppressWarnings("unchecked") // unsafe! this assumes K is comparable
  public LinkedTreeMap() {
    this((Comparator<? super K>) NATURAL_ORDER);
  }
  /**
   * Create a tree map ordered by {@code comparator}. This map's keys may only
   * be null if {@code comparator} permits.
   *
   * @param comparator the comparator to order elements with, or {@code null} to
   *     use the natural ordering.
   */
  @SuppressWarnings({ "unchecked", "rawtypes" }) // unsafe! if comparator is null, this assumes K is comparable
  public LinkedTreeMap(Comparator<? super K> comparator) {
    this.comparator = comparator != null
        ? comparator
        : (Comparator) NATURAL_ORDER;
  }
  @Override public int size() {
    return size;
  }
  @Override public V get(Object key) {
    Node<K, V> node = findByObject(key);
    return node != null ? node.value : null;
  }
  @Override public boolean containsKey(Object key) {
    return findByObject(key) != null;
  }
  @Override public V put(K key, V value) {
    if (key == null) {
      throw new NullPointerException("key == null");
    }
    // find(create=true) returns an existing node or inserts a new one.
    Node<K, V> created = find(key, true);
    V result = created.value;
    created.value = value;
    return result;
  }
  @Override public void clear() {
    root = null;
    size = 0;
    modCount++;
    // Clear iteration order
    Node<K, V> header = this.header;
    header.next = header.prev = header;
  }
  @Override public V remove(Object key) {
    Node<K, V> node = removeInternalByKey(key);
    return node != null ? node.value : null;
  }
  /**
   * Returns the node at or adjacent to the given key, creating it if requested.
   *
   * @throws ClassCastException if {@code key} and the tree's keys aren't
   *     mutually comparable.
   */
  Node<K, V> find(K key, boolean create) {
    Comparator<? super K> comparator = this.comparator;
    Node<K, V> nearest = root;
    int comparison = 0;
    if (nearest != null) {
      // Micro-optimization: avoid polymorphic calls to Comparator.compare().
      @SuppressWarnings("unchecked") // Throws a ClassCastException below if there's trouble.
      Comparable<Object> comparableKey = (comparator == NATURAL_ORDER)
          ? (Comparable<Object>) key
          : null;
      while (true) {
        comparison = (comparableKey != null)
            ? comparableKey.compareTo(nearest.key)
            : comparator.compare(key, nearest.key);
        // We found the requested key.
        if (comparison == 0) {
          return nearest;
        }
        // If it exists, the key is in a subtree. Go deeper.
        Node<K, V> child = (comparison < 0) ? nearest.left : nearest.right;
        if (child == null) {
          break;
        }
        nearest = child;
      }
    }
    // The key doesn't exist in this tree.
    if (!create) {
      return null;
    }
    // Create the node and add it to the tree or the table.
    // New nodes are always linked in just before the header, i.e. appended
    // to the end of the insertion-order list.
    Node<K, V> header = this.header;
    Node<K, V> created;
    if (nearest == null) {
      // Check that the value is comparable if we didn't do any comparisons.
      if (comparator == NATURAL_ORDER && !(key instanceof Comparable)) {
        throw new ClassCastException(key.getClass().getName() + " is not Comparable");
      }
      created = new Node<K, V>(nearest, key, header, header.prev);
      root = created;
    } else {
      created = new Node<K, V>(nearest, key, header, header.prev);
      if (comparison < 0) { // nearest.key is higher
        nearest.left = created;
      } else { // comparison > 0, nearest.key is lower
        nearest.right = created;
      }
      rebalance(nearest, true);
    }
    size++;
    modCount++;
    return created;
  }
  @SuppressWarnings("unchecked")
  Node<K, V> findByObject(Object key) {
    try {
      return key != null ? find((K) key, false) : null;
    } catch (ClassCastException e) {
      // An incompatible key can't be in this map; treat as absent.
      return null;
    }
  }
  /**
   * Returns this map's entry that has the same key and value as {@code
   * entry}, or null if this map has no such entry.
   *
   * <p>This method uses the comparator for key equality rather than {@code
   * equals}. If this map's comparator isn't consistent with equals (such as
   * {@code String.CASE_INSENSITIVE_ORDER}), then {@code remove()} and {@code
   * contains()} will violate the collections API.
   */
  Node<K, V> findByEntry(Entry<?, ?> entry) {
    Node<K, V> mine = findByObject(entry.getKey());
    boolean valuesEqual = mine != null && equal(mine.value, entry.getValue());
    return valuesEqual ? mine : null;
  }
  // Null-safe equals (equivalent to Objects.equals).
  private boolean equal(Object a, Object b) {
    return a == b || (a != null && a.equals(b));
  }
  /**
   * Removes {@code node} from this tree, rearranging the tree's structure as
   * necessary.
   *
   * @param unlink true to also unlink this node from the iteration linked list.
   */
  void removeInternal(Node<K, V> node, boolean unlink) {
    if (unlink) {
      node.prev.next = node.next;
      node.next.prev = node.prev;
    }
    Node<K, V> left = node.left;
    Node<K, V> right = node.right;
    Node<K, V> originalParent = node.parent;
    if (left != null && right != null) {
      /*
       * To remove a node with both left and right subtrees, move an
       * adjacent node from one of those subtrees into this node's place.
       *
       * Removing the adjacent node may change this node's subtrees. This
       * node may no longer have two subtrees once the adjacent node is
       * gone!
       */
      // Pick the in-order neighbor from the taller subtree to limit height change.
      Node<K, V> adjacent = (left.height > right.height) ? left.last() : right.first();
      removeInternal(adjacent, false); // takes care of rebalance and size--
      int leftHeight = 0;
      left = node.left;
      if (left != null) {
        leftHeight = left.height;
        adjacent.left = left;
        left.parent = adjacent;
        node.left = null;
      }
      int rightHeight = 0;
      right = node.right;
      if (right != null) {
        rightHeight = right.height;
        adjacent.right = right;
        right.parent = adjacent;
        node.right = null;
      }
      adjacent.height = Math.max(leftHeight, rightHeight) + 1;
      replaceInParent(node, adjacent);
      return;
    } else if (left != null) {
      replaceInParent(node, left);
      node.left = null;
    } else if (right != null) {
      replaceInParent(node, right);
      node.right = null;
    } else {
      replaceInParent(node, null);
    }
    rebalance(originalParent, false);
    size--;
    modCount++;
  }
  Node<K, V> removeInternalByKey(Object key) {
    Node<K, V> node = findByObject(key);
    if (node != null) {
      removeInternal(node, true);
    }
    return node;
  }
  // Splices replacement into node's position; updates root when node was the root.
  private void replaceInParent(Node<K, V> node, Node<K, V> replacement) {
    Node<K, V> parent = node.parent;
    node.parent = null;
    if (replacement != null) {
      replacement.parent = parent;
    }
    if (parent != null) {
      if (parent.left == node) {
        parent.left = replacement;
      } else {
        assert (parent.right == node);
        parent.right = replacement;
      }
    } else {
      root = replacement;
    }
  }
  /**
   * Rebalances the tree by making any AVL rotations necessary between the
   * newly-unbalanced node and the tree's root.
   *
   * @param insert true if the node was unbalanced by an insert; false if it
   *     was by a removal.
   */
  private void rebalance(Node<K, V> unbalanced, boolean insert) {
    // Walk from the changed node up to the root, fixing heights and rotating
    // wherever the AVL balance factor (leftHeight - rightHeight) hits +/-2.
    for (Node<K, V> node = unbalanced; node != null; node = node.parent) {
      Node<K, V> left = node.left;
      Node<K, V> right = node.right;
      int leftHeight = left != null ? left.height : 0;
      int rightHeight = right != null ? right.height : 0;
      int delta = leftHeight - rightHeight;
      if (delta == -2) {
        Node<K, V> rightLeft = right.left;
        Node<K, V> rightRight = right.right;
        int rightRightHeight = rightRight != null ? rightRight.height : 0;
        int rightLeftHeight = rightLeft != null ? rightLeft.height : 0;
        int rightDelta = rightLeftHeight - rightRightHeight;
        if (rightDelta == -1 || (rightDelta == 0 && !insert)) {
          rotateLeft(node); // AVL right right
        } else {
          assert (rightDelta == 1);
          rotateRight(right); // AVL right left
          rotateLeft(node);
        }
        if (insert) {
          break; // no further rotations will be necessary
        }
      } else if (delta == 2) {
        Node<K, V> leftLeft = left.left;
        Node<K, V> leftRight = left.right;
        int leftRightHeight = leftRight != null ? leftRight.height : 0;
        int leftLeftHeight = leftLeft != null ? leftLeft.height : 0;
        int leftDelta = leftLeftHeight - leftRightHeight;
        if (leftDelta == 1 || (leftDelta == 0 && !insert)) {
          rotateRight(node); // AVL left left
        } else {
          assert (leftDelta == -1);
          rotateLeft(left); // AVL left right
          rotateRight(node);
        }
        if (insert) {
          break; // no further rotations will be necessary
        }
      } else if (delta == 0) {
        node.height = leftHeight + 1; // leftHeight == rightHeight
        if (insert) {
          break; // the insert caused balance, so rebalancing is done!
        }
      } else {
        assert (delta == -1 || delta == 1);
        node.height = Math.max(leftHeight, rightHeight) + 1;
        if (!insert) {
          break; // the height hasn't changed, so rebalancing is done!
        }
      }
    }
  }
  /**
   * Rotates the subtree so that its root's right child is the new root.
   */
  private void rotateLeft(Node<K, V> root) {
    Node<K, V> left = root.left;
    Node<K, V> pivot = root.right;
    Node<K, V> pivotLeft = pivot.left;
    Node<K, V> pivotRight = pivot.right;
    // move the pivot's left child to the root's right
    root.right = pivotLeft;
    if (pivotLeft != null) {
      pivotLeft.parent = root;
    }
    replaceInParent(root, pivot);
    // move the root to the pivot's left
    pivot.left = root;
    root.parent = pivot;
    // fix heights
    root.height = Math.max(left != null ? left.height : 0,
        pivotLeft != null ? pivotLeft.height : 0) + 1;
    pivot.height = Math.max(root.height,
        pivotRight != null ? pivotRight.height : 0) + 1;
  }
  /**
   * Rotates the subtree so that its root's left child is the new root.
   */
  private void rotateRight(Node<K, V> root) {
    Node<K, V> pivot = root.left;
    Node<K, V> right = root.right;
    Node<K, V> pivotLeft = pivot.left;
    Node<K, V> pivotRight = pivot.right;
    // move the pivot's right child to the root's left
    root.left = pivotRight;
    if (pivotRight != null) {
      pivotRight.parent = root;
    }
    replaceInParent(root, pivot);
    // move the root to the pivot's right
    pivot.right = root;
    root.parent = pivot;
    // fixup heights
    root.height = Math.max(right != null ? right.height : 0,
        pivotRight != null ? pivotRight.height : 0) + 1;
    pivot.height = Math.max(root.height,
        pivotLeft != null ? pivotLeft.height : 0) + 1;
  }
  // Lazily-created views; cached after first use. Not thread-safe, like the map.
  private EntrySet entrySet;
  private KeySet keySet;
  @Override public Set<Entry<K, V>> entrySet() {
    EntrySet result = entrySet;
    return result != null ? result : (entrySet = new EntrySet());
  }
  @Override public Set<K> keySet() {
    KeySet result = keySet;
    return result != null ? result : (keySet = new KeySet());
  }
  // A node participates in two structures at once: the AVL tree
  // (parent/left/right/height) and the insertion-order list (next/prev).
  static final class Node<K, V> implements Entry<K, V> {
    Node<K, V> parent;
    Node<K, V> left;
    Node<K, V> right;
    Node<K, V> next;
    Node<K, V> prev;
    final K key;
    V value;
    int height;
    /** Create the header entry */
    Node() {
      key = null;
      next = prev = this;
    }
    /** Create a regular entry */
    Node(Node<K, V> parent, K key, Node<K, V> next, Node<K, V> prev) {
      this.parent = parent;
      this.key = key;
      this.height = 1;
      this.next = next;
      this.prev = prev;
      prev.next = this;
      next.prev = this;
    }
    public K getKey() {
      return key;
    }
    public V getValue() {
      return value;
    }
    public V setValue(V value) {
      V oldValue = this.value;
      this.value = value;
      return oldValue;
    }
    @SuppressWarnings("rawtypes")
    @Override public boolean equals(Object o) {
      if (o instanceof Entry) {
        Entry other = (Entry) o;
        return (key == null ? other.getKey() == null : key.equals(other.getKey()))
            && (value == null ? other.getValue() == null : value.equals(other.getValue()));
      }
      return false;
    }
    @Override public int hashCode() {
      // Matches the Map.Entry.hashCode() contract.
      return (key == null ? 0 : key.hashCode())
          ^ (value == null ? 0 : value.hashCode());
    }
    @Override public String toString() {
      return key + "=" + value;
    }
    /**
     * Returns the first node in this subtree.
     */
    public Node<K, V> first() {
      Node<K, V> node = this;
      Node<K, V> child = node.left;
      while (child != null) {
        node = child;
        child = node.left;
      }
      return node;
    }
    /**
     * Returns the last node in this subtree.
     */
    public Node<K, V> last() {
      Node<K, V> node = this;
      Node<K, V> child = node.right;
      while (child != null) {
        node = child;
        child = node.right;
      }
      return node;
    }
  }
  // Fail-fast iterator over the insertion-order linked list.
  private abstract class LinkedTreeMapIterator<T> implements Iterator<T> {
    Node<K, V> next = header.next;
    Node<K, V> lastReturned = null;
    int expectedModCount = modCount;
    public final boolean hasNext() {
      return next != header;
    }
    final Node<K, V> nextNode() {
      Node<K, V> e = next;
      if (e == header) {
        throw new NoSuchElementException();
      }
      if (modCount != expectedModCount) {
        throw new ConcurrentModificationException();
      }
      next = e.next;
      return lastReturned = e;
    }
    public final void remove() {
      if (lastReturned == null) {
        throw new IllegalStateException();
      }
      removeInternal(lastReturned, true);
      lastReturned = null;
      // Removal through the iterator is permitted; resync the expected count.
      expectedModCount = modCount;
    }
  }
  class EntrySet extends AbstractSet<Entry<K, V>> {
    @Override public int size() {
      return size;
    }
    @Override public Iterator<Entry<K, V>> iterator() {
      return new LinkedTreeMapIterator<Entry<K, V>>() {
        public Entry<K, V> next() {
          return nextNode();
        }
      };
    }
    @Override public boolean contains(Object o) {
      return o instanceof Entry && findByEntry((Entry<?, ?>) o) != null;
    }
    @Override public boolean remove(Object o) {
      if (!(o instanceof Entry)) {
        return false;
      }
      Node<K, V> node = findByEntry((Entry<?, ?>) o);
      if (node == null) {
        return false;
      }
      removeInternal(node, true);
      return true;
    }
    @Override public void clear() {
      LinkedTreeMap.this.clear();
    }
  }
  final class KeySet extends AbstractSet<K> {
    @Override public int size() {
      return size;
    }
    @Override public Iterator<K> iterator() {
      return new LinkedTreeMapIterator<K>() {
        public K next() {
          return nextNode().key;
        }
      };
    }
    @Override public boolean contains(Object o) {
      return containsKey(o);
    }
    @Override public boolean remove(Object key) {
      return removeInternalByKey(key) != null;
    }
    @Override public void clear() {
      LinkedTreeMap.this.clear();
    }
  }
  /**
   * If somebody is unlucky enough to have to serialize one of these, serialize
   * it as a LinkedHashMap so that they won't need Gson on the other side to
   * deserialize it. Using serialization defeats our DoS defence, so most apps
   * shouldn't use it.
   */
  private Object writeReplace() throws ObjectStreamException {
    return new LinkedHashMap<K, V>(this);
  }
}
|
|
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Autogenerated by Thrift Compiler (0.9.2)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
*/
package org.apache.airavata.model.error;
import org.apache.thrift.scheme.IScheme;
import org.apache.thrift.scheme.SchemeFactory;
import org.apache.thrift.scheme.StandardScheme;
import org.apache.thrift.scheme.TupleScheme;
import org.apache.thrift.protocol.TTupleProtocol;
import org.apache.thrift.protocol.TProtocolException;
import org.apache.thrift.EncodingUtils;
import org.apache.thrift.TException;
import org.apache.thrift.async.AsyncMethodCallback;
import org.apache.thrift.server.AbstractNonblockingServer.*;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Set;
import java.util.HashSet;
import java.util.EnumSet;
import java.util.Collections;
import java.util.BitSet;
import java.nio.ByteBuffer;
import java.util.Arrays;
import javax.annotation.Generated;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
/**
* 1: optional string identifier,
* 2: optional string key
*
*/
@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-3")
// NOTE(review): Thrift-generated class (see the "DO NOT EDIT" banner above).
// Comments below are review annotations only; behavioral changes belong in the
// .thrift IDL followed by regeneration, never here.
public class ProjectNotFoundException extends TException implements org.apache.thrift.TBase<ProjectNotFoundException, ProjectNotFoundException._Fields>, java.io.Serializable, Cloneable, Comparable<ProjectNotFoundException> {
  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("ProjectNotFoundException");
  private static final org.apache.thrift.protocol.TField MESSAGE_FIELD_DESC = new org.apache.thrift.protocol.TField("message", org.apache.thrift.protocol.TType.STRING, (short)1);
  // Serialization strategies: standard (field-tagged) and tuple (compact) schemes.
  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
  static {
    schemes.put(StandardScheme.class, new ProjectNotFoundExceptionStandardSchemeFactory());
    schemes.put(TupleScheme.class, new ProjectNotFoundExceptionTupleSchemeFactory());
  }
  private String message; // required
  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
    MESSAGE((short)1, "message");
    private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
    static {
      for (_Fields field : EnumSet.allOf(_Fields.class)) {
        byName.put(field.getFieldName(), field);
      }
    }
    /**
     * Find the _Fields constant that matches fieldId, or null if its not found.
     */
    public static _Fields findByThriftId(int fieldId) {
      switch(fieldId) {
        case 1: // MESSAGE
          return MESSAGE;
        default:
          return null;
      }
    }
    /**
     * Find the _Fields constant that matches fieldId, throwing an exception
     * if it is not found.
     */
    public static _Fields findByThriftIdOrThrow(int fieldId) {
      _Fields fields = findByThriftId(fieldId);
      if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
      return fields;
    }
    /**
     * Find the _Fields constant that matches name, or null if its not found.
     */
    public static _Fields findByName(String name) {
      return byName.get(name);
    }
    private final short _thriftId;
    private final String _fieldName;
    _Fields(short thriftId, String fieldName) {
      _thriftId = thriftId;
      _fieldName = fieldName;
    }
    public short getThriftFieldId() {
      return _thriftId;
    }
    public String getFieldName() {
      return _fieldName;
    }
  }
  // isset id assignments
  public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
  static {
    Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
    tmpMap.put(_Fields.MESSAGE, new org.apache.thrift.meta_data.FieldMetaData("message", org.apache.thrift.TFieldRequirementType.REQUIRED,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
    metaDataMap = Collections.unmodifiableMap(tmpMap);
    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(ProjectNotFoundException.class, metaDataMap);
  }
  public ProjectNotFoundException() {
  }
  public ProjectNotFoundException(
    String message)
  {
    this();
    this.message = message;
  }
  /**
   * Performs a deep copy on <i>other</i>.
   */
  public ProjectNotFoundException(ProjectNotFoundException other) {
    // String is immutable, so a reference copy is a deep copy here.
    if (other.isSetMessage()) {
      this.message = other.message;
    }
  }
  public ProjectNotFoundException deepCopy() {
    return new ProjectNotFoundException(this);
  }
  @Override
  public void clear() {
    this.message = null;
  }
  public String getMessage() {
    return this.message;
  }
  public void setMessage(String message) {
    this.message = message;
  }
  public void unsetMessage() {
    this.message = null;
  }
  /** Returns true if field message is set (has been assigned a value) and false otherwise */
  public boolean isSetMessage() {
    return this.message != null;
  }
  public void setMessageIsSet(boolean value) {
    if (!value) {
      this.message = null;
    }
  }
  public void setFieldValue(_Fields field, Object value) {
    switch (field) {
    case MESSAGE:
      if (value == null) {
        unsetMessage();
      } else {
        setMessage((String)value);
      }
      break;
    }
  }
  public Object getFieldValue(_Fields field) {
    switch (field) {
    case MESSAGE:
      return getMessage();
    }
    throw new IllegalStateException();
  }
  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
  public boolean isSet(_Fields field) {
    if (field == null) {
      throw new IllegalArgumentException();
    }
    switch (field) {
    case MESSAGE:
      return isSetMessage();
    }
    throw new IllegalStateException();
  }
  @Override
  public boolean equals(Object that) {
    if (that == null)
      return false;
    if (that instanceof ProjectNotFoundException)
      return this.equals((ProjectNotFoundException)that);
    return false;
  }
  public boolean equals(ProjectNotFoundException that) {
    if (that == null)
      return false;
    boolean this_present_message = true && this.isSetMessage();
    boolean that_present_message = true && that.isSetMessage();
    if (this_present_message || that_present_message) {
      if (!(this_present_message && that_present_message))
        return false;
      if (!this.message.equals(that.message))
        return false;
    }
    return true;
  }
  @Override
  public int hashCode() {
    List<Object> list = new ArrayList<Object>();
    boolean present_message = true && (isSetMessage());
    list.add(present_message);
    if (present_message)
      list.add(message);
    return list.hashCode();
  }
  @Override
  public int compareTo(ProjectNotFoundException other) {
    if (!getClass().equals(other.getClass())) {
      return getClass().getName().compareTo(other.getClass().getName());
    }
    int lastComparison = 0;
    // Unset fields sort before set ones, then values compare lexically.
    lastComparison = Boolean.valueOf(isSetMessage()).compareTo(other.isSetMessage());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetMessage()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.message, other.message);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    return 0;
  }
  public _Fields fieldForId(int fieldId) {
    return _Fields.findByThriftId(fieldId);
  }
  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
    schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
  }
  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
    schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
  }
  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder("ProjectNotFoundException(");
    boolean first = true;
    sb.append("message:");
    if (this.message == null) {
      sb.append("null");
    } else {
      sb.append(this.message);
    }
    first = false;
    sb.append(")");
    return sb.toString();
  }
  public void validate() throws org.apache.thrift.TException {
    // check for required fields
    if (!isSetMessage()) {
      throw new org.apache.thrift.protocol.TProtocolException("Required field 'message' is unset! Struct:" + toString());
    }
    // check for sub-struct validity
  }
  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
    try {
      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }
  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
    try {
      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }
  private static class ProjectNotFoundExceptionStandardSchemeFactory implements SchemeFactory {
    public ProjectNotFoundExceptionStandardScheme getScheme() {
      return new ProjectNotFoundExceptionStandardScheme();
    }
  }
  private static class ProjectNotFoundExceptionStandardScheme extends StandardScheme<ProjectNotFoundException> {
    public void read(org.apache.thrift.protocol.TProtocol iprot, ProjectNotFoundException struct) throws org.apache.thrift.TException {
      org.apache.thrift.protocol.TField schemeField;
      iprot.readStructBegin();
      while (true)
      {
        schemeField = iprot.readFieldBegin();
        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
          break;
        }
        switch (schemeField.id) {
          case 1: // MESSAGE
            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
              struct.message = iprot.readString();
              struct.setMessageIsSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          default:
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
        }
        iprot.readFieldEnd();
      }
      iprot.readStructEnd();
      struct.validate();
    }
    public void write(org.apache.thrift.protocol.TProtocol oprot, ProjectNotFoundException struct) throws org.apache.thrift.TException {
      struct.validate();
      oprot.writeStructBegin(STRUCT_DESC);
      if (struct.message != null) {
        oprot.writeFieldBegin(MESSAGE_FIELD_DESC);
        oprot.writeString(struct.message);
        oprot.writeFieldEnd();
      }
      oprot.writeFieldStop();
      oprot.writeStructEnd();
    }
  }
  private static class ProjectNotFoundExceptionTupleSchemeFactory implements SchemeFactory {
    public ProjectNotFoundExceptionTupleScheme getScheme() {
      return new ProjectNotFoundExceptionTupleScheme();
    }
  }
  private static class ProjectNotFoundExceptionTupleScheme extends TupleScheme<ProjectNotFoundException> {
    @Override
    public void write(org.apache.thrift.protocol.TProtocol prot, ProjectNotFoundException struct) throws org.apache.thrift.TException {
      // Tuple scheme omits field headers: the single required field is written bare.
      TTupleProtocol oprot = (TTupleProtocol) prot;
      oprot.writeString(struct.message);
    }
    @Override
    public void read(org.apache.thrift.protocol.TProtocol prot, ProjectNotFoundException struct) throws org.apache.thrift.TException {
      TTupleProtocol iprot = (TTupleProtocol) prot;
      struct.message = iprot.readString();
      struct.setMessageIsSet(true);
    }
  }
}
|
|
package mil.nga.giat.mage.sdk.datastore.user;
import android.content.Context;
import android.util.Log;
import com.j256.ormlite.dao.Dao;
import com.j256.ormlite.stmt.DeleteBuilder;
import com.j256.ormlite.stmt.PreparedQuery;
import com.j256.ormlite.stmt.QueryBuilder;
import com.j256.ormlite.stmt.UpdateBuilder;
import com.j256.ormlite.stmt.Where;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;
import mil.nga.giat.mage.sdk.datastore.DaoHelper;
import mil.nga.giat.mage.sdk.event.IEventDispatcher;
import mil.nga.giat.mage.sdk.event.IEventEventListener;
import mil.nga.giat.mage.sdk.event.IUserDispatcher;
import mil.nga.giat.mage.sdk.event.IUserEventListener;
import mil.nga.giat.mage.sdk.exceptions.UserException;
/**
* A utility class for accessing {@link User} data from the physical data model.
* The details of ORM DAOs and Lazy Loading should not be exposed past this
* class.
*/
public class UserHelper extends DaoHelper<User> implements IEventDispatcher<IEventEventListener>, IUserDispatcher {
private static final String LOG_NAME = UserHelper.class.getName();
private final Dao<User, Long> userDao;
private final Dao<UserLocal, Long> userLocalDao;
private final Dao<UserTeam, Long> userTeamDao;
private final Dao<TeamEvent, Long> teamEventDao;
private static final Collection<IUserEventListener> userListeners = new CopyOnWriteArrayList<>();
private static final Collection<IEventEventListener> eventListeners = new CopyOnWriteArrayList<>();
/**
* Singleton.
*/
private static UserHelper mUserHelper;
/**
* Use of a Singleton here ensures that an excessive amount of DAOs are not
* created.
*
* @param context
* Application Context
* @return A fully constructed and operational UserHelper.
*/
public static UserHelper getInstance(Context context) {
if (mUserHelper == null) {
mUserHelper = new UserHelper(context);
}
return mUserHelper;
}
/**
* Only one-per JVM. Singleton.
*
* @param context context
*/
	private UserHelper(Context context) {
		// daoStore is initialized by the DaoHelper superclass from the given context.
		super(context);
		try {
			userDao = daoStore.getUserDao();
			userLocalDao = daoStore.getUserLocalDao();
			userTeamDao = daoStore.getUserTeamDao();
			teamEventDao = daoStore.getTeamEventDao();
		} catch (SQLException sqle) {
			// A helper without its DAOs is unusable; fail construction loudly.
			Log.e(LOG_NAME, "Unable to communicate with User database.", sqle);
			throw new IllegalStateException("Unable to communicate with User database.", sqle);
		}
	}
// FIXME : should add user to team if needed
@Override
public User create(User user) throws UserException {
User createdUser;
try {
UserLocal userLocal = userLocalDao.createIfNotExists(new UserLocal());
user.setUserLocal(userLocal);
createdUser = userDao.createIfNotExists(user);
} catch (SQLException sqle) {
Log.e(LOG_NAME, "There was a problem creating user: " + user, sqle);
throw new UserException("There was a problem creating user: " + user, sqle);
}
for (IUserEventListener listener : userListeners) {
listener.onUserCreated(createdUser);
}
return createdUser;
}
@Override
public User read(Long id) throws UserException {
try {
return userDao.queryForId(id);
} catch (SQLException sqle) {
Log.e(LOG_NAME, "Unable to query for existence for id = '" + id + "'", sqle);
throw new UserException("Unable to query for existence for id = '" + id + "'", sqle);
}
}
@Override
public User read(String remoteId) throws UserException {
User user = null;
try {
List<User> results = userDao.queryBuilder().where().eq("remote_id", remoteId).query();
if (results != null && results.size() > 0) {
user = results.get(0);
}
} catch (SQLException sqle) {
Log.e(LOG_NAME, "Unable to query for existence for remote_id = '" + remoteId + "'", sqle);
throw new UserException("Unable to query for existence for remote_id = '" + remoteId + "'", sqle);
}
return user;
}
public List<User> read(Collection<String> remoteIds) throws UserException {
try {
return userDao.queryBuilder().where().in("remote_id", remoteIds).query();
} catch (SQLException sqle) {
Log.e(LOG_NAME, "Unable to query for existence for remote_ids = '" + remoteIds + "'", sqle);
throw new UserException("Unable to query for existence for remote_ids = '" + remoteIds.toString() + "'", sqle);
}
}
public User readCurrentUser() throws UserException {
User user;
try {
QueryBuilder<UserLocal, Long> userLocalQuery = userLocalDao.queryBuilder();
userLocalQuery.selectColumns(UserLocal.COLUMN_NAME_ID);
Where<UserLocal, Long> where = userLocalQuery.where();
where.eq(UserLocal.COLUMN_NAME_CURRENT_USER, Boolean.TRUE);
QueryBuilder<User, Long> userQuery = userDao.queryBuilder();
userQuery.where().in(User.COLUMN_NAME_USER_LOCAL_ID, userLocalQuery);
PreparedQuery<User> preparedQuery = userQuery.prepare();
user = userDao.queryForFirst(preparedQuery);
} catch (SQLException sqle) {
Log.e(LOG_NAME, "There was a problem reading active users.");
throw new UserException("There was a problem reading active users.", sqle);
}
return user;
}
public boolean isCurrentUserPartOfEvent(Event event) {
boolean status = false;
try {
status = EventHelper.getInstance(mApplicationContext).getEventsForCurrentUser().contains(event);
} catch(Exception e) {
Log.e(LOG_NAME, "Problem getting user or event.");
}
return status;
}
public boolean isCurrentUserPartOfCurrentEvent() {
boolean status = false;
try {
User user = readCurrentUser();
status = isCurrentUserPartOfEvent(user.getCurrentEvent());
} catch (Exception e) {
Log.e(LOG_NAME, "Problem getting user or event.");
}
return status;
}
@Override
public User update(User user) throws UserException {
try {
User oldUser = read(user.getId());
user.setUserLocal(oldUser.getUserLocal());
userDao.update(user);
} catch (SQLException sqle) {
Log.e(LOG_NAME, "There was a problem creating user: " + user);
throw new UserException("There was a problem creating user: " + user, sqle);
}
for (IUserEventListener listener : userListeners) {
listener.onUserUpdated(user);
}
return user;
}
public User createOrUpdate(User user) {
try {
QueryBuilder<User, Long> db = userDao.queryBuilder();
db.where().eq(User.COLUMN_NAME_USERNAME, user.getUsername());
User oldUser = db.queryForFirst();
if (oldUser == null) {
user = create(user);
Log.d(LOG_NAME, "Created user with remote_id " + user.getRemoteId());
} else {
// perform update?
user.setId(oldUser.getId());
user.setUserLocal(oldUser.getUserLocal());
userDao.update(user);
Log.d(LOG_NAME, "Updated user with remote_id " + user.getRemoteId());
for (IUserEventListener listener : userListeners) {
listener.onUserUpdated(user);
}
}
} catch (Exception ue) {
Log.e(LOG_NAME, "There was a problem reading user: " + user, ue);
}
return user;
}
/**
 * Marks the given user as the current (signed-in) user. All rows are cleared
 * first so at most one UserLocal row carries the current-user flag.
 *
 * @param user user to mark current; must have an attached UserLocal row
 * @return the same user instance, refreshed from the database
 * @throws UserException if the UserLocal table cannot be updated
 */
public User setCurrentUser(User user) throws UserException {
    try {
        clearCurrentUser();
        UpdateBuilder<UserLocal, Long> update = userLocalDao.updateBuilder();
        update.where().idEq(user.getUserLocal().getId());
        update.updateColumnValue(UserLocal.COLUMN_NAME_CURRENT_USER, true);
        update.update();
        // Re-read so the in-memory object reflects the new flag.
        userDao.refresh(user);
    } catch (SQLException sqle) {
        Log.e(LOG_NAME, "Unable to update user '" + user.getDisplayName() + "' to current user" , sqle);
        throw new UserException("Unable to update UserLocal table", sqle);
    }
    return user;
}
/**
 * Sets the user's current event in the UserLocal table and, when the user is
 * the active (current) user, fires onEventChanged() to event listeners if the
 * event identity actually changed.
 *
 * NOTE(review): builder.update() only executes inside the isCurrentUser()
 * branch below, so for a non-current user this method persists nothing —
 * confirm that is intentional.
 *
 * @param user  user whose current event is being set
 * @param event new current event (may be null to clear)
 * @return the same user instance, refreshed from the database when updated
 * @throws UserException if the UserLocal table cannot be updated
 */
public User setCurrentEvent(User user, Event event) throws UserException {
    try {
        UpdateBuilder<UserLocal, Long> builder = userLocalDao.updateBuilder();
        builder.where().idEq(user.getUserLocal().getId());
        builder.updateColumnValue(UserLocal.COLUMN_NAME_CURRENT_EVENT, event);
        // check if we need to send event onChange
        UserLocal userLocal = user.getUserLocal();
        if (userLocal.isCurrentUser()) {
            // Compare old vs. new event identity by remote id (null = no event).
            String oldEventRemoteId = null;
            if (userLocal.getCurrentEvent() != null) {
                oldEventRemoteId = userLocal.getCurrentEvent().getRemoteId();
            }
            String newEventRemoteId = event != null ? event.getRemoteId() : null;
            // run update before firing event to make sure update works.
            builder.update();
            // XOR: exactly one of old/new is null -> event presence changed.
            if (oldEventRemoteId == null ^ newEventRemoteId == null) {
                for (IEventEventListener listener : eventListeners) {
                    listener.onEventChanged();
                }
            } else if (oldEventRemoteId != null && newEventRemoteId != null) {
                // Both present: fire only when the remote id differs.
                if (!oldEventRemoteId.equals(newEventRemoteId)) {
                    for (IEventEventListener listener : eventListeners) {
                        listener.onEventChanged();
                    }
                }
            }
            userDao.refresh(user);
        }
    } catch (SQLException sqle) {
        Log.e(LOG_NAME, "Unable to update users '" + user.getDisplayName() + "' current event" , sqle);
        throw new UserException("Unable to update UserLocal table", sqle);
    }
    return user;
}
/**
 * Clears the current event on the user's UserLocal row. A null user or a user
 * with no local record is returned unchanged.
 *
 * @param user user whose current event should be cleared (may be null)
 * @return the same user instance, refreshed when an update occurred
 * @throws UserException if the UserLocal table cannot be updated
 */
public User removeCurrentEvent(User user) throws UserException {
    if (user == null || user.getUserLocal() == null) {
        return user;
    }
    try {
        UpdateBuilder<UserLocal, Long> builder = userLocalDao.updateBuilder();
        builder.where().idEq(user.getUserLocal().getId());
        builder.updateColumnValue(UserLocal.COLUMN_NAME_CURRENT_EVENT, null);
        builder.update();
        userDao.refresh(user);
    } catch (SQLException e) {
        // BUG FIX: pass the exception to Log.e so the stack trace is recorded
        // (every sibling update helper in this class does so).
        Log.e(LOG_NAME, "Unable to clear current event for user '" + user.getDisplayName() + "'", e);
        throw new UserException("Unable to update UserLocal table", e);
    }
    return user;
}
/**
 * Persists a new avatar file path on the user's local record and notifies
 * registered user listeners.
 *
 * @param user user whose avatar changed
 * @param path filesystem path of the new avatar image
 * @return the same user instance, with its UserLocal refreshed
 * @throws UserException if the UserLocal table cannot be updated
 */
public User setAvatarPath(User user, String path) throws UserException {
    try {
        UpdateBuilder<UserLocal, Long> update = userLocalDao.updateBuilder();
        update.where().idEq(user.getUserLocal().getId());
        update.updateColumnValue(UserLocal.COLUMN_NAME_AVATAR_PATH, path);
        update.update();
        userLocalDao.refresh(user.getUserLocal());
    } catch (SQLException sqle) {
        Log.e(LOG_NAME, "Unable to update users '" + user.getDisplayName() + "' avatar path" , sqle);
        throw new UserException("Unable to update UserLocal table", sqle);
    }
    // Fire after the write so listeners observe persisted state.
    for (IUserEventListener listener : userListeners) {
        listener.onUserAvatarUpdated(user);
    }
    return user;
}
/**
 * Persists a new map-icon file path on the user's local record and notifies
 * registered user listeners.
 *
 * @param user user whose icon changed
 * @param path filesystem path of the new icon image
 * @return the same user instance, with its UserLocal refreshed
 * @throws UserException if the UserLocal table cannot be updated
 */
public User setIconPath(User user, String path) throws UserException {
    try {
        UpdateBuilder<UserLocal, Long> update = userLocalDao.updateBuilder();
        update.where().idEq(user.getUserLocal().getId());
        update.updateColumnValue(UserLocal.COLUMN_NAME_ICON_PATH, path);
        update.update();
        userLocalDao.refresh(user.getUserLocal());
    } catch (SQLException sqle) {
        Log.e(LOG_NAME, "Unable to update users '" + user.getDisplayName() + "' icon path" , sqle);
        throw new UserException("Unable to update UserLocal table", sqle);
    }
    // Fire after the write so listeners observe persisted state.
    for (IUserEventListener listener : userListeners) {
        listener.onUserIconUpdated(user);
    }
    return user;
}
/**
 * Resets the current-user flag on every UserLocal row so no user is marked
 * current (unconditional bulk update — no WHERE clause).
 *
 * @throws UserException if the bulk update fails
 */
private void clearCurrentUser() throws UserException {
    try {
        UpdateBuilder<UserLocal, Long> update = userLocalDao.updateBuilder();
        update.updateColumnValue(UserLocal.COLUMN_NAME_CURRENT_USER, Boolean.FALSE);
        update.update();
    } catch (SQLException sqle) {
        Log.e(LOG_NAME, "There was a problem deleting active userlocal.", sqle);
        throw new UserException("There was a problem deleting active userlocal.", sqle);
    }
}
/**
 * Deletes every row in the user-team join table. Failures are logged and
 * swallowed (best-effort cleanup).
 */
public void deleteUserTeams() {
    try {
        DeleteBuilder<UserTeam, Long> delete = userTeamDao.deleteBuilder();
        delete.delete();
    } catch (SQLException sqle) {
        Log.e(LOG_NAME, "There was a problem deleting userteams.", sqle);
    }
}
/**
 * Inserts the user-team association unless an identical row already exists.
 *
 * @param userTeam association to persist
 * @return the created (or pre-existing) row, or null if the insert failed
 */
public UserTeam create(UserTeam userTeam) {
    try {
        return userTeamDao.createIfNotExists(userTeam);
    } catch (SQLException sqle) {
        Log.e(LOG_NAME, "There was a problem creating userteam: " + userTeam, sqle);
        return null;
    }
}
/**
 * Collects all users belonging to the given event by joining through the
 * team_event and user_team tables (event -> teams -> users) with nested
 * sub-queries.
 *
 * @param event event whose members are wanted
 * @return users in the event; empty (never null) on error or no matches
 */
public Collection<User> getUsersInEvent(Event event) {
    try {
        // Sub-query 1: teams attached to this event.
        QueryBuilder<TeamEvent, Long> teamsForEvent = teamEventDao.queryBuilder();
        teamsForEvent.selectColumns("team_id");
        teamsForEvent.where().eq("event_id", event.getId());
        // Sub-query 2: users belonging to any of those teams.
        QueryBuilder<UserTeam, Long> usersForTeams = userTeamDao.queryBuilder();
        usersForTeams.selectColumns("user_id");
        usersForTeams.where().in("team_id", teamsForEvent);
        // Outer query: the user rows themselves.
        QueryBuilder<User, Long> userQuery = userDao.queryBuilder();
        userQuery.where().in("_id", usersForTeams);
        Collection<User> users = userQuery.query();
        return users != null ? users : new ArrayList<>();
    } catch (SQLException sqle) {
        Log.e(LOG_NAME, "Error getting users for event: " + event, sqle);
        return new ArrayList<>();
    }
}
/** Registers a listener for current-event change notifications. */
@Override
public boolean addListener(IEventEventListener listener) {
    return eventListeners.add(listener);
}
/** Unregisters an event-change listener; returns true if it was registered. */
@Override
public boolean removeListener(IEventEventListener listener) {
    return eventListeners.remove(listener);
}
/** Registers a listener for user update/avatar/icon notifications. */
@Override
public boolean addListener(IUserEventListener listener) {
    return userListeners.add(listener);
}
/**
 * Unregisters a user-event listener.
 *
 * @param listener listener to remove
 * @return true if the listener was registered and has been removed
 */
@Override
public boolean removeListener(IUserEventListener listener) {
    // BUG FIX: previously called userListeners.add(listener) (copy/paste from
    // addListener), which re-registered the listener instead of removing it —
    // a listener leak. Mirror removeListener(IEventEventListener) above.
    return userListeners.remove(listener);
}
}
|
|
package shadow.typecheck.type;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Set;
import shadow.ShadowException;
import shadow.parse.ShadowParser;
/**
 * Represents a Shadow array type such as {@code int[]} or
 * {@code nullable String[][]}. An ArrayType wraps a base (element) type plus a
 * nullability flag for the elements; multi-dimensional arrays are modeled as
 * arrays whose base type is itself an ArrayType. Most member/interface queries
 * delegate to the equivalent generic class type (Array&lt;T&gt; or
 * ArrayNullable&lt;T&gt;) produced by {@link #convertToGeneric()}.
 */
public class ArrayType extends ClassType
{
    // Element type; itself an ArrayType for multi-dimensional arrays.
    private final Type baseType;
    // Whether the elements are nullable (nullable T[] vs T[]).
    private final boolean nullable;
    // Lazily built generic equivalent (Array<T>/ArrayNullable<T>); cached.
    private ClassType genericVersion = null;

    @Override
    public int getWidth()
    {
        return 6;
        //not necessarily the actual width, just a value that helps sort the fields
        //references have a "width" of 6, which covers either 4 or 8 byte pointers
        // interfaces first
        // then longs and ulongs
        // then regular references and arrays
        // then smaller primitives
    }

    /**
     * Returns the innermost element type, unwrapping nested array dimensions
     * (e.g. int for int[][][]).
     */
    public Type recursivelyGetBaseType() {
        if (baseType instanceof ArrayType)
            return ((ArrayType)baseType).recursivelyGetBaseType();
        return baseType;
    }

    /** Returns the immediate element type (one dimension unwrapped). */
    public Type getBaseType() {
        return baseType;
    }

    /** Creates a single-dimensional, non-nullable array of the given type. */
    public ArrayType(Type baseType) {
        this(baseType, false);
    }

    // Helper for the constructor: innermost element type when given an array,
    // otherwise the type itself. Used to name this type after its element type.
    protected static Type getLowestBase(Type type) {
        if( type instanceof ArrayType )
            return ((ArrayType)type).recursivelyGetBaseType();
        return type;
    }

    /** Creates a single-dimensional array with the given element nullability. */
    public ArrayType(Type baseType, boolean nullable ) {
        this( baseType, 1, nullable );
    }

    /**
     * Creates an array of the given number of dimensions. Dimensions beyond the
     * first are represented by nesting ArrayTypes for the base type.
     */
    public ArrayType(Type baseType, int dimensions, boolean nullable ) {
        // Name/doc/outer come from the innermost element type; arrays are never
        // immutable even if their element type is, hence the mask.
        super( getLowestBase(baseType).getTypeName(), new Modifiers(baseType.getModifiers().getModifiers() & ~Modifiers.IMMUTABLE), baseType.getDocumentation(), baseType.getOuter() );
        setExtendType(Type.OBJECT);
        if( dimensions == 1 )
            this.baseType = baseType;
        else
            this.baseType = new ArrayType( baseType, dimensions - 1, nullable);
        // An array of a parameterized type is itself parameterized.
        if( baseType.isParameterized() )
            setParameterized(true);
        this.nullable = nullable;
    }

    @Override
    public String toString(int options) {
        // Mangled form appends _A (array) or _NA (nullable array) to the base.
        if( (options & MANGLE) != 0 ) {
            String baseName = baseType.isPrimitive() ? baseType.getTypeName() : baseType.toString(options);
            if( nullable )
                return baseName + "_NA";
            else
                return baseName + "_A";
        }
        // Human-readable form: only the outermost dimension prints "nullable".
        boolean printNullable = nullable && (options & NO_NULLABLE) == 0 && !(baseType instanceof ArrayType);
        return (printNullable ? "nullable " : "" ) + baseType.toString(options) + "[]";
    }

    @Override
    public SequenceType getTypeParameters() {
        // An array's type parameters are those of its element type.
        return baseType.getTypeParameters();
    }

    @Override
    public boolean equals(Type type) {
        if( type == Type.NULL )
            return false;
        // Arrays are equal only with matching nullability AND element type.
        if( type instanceof ArrayType )
        {
            ArrayType other = (ArrayType)type;
            if( nullable == other.nullable )
                return baseType.equals(other.baseType);
        }
        return false;
    }

    // The following member lookups all delegate to the generic Array<T> /
    // ArrayNullable<T> equivalent, which carries the actual method/field lists.

    @Override
    public MethodSignature getMatchingMethod(String methodName, SequenceType arguments, SequenceType typeArguments, List<ShadowException> errors ) {
        return convertToGeneric().getMatchingMethod(methodName, arguments, typeArguments, errors);
    }

    @Override
    public List<MethodSignature> recursivelyGetMethodOverloads(String methodName) {
        return convertToGeneric().recursivelyGetMethodOverloads(methodName);
    }

    @Override
    public boolean containsField(String fieldName) {
        return convertToGeneric().containsField(fieldName);
    }

    @Override
    public ShadowParser.VariableDeclaratorContext getField(String fieldName) {
        return convertToGeneric().getField(fieldName);
    }

    @Override
    public LinkedHashMap<String, ShadowParser.VariableDeclaratorContext> getFields() {
        return convertToGeneric().getFields();
    }

    @Override
    public boolean hasInterface(InterfaceType type)
    {
        return convertToGeneric().hasInterface(type);
    }

    @Override
    public boolean hasUninstantiatedInterface(InterfaceType type)
    {
        return convertToGeneric().hasUninstantiatedInterface(type);
    }

    @Override
    public boolean isSubtype(Type t) {
        if( t == UNKNOWN )
            return false;
        if( t == OBJECT )
            return true;
        if( equals(t) )
            return true;
        if( t instanceof ArrayType ) {
            ArrayType type = (ArrayType)this;
            ArrayType other = (ArrayType)t;
            //invariant subtyping on arrays
            if( type.nullable == other.nullable )
                return type.getBaseType().equals(other.getBaseType());
            else
                return false;
        }
        //check generic version
        return convertToGeneric().isSubtype(t);
    }

    @Override
    public ArrayType replace(List<ModifiedType> values, List<ModifiedType> replacements ) throws InstantiationException
    {
        // Type-parameter substitution applies to the element type; nullability
        // is preserved.
        return new ArrayType( baseType.replace(values, replacements), nullable);
    }

    /**
     * Returns the generic class equivalent of this array
     * (Array&lt;base&gt; or ArrayNullable&lt;base&gt;), building and caching it
     * on first use.
     */
    public ClassType convertToGeneric() {
        if( genericVersion == null ) {
            Type base = baseType;
            try {
                if( nullable )
                    genericVersion = Type.ARRAY_NULLABLE.replace(Type.ARRAY_NULLABLE.getTypeParameters(), new SequenceType(base));
                else
                    genericVersion = Type.ARRAY.replace(Type.ARRAY.getTypeParameters(), new SequenceType(base));
            }
            catch(InstantiationException e)
            {}
        }
        return genericVersion; //shouldn't be null if instantiation succeeded
    }

    /** Returns this array if already nullable, otherwise a nullable copy. */
    public ArrayType convertToNullable() {
        if( nullable )
            return this;
        else
            return new ArrayType( baseType, true);
    }

    /** Whether the elements of this array are nullable. */
    public boolean isNullable() {
        return nullable;
    }

    /**
     * Instantiates any uninstantiated element type, recursing through nested
     * array dimensions; returns this unchanged when nothing needs instantiating.
     */
    public ArrayType instantiate() throws InstantiationException {
        if( recursivelyGetBaseType() instanceof UninstantiatedType ) {
            if( baseType instanceof UninstantiatedType )
                return new ArrayType(((UninstantiatedType)baseType).instantiate(), nullable);
            //must be an array of arrays
            else
                return new ArrayType(((ArrayType)baseType).instantiate(), nullable);
        }
        else
            return this;
    }

    @Override
    public boolean isRecursivelyParameterized() {
        return baseType.isRecursivelyParameterized();
    }

    @Override
    public boolean isFullyInstantiated() {
        return baseType.isFullyInstantiated();
    }

    /**
     * Whether any element type (at any nesting depth) is an unbound type
     * parameter or an incompletely instantiated parameterized type.
     */
    public boolean containsUnboundTypeParameters() {
        if( baseType instanceof TypeParameter )
            return true;
        if( baseType.isParameterized() && !baseType.isFullyInstantiated() )
            return true;
        if( baseType instanceof ArrayType )
            return ((ArrayType)baseType).containsUnboundTypeParameters();
        return false;
    }

    @Override
    protected boolean onlyUsesTypeParametersFrom(Type type) {
        return convertToGeneric().onlyUsesTypeParametersFrom(type);
    }

    // Returns true if this uses no parameters or only these parameters
    @Override
    protected boolean onlyUsesTheseParameters(Set<TypeParameter> parameters) {
        return convertToGeneric().onlyUsesTheseParameters(parameters);
    }
}
|
|
/*
* Copyright 2015-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.codeaddslife.koara;
import java.util.Stack;
import com.codeaddslife.koara.ast.BlockElement;
import com.codeaddslife.koara.ast.BlockQuote;
import com.codeaddslife.koara.ast.Code;
import com.codeaddslife.koara.ast.CodeBlock;
import com.codeaddslife.koara.ast.Document;
import com.codeaddslife.koara.ast.Em;
import com.codeaddslife.koara.ast.Heading;
import com.codeaddslife.koara.ast.Image;
import com.codeaddslife.koara.ast.LineBreak;
import com.codeaddslife.koara.ast.Link;
import com.codeaddslife.koara.ast.ListBlock;
import com.codeaddslife.koara.ast.ListItem;
import com.codeaddslife.koara.ast.Paragraph;
import com.codeaddslife.koara.ast.Strong;
import com.codeaddslife.koara.ast.Text;
/** This class is here, because testing against HTML is easier */
public class Html5Renderer implements Renderer {
private StringBuffer out;
private int level;
private Stack<Integer> listSequence = new Stack<Integer>();
private boolean hardWrap;
public void visit(Document node) {
out = new StringBuffer();
node.childrenAccept(this);
}
public void visit(Heading node) {
out.append(indent() + "<h" + node.getValue() + ">");
node.childrenAccept(this);
out.append("</h" + node.getValue() + ">\n");
if(!node.isNested()) { out.append("\n"); }
}
public void visit(BlockQuote node) {
out.append(indent() + "<blockquote>");
if(node.getChildren() != null && node.getChildren().length > 0) { out.append("\n"); }
level++;
node.childrenAccept(this);
level--;
out.append(indent() + "</blockquote>\n");
if(!node.isNested()) { out.append("\n"); }
}
public void visit(ListBlock node) {
listSequence.push(0);
String tag = node.isOrdered() ? "ol" : "ul";
out.append(indent() + "<" + tag + ">\n");
level++;
node.childrenAccept(this);
level--;
out.append(indent() + "</" + tag + ">\n");
if(!node.isNested()) { out.append("\n"); }
listSequence.pop();
}
public void visit(ListItem node) {
Integer seq = listSequence.peek() + 1;
listSequence.set(listSequence.size() - 1, seq);
out.append(indent() + "<li");
if(node.getNumber() != null && (!seq.equals(node.getNumber()))) {
out.append(" value=\"" + node.getNumber() + "\"");
listSequence.push(node.getNumber());
}
out.append(">");
if(node.getChildren() != null) {
boolean block = (node.getChildren()[0].getClass() == Paragraph.class || node.getChildren()[0].getClass() == BlockElement.class);
if(node.getChildren().length > 1 || !block) { out.append("\n"); }
level++;
node.childrenAccept(this);
level--;
if(node.getChildren().length > 1 || !block) { out.append(indent()); }
}
out.append("</li>\n");
}
public void visit(CodeBlock node) {
out.append(indent() + "<pre><code");
if(node.getLanguage() != null) {
out.append(" class=\"language-" + escape(node.getLanguage()) + "\"");
}
out.append(">");
out.append(escape(node.getValue().toString()) + "</code></pre>\n");
if(!node.isNested()) { out.append("\n"); }
}
public void visit(Paragraph node) {
if(node.isNested() && (node.getParent() instanceof ListItem) && node.isSingleChild()) {
node.childrenAccept(this);
} else {
out.append(indent() + "<p>");
node.childrenAccept(this);
out.append("</p>\n");
if(!node.isNested()) { out.append("\n"); }
}
}
@Override
public void visit(BlockElement node) {
if(node.isNested() && (node.getParent() instanceof ListItem) && node.isSingleChild()) {
node.childrenAccept(this);
} else {
out.append(indent());
node.childrenAccept(this);
if(!node.isNested()) { out.append("\n"); }
}
}
public void visit(Image node) {
out.append("<img src=\"" + escapeUrl(node.getValue().toString()) + "\" alt=\"");
node.childrenAccept(this);
out.append("\" />");
}
public void visit(Link node) {
out.append("<a href=\"" + escapeUrl(node.getValue().toString()) + "\">");
node.childrenAccept(this);
out.append("</a>");
}
public void visit(Strong node) {
out.append("<strong>");
node.childrenAccept(this);
out.append("</strong>");
}
public void visit(Em node) {
out.append("<em>");
node.childrenAccept(this);
out.append("</em>");
}
public void visit(Code node) {
out.append("<code>");
node.childrenAccept(this);
out.append("</code>");
}
public void visit(Text node) {
out.append(escape(node.getValue().toString()));
}
public String escape(String text) {
return text.replaceAll("&", "&")
.replaceAll("<", "<")
.replaceAll(">", ">")
.replaceAll("\"", """);
}
public void visit(LineBreak node) {
if(hardWrap || node.isExplicit()) {
out.append("<br>");
}
out.append("\n" + indent());
node.childrenAccept(this);
}
public String escapeUrl(String text) {
return text.replaceAll(" ", "%20")
.replaceAll("\"", "%22")
.replaceAll("`", "%60")
.replaceAll("<", "%3C")
.replaceAll(">", "%3E")
.replaceAll("\\[", "%5B")
.replaceAll("\\]", "%5D")
.replaceAll("\\\\", "%5C");
}
public String indent() {
int repeat = level * 2;
final char[] buf = new char[repeat];
for (int i = repeat - 1; i >= 0; i--) {
buf[i] = ' ';
}
return new String(buf);
}
public void setHardWrap(boolean hardWrap) {
this.hardWrap = hardWrap;
}
public String getOutput() {
return out.toString().trim();
}
}
|
|
/**
* Copyright 2008 The Apache Software Foundation
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase;
import java.nio.ByteBuffer;
import java.util.Random;
import java.util.TreeMap;
import org.apache.hadoop.dfs.MiniDFSCluster;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.hbase.io.BatchUpdate;
/**
* Regression test for HBASE-613
*/
public class TestScanMultipleVersions extends HBaseTestCase {
    private final Text TABLE_NAME = new Text("TestScanMultipleVersions");
    // Two regions split at row_0500, each seeded with one row (see ROWS).
    private final HRegionInfo[] INFOS = new HRegionInfo[2];
    private final HRegion[] REGIONS = new HRegion[2];
    // Row keys
    private final Text[] ROWS = new Text[] {
        new Text("row_0200"),
        new Text("row_0800")
    };
    // Each row gets one cell version per timestamp below.
    private final long[] TIMESTAMPS = new long[] {
        100L,
        1000L
    };
    private final Random rand = new Random();
    private HTableDescriptor desc = null;
    private Path rootdir = null;
    private MiniDFSCluster dfsCluster = null;

    /** {@inheritDoc} */
    @Override
    public void setUp() throws Exception {
        // Create table description
        this.desc = new HTableDescriptor(TABLE_NAME.toString());
        this.desc.addFamily(new HColumnDescriptor(HConstants.COLUMN_FAMILY_STR));
        // Region 0 will contain the key range [,row_0500)
        INFOS[0] = new HRegionInfo(this.desc, HConstants.EMPTY_START_ROW,
            new Text("row_0500"));
        // Region 1 will contain the key range [row_0500,)
        INFOS[1] = new HRegionInfo(this.desc, new Text("row_0500"),
            HConstants.EMPTY_TEXT);
        // start HDFS
        dfsCluster = new MiniDFSCluster(conf, 2, true, (String[])null);
        try {
            // Set the hbase.rootdir to be the home directory in mini dfs.
            this.conf.set(HConstants.HBASE_DIR,
                dfsCluster.getFileSystem().getHomeDirectory().toString());
            fs = dfsCluster.getFileSystem();
            this.rootdir = fs.makeQualified(new Path(conf.get(HConstants.HBASE_DIR)));
            fs.mkdirs(this.rootdir);
            // Create root region
            HRegion root = HRegion.createHRegion(HRegionInfo.rootRegionInfo,
                this.rootdir, this.conf);
            // Create meta region
            HRegion meta = HRegion.createHRegion(HRegionInfo.firstMetaRegionInfo,
                this.rootdir, this.conf);
            // Insert meta into root region
            HRegion.addRegionToMETA(root, meta);
            // Create the regions
            for (int i = 0; i < REGIONS.length; i++) {
                REGIONS[i] =
                    HRegion.createHRegion(this.INFOS[i], this.rootdir, this.conf);
                // Insert data: one cell version per timestamp for this region's row.
                for (int j = 0; j < TIMESTAMPS.length; j++) {
                    BatchUpdate b = new BatchUpdate(rand.nextLong());
                    long id = b.startUpdate(ROWS[i]);
                    b.put(id, HConstants.COLUMN_FAMILY, toBytes(TIMESTAMPS[j]));
                    REGIONS[i].batchUpdate(TIMESTAMPS[j], b);
                }
                // Insert the region we created into the meta
                HRegion.addRegionToMETA(meta, REGIONS[i]);
                // Close region
                REGIONS[i].close();
                REGIONS[i].getLog().closeAndDelete();
            }
            // Close root and meta regions
            root.close();
            root.getLog().closeAndDelete();
            meta.close();
            meta.getLog().closeAndDelete();
            // Call super.Setup last. Otherwise we get a local file system.
            super.setUp();
        } catch (Exception e) {
            // Tear down DFS on any setup failure so the next test starts clean.
            if (dfsCluster != null) {
                StaticTestEnvironment.shutdownDfs(dfsCluster);
                dfsCluster = null;
            }
            throw e;
        }
    }

    /**
     * Scans the table at five different timestamps; every scan must see both
     * rows (one per region) regardless of which cell versions it selects.
     *
     * @throws Exception
     */
    public void testScanMultipleVersions() throws Exception {
        // Now start HBase
        MiniHBaseCluster cluster = new MiniHBaseCluster(conf, 1, dfsCluster, false);
        try {
            // At this point we have created multiple regions and both HDFS and HBase
            // are running. There are 5 cases we have to test. Each is described below.
            HTable t = new HTable(conf, TABLE_NAME);
            // Case 1: scan with LATEST_TIMESTAMP. Should get two rows
            int count = 0;
            HScannerInterface s = t.obtainScanner(HConstants.COLUMN_FAMILY_ARRAY,
                HConstants.EMPTY_START_ROW);
            try {
                HStoreKey key = new HStoreKey();
                TreeMap<Text, byte[]> results = new TreeMap<Text, byte[]>();
                while (s.next(key, results)) {
                    count += 1;
                }
                assertEquals("Number of rows should be 2", 2, count);
            } finally {
                s.close();
            }
            // Case 2: Scan with a timestamp greater than most recent timestamp
            // (in this case > 1000 and < LATEST_TIMESTAMP. Should get 2 rows.
            count = 0;
            s = t.obtainScanner(HConstants.COLUMN_FAMILY_ARRAY,
                HConstants.EMPTY_START_ROW, 10000L);
            try {
                HStoreKey key = new HStoreKey();
                TreeMap<Text, byte[]> results = new TreeMap<Text, byte[]>();
                while (s.next(key, results)) {
                    count += 1;
                }
                assertEquals("Number of rows should be 2", 2, count);
            } finally {
                s.close();
            }
            // Case 3: scan with timestamp equal to most recent timestamp
            // (in this case == 1000. Should get 2 rows.
            count = 0;
            s = t.obtainScanner(HConstants.COLUMN_FAMILY_ARRAY,
                HConstants.EMPTY_START_ROW, 1000L);
            try {
                HStoreKey key = new HStoreKey();
                TreeMap<Text, byte[]> results = new TreeMap<Text, byte[]>();
                while (s.next(key, results)) {
                    count += 1;
                }
                assertEquals("Number of rows should be 2", 2, count);
            } finally {
                s.close();
            }
            // Case 4: scan with timestamp greater than first timestamp but less than
            // second timestamp (100 < timestamp < 1000). Should get 2 rows.
            count = 0;
            s = t.obtainScanner(HConstants.COLUMN_FAMILY_ARRAY,
                HConstants.EMPTY_START_ROW, 500L);
            try {
                HStoreKey key = new HStoreKey();
                TreeMap<Text, byte[]> results = new TreeMap<Text, byte[]>();
                while (s.next(key, results)) {
                    count += 1;
                }
                assertEquals("Number of rows should be 2", 2, count);
            } finally {
                s.close();
            }
            // Case 5: scan with timestamp equal to first timestamp (100)
            // Should get 2 rows.
            count = 0;
            s = t.obtainScanner(HConstants.COLUMN_FAMILY_ARRAY,
                HConstants.EMPTY_START_ROW, 100L);
            try {
                HStoreKey key = new HStoreKey();
                TreeMap<Text, byte[]> results = new TreeMap<Text, byte[]>();
                while (s.next(key, results)) {
                    count += 1;
                }
                assertEquals("Number of rows should be 2", 2, count);
            } finally {
                s.close();
            }
        } finally {
            cluster.shutdown();
        }
    }

    /** {@inheritDoc} */
    @Override
    public void tearDown() throws Exception {
        if (dfsCluster != null) {
            StaticTestEnvironment.shutdownDfs(dfsCluster);
            dfsCluster = null;
        }
        super.tearDown();
    }

    /*
     * Convert a long value to a byte array
     * @param val
     * @return the byte array
     */
    private static byte[] toBytes(final long val) {
        ByteBuffer bb = ByteBuffer.allocate(Long.SIZE/Byte.SIZE);
        bb.putLong(val);
        return bb.array();
    }
}
|
|
/*
* The MIT License
*
* Copyright 2011 Yahoo!, Inc.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package hudson.tasks;
import hudson.Util;
import hudson.XmlFile;
import hudson.matrix.Axis;
import hudson.matrix.AxisList;
import hudson.matrix.MatrixProject;
import hudson.model.AbstractProject;
import hudson.model.Fingerprint;
import hudson.model.FingerprintCleanupThread;
import hudson.model.FreeStyleBuild;
import hudson.model.FreeStyleProject;
import hudson.model.Hudson;
import hudson.model.Result;
import hudson.util.RunList;
import java.io.File;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import static org.junit.Assert.*;
import hudson.util.StreamTaskListener;
import org.junit.Assume;
import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.Test;
import org.jvnet.hudson.test.Bug;
import org.jvnet.hudson.test.JenkinsRule;
import org.jvnet.hudson.test.recipes.LocalData;
/**
*
* @author dty
*/
@SuppressWarnings("rawtypes")
public class FingerprinterTest {
// Contents written to fingerprinted files; two projects fingerprinting the
// same content share a fingerprint and therefore a dependency edge.
private static final String[] singleContents = {
    "abcdef"
};
// File names the contents above are written into the workspace as.
private static final String[] singleFiles = {
    "test.txt"
};
private static final String[] singleContents2 = {
    "ghijkl"
};
private static final String[] singleFiles2 = {
    "test2.txt"
};
// A project fingerprinting both files relates to both single-file projects.
private static final String[] doubleContents = {
    "abcdef",
    "ghijkl"
};
private static final String[] doubleFiles = {
    "test.txt",
    "test2.txt"
};
// Names used by rename tests to verify fingerprint records follow job renames.
private static final String renamedProject1 = "renamed project 1";
private static final String renamedProject2 = "renamed project 2";
// Fresh Jenkins instance per test method.
@Rule public JenkinsRule j = new JenkinsRule();
@BeforeClass
public static void setUp() throws Exception {
    // Fingerprint-based dependency-graph edges are opt-in; enable for the suite.
    Fingerprinter.enableFingerprintsInDependencyGraph = true;
}
/** Two jobs fingerprinting the same file become upstream/downstream of each other. */
@Test public void fingerprintDependencies() throws Exception {
    FreeStyleProject producer = createFreeStyleProjectWithFingerprints(singleContents, singleFiles);
    FreeStyleProject consumer = createFreeStyleProjectWithFingerprints(singleContents, singleFiles);
    j.assertBuildStatusSuccess(producer.scheduleBuild2(0).get());
    j.assertBuildStatusSuccess(consumer.scheduleBuild2(0).get());
    List<AbstractProject> down = producer.getDownstreamProjects();
    List<AbstractProject> up = consumer.getUpstreamProjects();
    assertEquals(1, down.size());
    assertEquals(1, up.size());
    assertTrue(up.contains(producer));
    assertTrue(down.contains(consumer));
}
/** A job consuming two distinct fingerprints gains two upstream projects. */
@Test public void multipleUpstreamDependencies() throws Exception {
    FreeStyleProject producerA = createFreeStyleProjectWithFingerprints(singleContents, singleFiles);
    FreeStyleProject producerB = createFreeStyleProjectWithFingerprints(singleContents2, singleFiles2);
    FreeStyleProject consumer = createFreeStyleProjectWithFingerprints(doubleContents, doubleFiles);
    j.assertBuildStatusSuccess(producerA.scheduleBuild2(0).get());
    j.assertBuildStatusSuccess(producerB.scheduleBuild2(0).get());
    j.assertBuildStatusSuccess(consumer.scheduleBuild2(0).get());
    List<AbstractProject> downA = producerA.getDownstreamProjects();
    List<AbstractProject> downB = producerB.getDownstreamProjects();
    List<AbstractProject> up = consumer.getUpstreamProjects();
    assertEquals(1, downA.size());
    assertEquals(1, downB.size());
    assertEquals(2, up.size());
    assertTrue(up.contains(producerA));
    assertTrue(up.contains(producerB));
    assertTrue(downA.contains(consumer));
}
/** A job producing two distinct fingerprints gains two downstream projects. */
@Test public void multipleDownstreamDependencies() throws Exception {
    FreeStyleProject producer = createFreeStyleProjectWithFingerprints(doubleContents, doubleFiles);
    FreeStyleProject consumerA = createFreeStyleProjectWithFingerprints(singleContents, singleFiles);
    FreeStyleProject consumerB = createFreeStyleProjectWithFingerprints(singleContents2, singleFiles2);
    j.assertBuildStatusSuccess(producer.scheduleBuild2(0).get());
    j.assertBuildStatusSuccess(consumerA.scheduleBuild2(0).get());
    j.assertBuildStatusSuccess(consumerB.scheduleBuild2(0).get());
    List<AbstractProject> down = producer.getDownstreamProjects();
    List<AbstractProject> upA = consumerA.getUpstreamProjects();
    List<AbstractProject> upB = consumerB.getUpstreamProjects();
    assertEquals(2, down.size());
    assertEquals(1, upA.size());
    assertEquals(1, upB.size());
    assertTrue(upA.contains(producer));
    assertTrue(upB.contains(producer));
    assertTrue(down.contains(consumerA));
    assertTrue(down.contains(consumerB));
}
/** Deleting the build that produced a fingerprint removes the dependency edge. */
@Test public void dependencyExclusion() throws Exception {
    FreeStyleProject producer = createFreeStyleProjectWithFingerprints(singleContents, singleFiles);
    FreeStyleProject consumer = createFreeStyleProjectWithFingerprints(singleContents, singleFiles);
    FreeStyleBuild producerBuild = j.assertBuildStatusSuccess(producer.scheduleBuild2(0).get());
    j.assertBuildStatusSuccess(consumer.scheduleBuild2(0).get());
    producerBuild.delete();
    Hudson.getInstance().rebuildDependencyGraph();
    List<AbstractProject> up = consumer.getUpstreamProjects();
    List<AbstractProject> down = producer.getDownstreamProjects();
    assertEquals(0, up.size());
    assertEquals(0, down.size());
}
/** A job fingerprinting its own output must not become its own dependency. */
@Test public void circularDependency() throws Exception {
    FreeStyleProject project = createFreeStyleProjectWithFingerprints(singleContents, singleFiles);
    j.assertBuildStatusSuccess(project.scheduleBuild2(0).get());
    j.assertBuildStatusSuccess(project.scheduleBuild2(0).get());
    assertEquals(0, project.getUpstreamProjects().size());
    assertEquals(0, project.getDownstreamProjects().size());
}
/** Fingerprint edges also connect a matrix job to a freestyle consumer. */
@Test public void matrixDependency() throws Exception {
    MatrixProject producer = j.createMatrixProject();
    producer.setAxes(new AxisList(new Axis("foo", "a", "b")));
    FreeStyleProject consumer = createFreeStyleProjectWithFingerprints(singleContents, singleFiles);
    addFingerprinterToProject(producer, singleContents, singleFiles);
    j.jenkins.rebuildDependencyGraph();
    j.buildAndAssertSuccess(producer);
    j.buildAndAssertSuccess(consumer);
    j.waitUntilNoActivity();
    RunList<FreeStyleBuild> builds = consumer.getBuilds();
    assertEquals("There should only be one FreestyleBuild", 1, builds.size());
    FreeStyleBuild build = builds.iterator().next();
    assertEquals(Result.SUCCESS, build.getResult());
    List<AbstractProject> downstream = j.jenkins.getDependencyGraph().getDownstream(producer);
    assertTrue(downstream.contains(consumer));
    List<AbstractProject> upstream = j.jenkins.getDependencyGraph().getUpstream(consumer);
    assertTrue(upstream.contains(producer));
}
@Test public void projectRename() throws Exception {
    // Renaming a project must rewrite both the owner entry and the usage
    // entries of every fingerprint record that references it.
    FreeStyleProject upstream = createFreeStyleProjectWithFingerprints(singleContents, singleFiles);
    FreeStyleProject downstream = createFreeStyleProjectWithFingerprints(singleContents, singleFiles);
    FreeStyleBuild upstreamBuild = j.assertBuildStatusSuccess(upstream.scheduleBuild2(0).get());
    FreeStyleBuild downstreamBuild = j.assertBuildStatusSuccess(downstream.scheduleBuild2(0).get());
    String oldUpstreamName = upstream.getName();
    String oldDownstreamName = downstream.getName();
    // Verify that the owner entry in the fingerprint record is changed
    // after the source project is renamed.
    upstream.renameTo(renamedProject1);
    Fingerprinter.FingerprintAction action = upstreamBuild.getAction(Fingerprinter.FingerprintAction.class);
    assertNotNull(action);
    Collection<Fingerprint> fingerprints = action.getFingerprints().values();
    for (Fingerprint f : fingerprints) {
        assertTrue(f.getOriginal().is(upstream));
        // assertEquals reports both values on failure, unlike assertTrue(a.equals(b)).
        assertEquals(renamedProject1, f.getOriginal().getName());
        assertFalse(f.getOriginal().getName().equals(oldUpstreamName));
    }
    action = downstreamBuild.getAction(Fingerprinter.FingerprintAction.class);
    assertNotNull(action);
    fingerprints = action.getFingerprints().values();
    for (Fingerprint f : fingerprints) {
        assertTrue(f.getOriginal().is(upstream));
        assertEquals(renamedProject1, f.getOriginal().getName());
        assertFalse(f.getOriginal().getName().equals(oldUpstreamName));
    }
    // Verify that the usage entry in the fingerprint record is changed after
    // the sink project is renamed.
    downstream.renameTo(renamedProject2);
    upstream.renameTo(renamedProject1); // no-op: upstream already carries this name (kept from original scenario)
    action = upstreamBuild.getAction(Fingerprinter.FingerprintAction.class);
    assertNotNull(action);
    fingerprints = action.getFingerprints().values();
    for (Fingerprint f : fingerprints) {
        List<String> jobs = f.getJobs();
        assertTrue(jobs.contains(renamedProject2));
        assertFalse(jobs.contains(oldDownstreamName));
    }
    action = downstreamBuild.getAction(Fingerprinter.FingerprintAction.class);
    assertNotNull(action);
    fingerprints = action.getFingerprints().values();
    for (Fingerprint f : fingerprints) {
        List<String> jobs = f.getJobs();
        assertTrue(jobs.contains(renamedProject2));
        assertFalse(jobs.contains(oldDownstreamName));
    }
}
@Bug(17125)
@LocalData
// Verifies that FingerprintAction round-trips through build.xml serialization:
// records loaded from the @LocalData fixture (build #2) and records written by a
// fresh build (#3) must both survive a cache purge + reload with exact digests.
@Test public void actionSerialization() throws Exception {
FreeStyleProject job = j.jenkins.getItemByFullName("j", FreeStyleProject.class);
assertNotNull(job);
FreeStyleBuild build = job.getBuildByNumber(2);
assertNotNull(build);
Fingerprinter.FingerprintAction action = build.getAction(Fingerprinter.FingerprintAction.class);
assertNotNull(action);
assertEquals(build, action.getBuild());
// Golden digest value baked into the @LocalData fixture.
assertEquals("{a=2d5fac981a2e865baf0e15db655c7d63}", action.getRecords().toString());
j.assertBuildStatusSuccess(job.scheduleBuild2(0));
job._getRuns().purgeCache(); // force build records to be reloaded
build = job.getBuildByNumber(3);
assertNotNull(build);
System.out.println(new XmlFile(new File(build.getRootDir(), "build.xml")).asString());
action = build.getAction(Fingerprinter.FingerprintAction.class);
assertNotNull(action);
assertEquals(build, action.getBuild());
// Digest of the file produced by the fresh build.
assertEquals("{a=f31efcf9afe30617d6c46b919e702822}", action.getRecords().toString());
}
@SuppressWarnings("unchecked")
@Bug(18417)
@Test
// Verifies that FingerprintCleanupThread drops usage entries for deleted
// projects/builds and that the dependency graph shrinks accordingly.
public void fingerprintCleanup() throws Exception {
// Known-flaky on this particular CI host; skip there.
Assume.assumeFalse("for p3.upstreamProjects expected:<[hudson.model.FreeStyleProject@590e5b8[test0]]> but was:<[]>", "https://jenkins.ci.cloudbees.com/job/core/job/jenkins_main_trunk/".equals(System.getenv("JOB_URL")));
// file names shouldn't matter
FreeStyleProject p1 = createFreeStyleProjectWithFingerprints(singleContents, singleFiles);
FreeStyleProject p2 = createFreeStyleProjectWithFingerprints(singleContents, singleFiles2);
FreeStyleProject p3 = createFreeStyleProjectWithFingerprints(singleContents, singleFiles);
j.assertBuildStatusSuccess(p1.scheduleBuild2(0));
j.assertBuildStatusSuccess(p2.scheduleBuild2(0));
j.assertBuildStatusSuccess(p3.scheduleBuild2(0));
// All three projects fingerprint the same content, so one shared record.
// The "\n" matches the newline that `echo` appends to the file contents.
Fingerprint f = j.jenkins._getFingerprint(Util.getDigestOf(singleContents[0]+"\n"));
assertEquals(3,f.getUsages().size());
assertEquals(Arrays.asList(p1), p2.getUpstreamProjects());
assertEquals(Arrays.asList(p1), p3.getUpstreamProjects());
assertEquals(new HashSet(Arrays.asList(p2,p3)), new HashSet(p1.getDownstreamProjects()));
// discard the p3 records
p3.delete();
new FingerprintCleanupThread().execute(StreamTaskListener.fromStdout());
// records for p3 should have been deleted now
assertEquals(2,f.getUsages().size());
assertEquals(Arrays.asList(p1), p2.getUpstreamProjects());
assertEquals(Arrays.asList(p2), p1.getDownstreamProjects());
// do a new build in p2 #2 that points to a separate fingerprints
p2.getBuildersList().clear();
p2.getPublishersList().clear();
addFingerprinterToProject(p2,singleContents2,singleFiles2);
j.assertBuildStatusSuccess(p2.scheduleBuild2(0));
// another garbage collection that gets rid of p2 records from the fingerprint
p2.getBuildByNumber(1).delete();
new FingerprintCleanupThread().execute(StreamTaskListener.fromStdout());
// Only p1's usage of the original fingerprint should remain.
assertEquals(1,f.getUsages().size());
}
/**
 * Creates a fresh freestyle project whose builders echo each content string
 * into the corresponding file and whose publisher fingerprints those files.
 * (The former {@code throws IOException, Exception} clause was redundant:
 * {@code Exception} already covers {@code IOException}.)
 */
private FreeStyleProject createFreeStyleProjectWithFingerprints(String[] contents, String[] files) throws Exception {
    FreeStyleProject project = j.createFreeStyleProject();
    addFingerprinterToProject(project, contents, files);
    return project;
}
/**
 * Adds one echo-to-file build step per (content, file) pair and a
 * Fingerprinter publisher covering all of the files.
 * contents and files are expected to be parallel arrays.
 */
private void addFingerprinterToProject(AbstractProject<?, ?> project, String[] contents, String[] files) throws Exception {
    StringBuilder targets = new StringBuilder();
    for (int i = 0; i < contents.length; i++) {
        // Identical step for both project types; hoisted out of the branches.
        // MatrixProject and FreeStyleProject expose getBuildersList() separately,
        // not via AbstractProject, hence the instanceof dispatch.
        Shell step = new Shell("echo " + contents[i] + " > " + files[i]);
        if (project instanceof MatrixProject) {
            ((MatrixProject) project).getBuildersList().add(step);
        } else {
            ((FreeStyleProject) project).getBuildersList().add(step);
        }
        targets.append(files[i]).append(',');
    }
    // Trailing comma in the target list is accepted by Fingerprinter.
    project.getPublishersList().add(new Fingerprinter(targets.toString(), false));
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sysml.runtime.matrix.data;
import static jcuda.jcudnn.JCudnn.cudnnCreateFilterDescriptor;
import static jcuda.jcudnn.JCudnn.cudnnCreateTensorDescriptor;
import static jcuda.jcudnn.JCudnn.cudnnDestroyFilterDescriptor;
import static jcuda.jcudnn.JCudnn.cudnnDestroyTensorDescriptor;
import static jcuda.jcudnn.JCudnn.cudnnSetTensorNdDescriptor;
import static jcuda.jcudnn.JCudnn.cudnnDestroyDropoutDescriptor;
import static jcuda.jcudnn.JCudnn.cudnnDestroyRNNDescriptor;
import static jcuda.jcudnn.cudnnTensorFormat.CUDNN_TENSOR_NCHW;
import static jcuda.jcudnn.JCudnn.cudnnCreateRNNDescriptor;
import static jcuda.jcudnn.cudnnRNNInputMode.CUDNN_LINEAR_INPUT;
import static jcuda.jcudnn.cudnnDirectionMode.CUDNN_UNIDIRECTIONAL;
import static jcuda.jcudnn.cudnnRNNAlgo.CUDNN_RNN_ALGO_STANDARD;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.sysml.runtime.DMLRuntimeException;
import org.apache.sysml.runtime.controlprogram.context.ExecutionContext;
import org.apache.sysml.runtime.instructions.gpu.context.GPUContext;
import jcuda.Pointer;
import jcuda.jcudnn.JCudnn;
import jcuda.jcudnn.cudnnDropoutDescriptor;
import jcuda.jcudnn.cudnnFilterDescriptor;
import jcuda.jcudnn.cudnnRNNDescriptor;
import jcuda.jcudnn.cudnnTensorDescriptor;
/**
 * Holds the cuDNN descriptors and GPU scratch buffers needed to run an RNN
 * (rnn_relu / rnn_tanh / lstm / gru) via JCudnn, and releases all of them in
 * {@link #close()} (AutoCloseable, so usable in try-with-resources).
 */
public class LibMatrixCuDNNRnnAlgorithm implements java.lang.AutoCloseable {
private static final Log LOG = LogFactory.getLog(LibMatrixCuDNNRnnAlgorithm.class.getName());
GPUContext gCtx;
String instName;
// Dropout descriptor is required by cudnnSetRNNDescriptor_v6; configured below
// with dropout probability 0.
cudnnDropoutDescriptor dropoutDesc;
cudnnRNNDescriptor rnnDesc;
cudnnTensorDescriptor[] xDesc, dxDesc, yDesc, dyDesc; // of length T
// Hidden/cell state descriptors (h*, c*) and their gradients (dh*, dc*).
cudnnTensorDescriptor hxDesc, cxDesc, hyDesc, cyDesc, dhxDesc, dcxDesc, dhyDesc, dcyDesc;
cudnnFilterDescriptor wDesc;
cudnnFilterDescriptor dwDesc;
// Each size is paired with the buffer it describes; a size of 0 means the
// corresponding Pointer was never allocated on the device.
long sizeInBytes; Pointer workSpace;
long reserveSpaceSizeInBytes; Pointer reserveSpace;
long dropOutSizeInBytes; Pointer dropOutStateSpace;
/**
 * Allocates all descriptors and scratch space for one RNN invocation.
 * Based on the allocation calls below: N is the batch size, T the sequence
 * length, M the hidden size and D the input feature size.
 * The reserve space is only allocated when isTraining is true.
 *
 * @throws DMLRuntimeException if rnnMode is unsupported or the lstm weight
 *         count does not match what cuDNN expects
 */
public LibMatrixCuDNNRnnAlgorithm(ExecutionContext ec, GPUContext gCtx, String instName,
String rnnMode, int N, int T, int M, int D, boolean isTraining, Pointer w) throws DMLRuntimeException {
this.gCtx = gCtx;
this.instName = instName;
// Allocate input/output descriptors
xDesc = new cudnnTensorDescriptor[T];
dxDesc = new cudnnTensorDescriptor[T];
yDesc = new cudnnTensorDescriptor[T];
dyDesc = new cudnnTensorDescriptor[T];
// One descriptor per time step: inputs are N x D, outputs are N x M.
for(int t = 0; t < T; t++) {
xDesc[t] = allocateTensorDescriptorWithStride(N, D, 1);
dxDesc[t] = allocateTensorDescriptorWithStride(N, D, 1);
yDesc[t] = allocateTensorDescriptorWithStride(N, M, 1);
dyDesc[t] = allocateTensorDescriptorWithStride(N, M, 1);
}
// State descriptors: single layer (leading dimension 1), N x M each.
hxDesc = allocateTensorDescriptorWithStride(1, N, M);
dhxDesc = allocateTensorDescriptorWithStride(1, N, M);
cxDesc = allocateTensorDescriptorWithStride(1, N, M);
dcxDesc = allocateTensorDescriptorWithStride(1, N, M);
hyDesc = allocateTensorDescriptorWithStride(1, N, M);
dhyDesc = allocateTensorDescriptorWithStride(1, N, M);
cyDesc = allocateTensorDescriptorWithStride(1, N, M);
dcyDesc = allocateTensorDescriptorWithStride(1, N, M);
// Initial dropout descriptor
dropoutDesc = new cudnnDropoutDescriptor();
JCudnn.cudnnCreateDropoutDescriptor(dropoutDesc);
long [] _dropOutSizeInBytes = {-1};
JCudnn.cudnnDropoutGetStatesSize(gCtx.getCudnnHandle(), _dropOutSizeInBytes);
dropOutSizeInBytes = _dropOutSizeInBytes[0];
dropOutStateSpace = new Pointer();
if (dropOutSizeInBytes != 0) {
if(LOG.isDebugEnabled())
LOG.debug("Allocating " + dropOutSizeInBytes + " bytes for lstm dropout space.");
dropOutStateSpace = gCtx.allocate(instName, dropOutSizeInBytes);
}
// Dropout rate 0 (third argument) with a fixed seed of 12345.
JCudnn.cudnnSetDropoutDescriptor(dropoutDesc, gCtx.getCudnnHandle(), 0, dropOutStateSpace, dropOutSizeInBytes, 12345);
// Initialize RNN descriptor
rnnDesc = new cudnnRNNDescriptor();
cudnnCreateRNNDescriptor(rnnDesc);
JCudnn.cudnnSetRNNDescriptor_v6(gCtx.getCudnnHandle(), rnnDesc, M, 1, dropoutDesc,
CUDNN_LINEAR_INPUT, CUDNN_UNIDIRECTIONAL,
getCuDNNRnnMode(rnnMode), CUDNN_RNN_ALGO_STANDARD, LibMatrixCUDA.CUDNN_DATA_TYPE);
// Allocate filter descriptor
int expectedNumWeights = getExpectedNumWeights();
// Sanity check: for lstm, cuDNN's parameter count must match SystemML's
// (D+M+2)*4*M layout (4 gates, input + recurrent weights + 2 bias vectors).
if(rnnMode.equalsIgnoreCase("lstm") && (D+M+2)*4*M != expectedNumWeights) {
throw new DMLRuntimeException("Incorrect number of RNN parameters " + (D+M+2)*4*M + " != " + expectedNumWeights + ", where numFeatures=" + D + ", hiddenSize=" + M);
}
wDesc = allocateFilterDescriptor(expectedNumWeights);
dwDesc = allocateFilterDescriptor(expectedNumWeights);
// Setup workspace
workSpace = new Pointer(); reserveSpace = new Pointer();
sizeInBytes = getWorkspaceSize(T);
if(sizeInBytes != 0) {
if(LOG.isDebugEnabled())
LOG.debug("Allocating " + sizeInBytes + " bytes for lstm workspace.");
workSpace = gCtx.allocate(instName, sizeInBytes);
}
reserveSpaceSizeInBytes = 0;
// Reserve space is only needed to carry forward-pass state into backprop.
if(isTraining) {
reserveSpaceSizeInBytes = getReservespaceSize(T);
if (reserveSpaceSizeInBytes != 0) {
if(LOG.isDebugEnabled())
LOG.debug("Allocating " + reserveSpaceSizeInBytes + " bytes for lstm reserve space.");
reserveSpace = gCtx.allocate(instName, reserveSpaceSizeInBytes);
}
}
}
// Currently unused helper; kept for reference. Returns the number of linear
// layers cuDNN uses per RNN cell type (2 for plain RNN, 8 for lstm, 6 for gru).
@SuppressWarnings("unused")
private int getNumLinearLayers(String rnnMode) throws DMLRuntimeException {
int ret = 0;
if(rnnMode.equalsIgnoreCase("rnn_relu") || rnnMode.equalsIgnoreCase("rnn_tanh")) {
ret = 2;
}
else if(rnnMode.equalsIgnoreCase("lstm")) {
ret = 8;
}
else if(rnnMode.equalsIgnoreCase("gru")) {
ret = 6;
}
else {
throw new DMLRuntimeException("Unsupported rnn mode:" + rnnMode);
}
return ret;
}
// Queries cuDNN for the workspace size (in bytes) needed for the given
// sequence length, using the already-configured rnnDesc and xDesc.
private long getWorkspaceSize(int seqLength) {
long [] sizeInBytesArray = new long[1];
JCudnn.cudnnGetRNNWorkspaceSize(gCtx.getCudnnHandle(), rnnDesc, seqLength, xDesc, sizeInBytesArray);
return sizeInBytesArray[0];
}
// Queries cuDNN for the training reserve-space size (in bytes) for the given
// sequence length.
private long getReservespaceSize(int seqLength) {
long [] sizeInBytesArray = new long[1];
JCudnn.cudnnGetRNNTrainingReserveSize(gCtx.getCudnnHandle(), rnnDesc, seqLength, xDesc, sizeInBytesArray);
return sizeInBytesArray[0];
}
// Maps SystemML's string rnn mode to the corresponding cuDNN enum constant.
// Throws for anything other than rnn_relu / rnn_tanh / lstm / gru.
private int getCuDNNRnnMode(String rnnMode) throws DMLRuntimeException {
int rnnModeVal = -1;
if(rnnMode.equalsIgnoreCase("rnn_relu")) {
rnnModeVal = jcuda.jcudnn.cudnnRNNMode.CUDNN_RNN_RELU;
}
else if(rnnMode.equalsIgnoreCase("rnn_tanh")) {
rnnModeVal = jcuda.jcudnn.cudnnRNNMode.CUDNN_RNN_TANH;
}
else if(rnnMode.equalsIgnoreCase("lstm")) {
rnnModeVal = jcuda.jcudnn.cudnnRNNMode.CUDNN_LSTM;
}
else if(rnnMode.equalsIgnoreCase("gru")) {
rnnModeVal = jcuda.jcudnn.cudnnRNNMode.CUDNN_GRU;
}
else {
throw new DMLRuntimeException("Unsupported rnn mode:" + rnnMode);
}
return rnnModeVal;
}
// Asks cuDNN for the total parameter byte count and converts it to an element
// count by dividing by the element size of the configured data type.
private int getExpectedNumWeights() throws DMLRuntimeException {
long [] weightSizeInBytesArray = {-1}; // (D+M+2)*4*M
JCudnn.cudnnGetRNNParamsSize(gCtx.getCudnnHandle(), rnnDesc, xDesc[0], weightSizeInBytesArray, LibMatrixCUDA.CUDNN_DATA_TYPE);
// check if (D+M+2)*4M == weightsSize / sizeof(dataType) where weightsSize is given by 'cudnnGetRNNParamsSize'.
return LibMatrixCUDA.toInt(weightSizeInBytesArray[0]/LibMatrixCUDA.sizeOfDataType);
}
// Creates a 3-D NCHW filter descriptor of shape {numWeights, 1, 1} — cuDNN
// packs all RNN weights into a single flat filter.
private cudnnFilterDescriptor allocateFilterDescriptor(int numWeights) {
cudnnFilterDescriptor filterDesc = new cudnnFilterDescriptor();
cudnnCreateFilterDescriptor(filterDesc);
JCudnn.cudnnSetFilterNdDescriptor(filterDesc, LibMatrixCUDA.CUDNN_DATA_TYPE, CUDNN_TENSOR_NCHW, 3, new int[] {numWeights, 1, 1});
return filterDesc;
}
// Creates a fully-packed (row-major, contiguous) 3-D tensor descriptor for the
// given dimensions.
private static cudnnTensorDescriptor allocateTensorDescriptorWithStride(int firstDim, int secondDim, int thirdDim) throws DMLRuntimeException {
cudnnTensorDescriptor tensorDescriptor = new cudnnTensorDescriptor();
cudnnCreateTensorDescriptor(tensorDescriptor);
int [] dimA = new int[] {firstDim, secondDim, thirdDim};
// Strides for a dense layout: {d1*d2, d2, 1}.
int [] strideA = new int[] {dimA[2] * dimA[1], dimA[2], 1};
cudnnSetTensorNdDescriptor(tensorDescriptor, LibMatrixCUDA.CUDNN_DATA_TYPE, 3, dimA, strideA);
return tensorDescriptor;
}
/**
 * Destroys every descriptor and frees every device buffer that the
 * constructor allocated. Fields are nulled after release so a second call
 * on the descriptor fields is a no-op; idempotent for the descriptors,
 * while the buffer frees are guarded by the recorded sizes instead.
 */
@Override
public void close() {
if(dropoutDesc != null)
cudnnDestroyDropoutDescriptor(dropoutDesc);
dropoutDesc = null;
if(rnnDesc != null)
cudnnDestroyRNNDescriptor(rnnDesc);
rnnDesc = null;
if(hxDesc != null)
cudnnDestroyTensorDescriptor(hxDesc);
hxDesc = null;
if(dhxDesc != null)
cudnnDestroyTensorDescriptor(dhxDesc);
dhxDesc = null;
if(hyDesc != null)
cudnnDestroyTensorDescriptor(hyDesc);
hyDesc = null;
if(dhyDesc != null)
cudnnDestroyTensorDescriptor(dhyDesc);
dhyDesc = null;
if(cxDesc != null)
cudnnDestroyTensorDescriptor(cxDesc);
cxDesc = null;
if(dcxDesc != null)
cudnnDestroyTensorDescriptor(dcxDesc);
dcxDesc = null;
if(cyDesc != null)
cudnnDestroyTensorDescriptor(cyDesc);
cyDesc = null;
if(dcyDesc != null)
cudnnDestroyTensorDescriptor(dcyDesc);
dcyDesc = null;
if(wDesc != null)
cudnnDestroyFilterDescriptor(wDesc);
wDesc = null;
if(dwDesc != null)
cudnnDestroyFilterDescriptor(dwDesc);
dwDesc = null;
// Per-timestep descriptor arrays.
if(xDesc != null) {
for(cudnnTensorDescriptor dsc : xDesc) {
cudnnDestroyTensorDescriptor(dsc);
}
xDesc = null;
}
if(dxDesc != null) {
for(cudnnTensorDescriptor dsc : dxDesc) {
cudnnDestroyTensorDescriptor(dsc);
}
dxDesc = null;
}
if(yDesc != null) {
for(cudnnTensorDescriptor dsc : yDesc) {
cudnnDestroyTensorDescriptor(dsc);
}
yDesc = null;
}
if(dyDesc != null) {
for(cudnnTensorDescriptor dsc : dyDesc) {
cudnnDestroyTensorDescriptor(dsc);
}
dyDesc = null;
}
// Device buffers: freed only if they were actually allocated (size != 0).
// NOTE(review): a DMLRuntimeException here is wrapped as RuntimeException,
// which aborts the remaining frees — presumably acceptable since a failed
// cudaFree indicates an already-broken GPU context; confirm.
if(sizeInBytes != 0) {
try {
gCtx.cudaFreeHelper(instName, workSpace, gCtx.EAGER_CUDA_FREE);
} catch (DMLRuntimeException e) {
throw new RuntimeException(e);
}
}
workSpace = null;
if(reserveSpaceSizeInBytes != 0) {
try {
gCtx.cudaFreeHelper(instName, reserveSpace, gCtx.EAGER_CUDA_FREE);
} catch (DMLRuntimeException e) {
throw new RuntimeException(e);
}
}
reserveSpace = null;
if(dropOutSizeInBytes != 0) {
try {
gCtx.cudaFreeHelper(instName, dropOutStateSpace, gCtx.EAGER_CUDA_FREE);
} catch (DMLRuntimeException e) {
throw new RuntimeException(e);
}
}
dropOutStateSpace = null;
}
}
|
|
package org.broadinstitute.hellbender.tools.copynumber;
import htsjdk.samtools.SAMSequenceDictionary;
import htsjdk.samtools.util.Interval;
import htsjdk.samtools.util.IntervalList;
import org.apache.commons.math3.util.FastMath;
import org.broadinstitute.barclay.argparser.Argument;
import org.broadinstitute.barclay.argparser.CommandLineProgramProperties;
import org.broadinstitute.barclay.help.DocumentedFeature;
import org.broadinstitute.hellbender.cmdline.StandardArgumentDefinitions;
import org.broadinstitute.hellbender.engine.GATKTool;
import org.broadinstitute.hellbender.engine.ReferenceDataSource;
import org.broadinstitute.hellbender.tools.copynumber.arguments.CopyNumberArgumentValidationUtils;
import org.broadinstitute.hellbender.utils.*;
import picard.cmdline.programgroups.IntervalsManipulationProgramGroup;
import java.io.File;
import java.util.List;
import java.util.stream.Collectors;
/**
* Prepares bins for coverage collection.
*
* <p>
* The input intervals are first checked for overlapping intervals, which are merged.
* The resulting intervals are then padded. The padded intervals are then split into bins.
* Finally, bins that contain only Ns are filtered out.
* </p>
*
* <h3>Inputs</h3>
*
* <ul>
* <li>
* Reference FASTA file
* </li>
* <li>
* Intervals to be preprocessed.
* The argument {@code interval-merging-rule} must be set to {@link IntervalMergingRule#OVERLAPPING_ONLY}
* and all other common arguments for interval padding or merging must be set to their defaults.
* If no intervals are specified, then each contig will be assumed to be a single interval and binned accordingly;
* this produces bins appropriate for whole genome sequencing analyses.
* </li>
* <li>
* Padding length (in bp).
* Use {@code padding} to specify the size of each of the regions added to both ends of the intervals that result
* after overlapping intervals have been merged. Do not use the common {@code interval-padding} argument.
* Intervals that would overlap after padding by the specified amount are instead only
* padded until they are adjacent.
* </li>
* <li>
* Bin length (in bp).
* If this length is not commensurate with the length of a padded interval, then the last bin will be of
* different length than the others in that interval. If zero is specified, then no binning will be performed;
* this is generally appropriate for targeted analyses.
* </li>
* </ul>
*
* <h3>Output</h3>
*
* <ul>
* <li>
* Preprocessed Picard interval-list file.
* </li>
* </ul>
*
* <h3>Usage examples</h3>
*
* To pad intervals by 250 bases and disable binning (e.g., for targeted analyses):
*
* <pre>
* gatk PreprocessIntervals \
* -R reference.fa \
* -L intervals.interval_list \
* --bin-length 0 \
* --padding 250 \
* -O preprocessed_intervals.interval_list
* </pre>
*
* To generate consecutive bins of 1000 bases from the reference (e.g., for whole genome sequencing analyses):
*
* <pre>
* gatk PreprocessIntervals \
* -R reference.fa \
* --bin-length 1000 \
* --padding 0 \
* -O preprocessed_intervals.interval_list
* </pre>
*
* @author Marton Kanasz-Nagy <mkanaszn@broadinstitute.org>
* @author Samuel Lee <slee@broadinstitute.org>
*/
@CommandLineProgramProperties(
summary = "Prepares bins for coverage collection",
oneLineSummary = "Prepares bins for coverage collection",
programGroup = IntervalsManipulationProgramGroup.class
)
@DocumentedFeature
public final class PreprocessIntervals extends GATKTool {
public static final String BIN_LENGTH_LONG_NAME = "bin-length";
public static final String PADDING_LONG_NAME = "padding";
@Argument(
doc = "Length (in bp) of the bins. If zero, no binning will be performed.",
fullName = BIN_LENGTH_LONG_NAME,
optional = true,
minValue = 0
)
private int binLength = 1000;
@Argument(
doc = "Length (in bp) of the padding regions on each side of the intervals.",
fullName = PADDING_LONG_NAME,
optional = true,
minValue = 0
)
private int padding = 250;
@Argument(
doc = "Output Picard interval-list file containing the preprocessed intervals.",
fullName = StandardArgumentDefinitions.OUTPUT_LONG_NAME,
shortName = StandardArgumentDefinitions.OUTPUT_SHORT_NAME
)
private File outputPreprocessedIntervalsFile;
// A reference is mandatory: it supplies the sequence dictionary and the bases
// used to filter all-N bins.
@Override
public boolean requiresReference() {
return true;
}
@Override
public void onTraversalStart() {
validateArguments();
}
@Override
public void traverse() {} // no traversal for this tool
// All of the work happens here: pad -> bin -> filter -> write.
@Override
public Object onTraversalSuccess() {
final SAMSequenceDictionary sequenceDictionary = getBestAvailableSequenceDictionary();
final List<SimpleInterval> inputIntervals = hasUserSuppliedIntervals()
? intervalArgumentCollection.getIntervals(sequenceDictionary)
: IntervalUtils.getAllIntervalsForReference(sequenceDictionary); // if the user didn't add any intervals,
// we assume that they wanted to do whole genome sequencing
logger.info("Padding intervals...");
final IntervalList paddedIntervalList = padIntervals(inputIntervals, padding, sequenceDictionary);
logger.info("Generating bins...");
final IntervalList unfilteredBins = generateBins(paddedIntervalList, binLength, sequenceDictionary);
logger.info("Filtering bins containing only Ns...");
final ReferenceDataSource reference = ReferenceDataSource.of(referenceArguments.getReferencePath());
final IntervalList bins = filterBinsContainingOnlyNs(unfilteredBins, reference);
logger.info(String.format("Writing bins to %s...", outputPreprocessedIntervalsFile.getAbsolutePath()));
bins.write(outputPreprocessedIntervalsFile);
logger.info(String.format("%s complete.", getClass().getSimpleName()));
return null;
}
// Rejects disallowed interval-argument combinations and checks that the
// output file is writable before any work is done.
private void validateArguments() {
if (hasUserSuppliedIntervals()) {
CopyNumberArgumentValidationUtils.validateIntervalArgumentCollection(intervalArgumentCollection);
}
CopyNumberArgumentValidationUtils.validateOutputFiles(outputPreprocessedIntervalsFile);
}
/**
 * Pads each interval by {@code padding} bp on both sides, clamped to
 * [1, contig length]. Padded intervals that would overlap are trimmed back
 * to the midpoint between the ORIGINAL (unpadded) end/start so they end up
 * adjacent rather than overlapping.
 * NOTE(review): the midpoint uses int addition of two genomic coordinates;
 * presumably safe from overflow for real contig lengths — confirm.
 * Assumes inputIntervals is sorted and non-overlapping (TODO confirm —
 * only adjacent pairs are checked for overlap).
 */
private static IntervalList padIntervals(final List<SimpleInterval> inputIntervals, final int padding, final SAMSequenceDictionary sequenceDictionary) {
final List<SimpleInterval> paddedIntervals = inputIntervals.stream()
.map(i -> new SimpleInterval(
i.getContig(),
Math.max(1, i.getStart() - padding),
Math.min(i.getEnd() + padding, sequenceDictionary.getSequence(i.getContig()).getSequenceLength())))
.collect(Collectors.toList());
// alter the padded intervals in place to eliminate overlaps
for (int i = 0; i < paddedIntervals.size() - 1; i++) {
final SimpleInterval thisInterval = paddedIntervals.get(i);
final SimpleInterval nextInterval = paddedIntervals.get(i + 1);
if (thisInterval.overlaps(nextInterval)) {
final int originalThisEnd = inputIntervals.get(i).getEnd();
final int originalNextStart = inputIntervals.get(i + 1).getStart();
// Split the gap between the original intervals evenly; newNextStart is
// newThisEnd + 1 so the trimmed intervals are exactly adjacent.
final int newThisEnd = (originalThisEnd + originalNextStart) / 2;
final int newNextStart = newThisEnd + 1;
paddedIntervals.set(i, new SimpleInterval(thisInterval.getContig(), thisInterval.getStart(), newThisEnd));
paddedIntervals.set(i + 1, new SimpleInterval(nextInterval.getContig(), newNextStart, nextInterval.getEnd()));
}
}
final IntervalList paddedIntervalList = new IntervalList(sequenceDictionary);
paddedIntervals.forEach(i -> paddedIntervalList.add(new Interval(i.getContig(), i.getStart(), i.getEnd())));
return paddedIntervalList;
}
/**
 * Splits each interval into consecutive bins of {@code binLength} bp.
 * The last bin of an interval may be shorter. A binLength of 0 disables
 * binning and returns a copy of the input.
 */
private static IntervalList generateBins(final IntervalList preparedIntervalList, final int binLength, final SAMSequenceDictionary sequenceDictionary) {
if (binLength == 0) {
return IntervalList.copyOf(preparedIntervalList);
}
final IntervalList bins = new IntervalList(sequenceDictionary);
for (final Interval interval : preparedIntervalList) {
for (int binStart = interval.getStart(); binStart <= interval.getEnd(); binStart += binLength) {
final int binEnd = FastMath.min(binStart + binLength - 1, interval.getEnd());
bins.add(new Interval(interval.getContig(), binStart, binEnd));
}
}
return bins;
}
/**
 * Drops bins whose reference sequence consists entirely of N bases; bins
 * containing at least one non-N base are kept unchanged.
 */
private static IntervalList filterBinsContainingOnlyNs(final IntervalList unfilteredBins, final ReferenceDataSource reference) {
final IntervalList bins = new IntervalList(reference.getSequenceDictionary());
for (final Interval unfilteredBin : unfilteredBins) {
if (!Utils.stream(reference.query(new SimpleInterval(unfilteredBin))).allMatch(b -> Nucleotide.decode(b) == Nucleotide.N)) {
bins.add(unfilteredBin);
}
}
return bins;
}
}
|
|
/*
* Copyright 2004,2005 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ws.secpolicy;
import javax.xml.namespace.QName;
public class SPConstants {
public static final String P_NS = "http://schemas.xmlsoap.org/ws/2004/09/policy";
public static final String P_PREFIX = "wsp";
public static final QName POLICY = new QName(P_NS, "Policy", P_PREFIX);
public static final int SP_V11 = 1;
public static final int SP_V12 = 2;
// //////////////////////////////////////////////////////////////////////////////////////////////
public final static String LAYOUT = "Layout";
/**
* Security Header Layout : Strict
*/
public final static String LAYOUT_STRICT = "Strict";
/**
* Security Header Layout : Lax
*/
public final static String LAYOUT_LAX = "Lax";
/**
* Security Header Layout : LaxTimestampFirst
*/
public final static String LAYOUT_LAX_TIMESTAMP_FIRST = "LaxTimestampFirst";
/**
* Security Header Layout : LaxTimestampLast
*/
public final static String LAYOUT_LAX_TIMESTAMP_LAST = "LaxTimestampLast";
// //////////////////////////////////////////////////////////////////////////////////////////////
/**
* Protection Order : EncryptBeforeSigning
*/
public final static String ENCRYPT_BEFORE_SIGNING = "EncryptBeforeSigning";
/**
* Protection Order : SignBeforeEncrypting
*/
public final static String SIGN_BEFORE_ENCRYPTING = "SignBeforeEncrypting";
// //////////////////////////////////////////////////////////////////////////////////////////////
public final static String ENCRYPT_SIGNATURE = "EncryptSignature";
// //////////////////////////////////////////////////////////////////////////////////////////////
public final static String PROTECT_TOKENS = "ProtectTokens";
// //////////////////////////////////////////////////////////////////////////////////////////////
public final static String ONLY_SIGN_ENTIRE_HEADERS_AND_BODY = "OnlySignEntireHeadersAndBody";
// //////////////////////////////////////////////////////////////////////////////////////////////
public final static String INCLUDE_TIMESTAMP = "IncludeTimestamp";
// //////////////////////////////////////////////////////////////////////////////////////////////
public final static String SIGNED_PARTS = "SignedParts";
public final static String ENCRYPTED_PARTS = "EncryptedParts";
public final static String SIGNED_ELEMENTS = "SignedElements";
public final static String ENCRYPTED_ELEMENTS = "EncryptedElements";
public final static String REQUIRED_ELEMENTS = "RequiredElements";
public final static String CONTENT_ENCRYPTED_ELEMENTS = "ContentEncryptedElements";
public final static String REQUIRED_PARTS = "RequiredParts";
public final static String XPATH_VERSION = "XPathVersion";
public final static String XPATH_EXPR = "XPath";
// //////////////////////////////////////////////////////////////////////////////////////////////
// X509 Token types
public final static String X509_TOKEN = "X509Token";
public final static String WSS_X509_V1_TOKEN10 = "WssX509V1Token10";
public final static String WSS_X509_V3_TOKEN10 = "WssX509V3Token10";
public final static String WSS_X509_PKCS7_TOKEN10 = "WssX509Pkcs7Token10";
public final static String WSS_X509_PKI_PATH_V1_TOKEN10 = "WssX509PkiPathV1Token10";
public final static String WSS_X509_V1_TOKEN11 = "WssX509V1Token11";
public final static String WSS_X509_V3_TOKEN11 = "WssX509V3Token11";
public final static String WSS_X509_PKCS7_TOKEN11 = "WssX509Pkcs7Token11";
public final static String WSS_X509_PKI_PATH_V1_TOKEN11 = "WssX509PkiPathV1Token11";
public final static String USERNAME_TOKEN = "UsernameToken";
public final static String USERNAME_TOKEN10 = "WssUsernameToken10";
public final static String USERNAME_TOKEN11 = "WssUsernameToken11";
public final static String KERBEROS_TOKEN = "KerberosToken";
public final static String TRANSPORT_TOKEN = "TransportToken";
public final static String HTTPS_TOKEN = "HttpsToken";
public final static QName REQUIRE_CLIENT_CERTIFICATE = new QName("RequireClientCertificate");
public final static QName HTTP_BASIC_AUTHENTICATION = new QName("HttpBasicAuthentication");
public final static QName HTTP_DIGEST_AUTHENTICATION = new QName("HttpDigestAuthentication");
public final static String SECURITY_CONTEXT_TOKEN = "SecurityContextToken";
public final static String SECURE_CONVERSATION_TOKEN = "SecureConversationToken";
public final static String ISSUED_TOKEN = "IssuedToken";
public final static String SIGNATURE_TOKEN = "SignatureToken";
public final static String ENCRYPTION_TOKEN = "EncryptionToken";
public final static String PROTECTION_TOKEN = "ProtectionToken";
public final static String INITIATOR_TOKEN = "InitiatorToken";
public final static String RECIPIENT_TOKEN = "RecipientToken";
public final static String SUPPORTING_TOKENS = "SupportingTokens";
public final static String SIGNED_SUPPORTING_TOKENS = "SignedSupportingTokens";
public final static String ENDORSING_SUPPORTING_TOKENS = "EndorsingSupportingTokens";
public final static String SIGNED_ENDORSING_SUPPORTING_TOKENS = "SignedEndorsingSupportingTokens";
public final static String ENCRYPTED_SUPPORTING_TOKENS = "EncryptedSupportingTokens";
public final static String SIGNED_ENCRYPTED_SUPPORTING_TOKENS = "SignedEncryptedSupportingTokens";
public final static String ENDORSING_ENCRYPTED_SUPPORTING_TOKENS = "EndorsingEncryptedSupportingTokens";
public final static String SIGNED_ENDORSING_ENCRYPTED_SUPPORTING_TOKENS = "SignedEndorsingEncryptedSupportingTokens";
public final static int SUPPORTING_TOKEN_SUPPORTING = 1;
public final static int SUPPORTING_TOKEN_ENDORSING = 2;
public final static int SUPPORTING_TOKEN_SIGNED = 3;
public final static int SUPPORTING_TOKEN_SIGNED_ENDORSING = 4;
public final static int SUPPORTING_TOKEN_SIGNED_ENCRYPTED = 5;
public final static int SUPPORTING_TOKEN_ENCRYPTED = 6;
public final static int SUPPORTING_TOKEN_ENDORSING_ENCRYPTED = 7;
public final static int SUPPORTING_TOKEN_SIGNED_ENDORSING_ENCRYPTED = 8;
// //////////////////////////////////////////////////////////////////////////////////////////////
public final static String ALGO_SUITE = "AlgorithmSuite";
// /
// /Algorithm Suites
// /
public final static String ALGO_SUITE_BASIC256 = "Basic256";
public final static String ALGO_SUITE_BASIC192 = "Basic192";
public final static String ALGO_SUITE_BASIC128 = "Basic128";
public final static String ALGO_SUITE_TRIPLE_DES = "TripleDes";
public final static String ALGO_SUITE_BASIC256_RSA15 = "Basic256Rsa15";
public final static String ALGO_SUITE_BASIC192_RSA15 = "Basic192Rsa15";
public final static String ALGO_SUITE_BASIC128_RSA15 = "Basic128Rsa15";
public final static String ALGO_SUITE_TRIPLE_DES_RSA15 = "TripleDesRsa15";
public final static String ALGO_SUITE_BASIC256_SHA256 = "Basic256Sha256";
public final static String ALGO_SUITE_BASIC192_SHA256 = "Basic192Sha256";
public final static String ALGO_SUITE_BASIC128_SHA256 = "Basic128Sha256";
public final static String ALGO_SUITE_TRIPLE_DES_SHA256 = "TripleDesSha256";
public final static String ALGO_SUITE_BASIC256_SHA256_RSA15 = "Basic256Sha256Rsa15";
public final static String ALGO_SUITE_BASIC192_SHA256_RSA15 = "Basic192Sha256Rsa15";
public final static String ALGO_SUITE_BASIC128_SHA256_RSA15 = "Basic128Sha256Rsa15";
public final static String ALGO_SUITE_TRIPLE_DES_SHA256_RSA15 = "TripleDesSha256Rsa15";
// /
// /Algorithms
// /
public final static String HMAC_SHA1 = "http://www.w3.org/2000/09/xmldsig#hmac-sha1";
public final static String RSA_SHA1 = "http://www.w3.org/2000/09/xmldsig#rsa-sha1";
public final static String SHA1 = "http://www.w3.org/2000/09/xmldsig#sha1";
public final static String SHA256 = "http://www.w3.org/2001/04/xmlenc#sha256";
public final static String SHA512 = "http://www.w3.org/2001/04/xmlenc#sha512";
public final static String AES128 = "http://www.w3.org/2001/04/xmlenc#aes128-cbc";
public final static String AES192 = "http://www.w3.org/2001/04/xmlenc#aes192-cbc";
public final static String AES256 = "http://www.w3.org/2001/04/xmlenc#aes256-cbc";
public final static String TRIPLE_DES = "http://www.w3.org/2001/04/xmlenc#tripledes-cbc";
public final static String KW_AES128 = "http://www.w3.org/2001/04/xmlenc#kw-aes128";
public final static String KW_AES192 = "http://www.w3.org/2001/04/xmlenc#kw-aes192";
public final static String KW_AES256 = "http://www.w3.org/2001/04/xmlenc#kw-aes256";
public final static String KW_TRIPLE_DES = "http://www.w3.org/2001/04/xmlenc#kw-tripledes";
public final static String KW_RSA_OAEP = "http://www.w3.org/2001/04/xmlenc#rsa-oaep-mgf1p";
public final static String KW_RSA15 = "http://www.w3.org/2001/04/xmlenc#rsa-1_5";
public final static String P_SHA1 = "http://schemas.xmlsoap.org/ws/2005/02/sc/dk/p_sha1";
public final static String P_SHA1_L128 = "http://schemas.xmlsoap.org/ws/2005/02/sc/dk/p_sha1";
public final static String P_SHA1_L192 = "http://schemas.xmlsoap.org/ws/2005/02/sc/dk/p_sha1";
public final static String P_SHA1_L256 = "http://schemas.xmlsoap.org/ws/2005/02/sc/dk/p_sha1";
public final static String XPATH = "http://www.w3.org/TR/1999/REC-xpath-19991116";
public final static String XPATH20 = "http://www.w3.org/2002/06/xmldsig-filter2";
public final static String C14N = "http://www.w3.org/2001/10/xml-c14n#";
public final static String EX_C14N = "http://www.w3.org/2001/10/xml-exc-c14n#";
public final static String SNT = "http://www.w3.org/TR/soap12-n11n";
public final static String STRT10 = "http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-soap-message-security-1.0#STR-Transform";
// /////////////////////////////////////////////////////////////////////////////////////////////
public static final String INCLUSIVE_C14N = "InclusiveC14N";
public static final String SOAP_NORMALIZATION_10 = "SoapNormalization10";
public static final String STR_TRANSFORM_10 = "STRTransform10";
public static final String XPATH10 = "XPath10";
public static final String XPATH_FILTER20 = "XPathFilter20";
// //////////////////////////////////////////////////////////////////////////////////////////////
public final static String ATTR_INCLUDE_TOKEN = "IncludeToken";
public static final String INCLUDE_TOKEN_NEVER_SUFFIX = "/IncludeToken/Never";
public static final String INCLUDE_TOKEN_ONCE_SUFFIX = "/IncludeToken/Once";
public static final String INCLUDE_TOEKN_ALWAYS_TO_RECIPIENT_SUFFIX = "/IncludeToken/AlwaysToRecipient";
public static final String INCLUDE_TOEKN_ALWAYS_TO_INITIATOR_SUFFIX = "/IncludeToken/AlwaysToInitiator";
public static final String INCLUDE_TOEKN_ALWAYS_SUFFIX = "/IncludeToken/Always";
public static final int INCLUDE_TOKEN_NEVER = 1;
public static final int INCLUDE_TOKEN_ONCE = 2;
public static final int INCLUDE_TOEKN_ALWAYS_TO_RECIPIENT = 3;
public static final int INCLUDE_TOEKN_ALWAYS_TO_INITIATOR = 4;
public static final int INCLUDE_TOEKN_ALWAYS = 5;
// //////////////////////////////////////////////////////////////////////////////////////////////
public static final String TRANSPORT_BINDING = "TransportBinding";
public static final String ASYMMETRIC_BINDING = "AsymmetricBinding";
public static final String SYMMETRIC_BINDING = "SymmetricBinding";
// //////////////////////////////////////////////////////////////////////////////////////////////
public static final String REQUIRE_KEY_IDENTIFIRE_REFERENCE = "RequireKeyIdentifierReference";
public static final String REQUIRE_ISSUER_SERIAL_REFERENCE = "RequireIssuerSerialReference";
public static final String REQUIRE_EMBEDDED_TOKEN_REFERENCE = "RequireEmbeddedTokenReference";
public static final String REQUIRE_THUMBPRINT_REFERENCE = "RequireThumbprintReference";
public static final String REQUIRE_SIGNATURE_CONFIRMATION = "RequireSignatureConfirmation";
// //////////////////////////////////////////////////////////////////////////////////////////////
public static final String MUST_SUPPORT_REF_KEY_IDENTIFIER = "MustSupportRefKeyIdentifier";
public static final String MUST_SUPPORT_REF_ISSUER_SERIAL = "MustSupportRefIssuerSerial";
public static final String MUST_SUPPORT_REF_EXTERNAL_URI = "MustSupportRefExternalURI";
public static final String MUST_SUPPORT_REF_EMBEDDED_TOKEN = "MustSupportRefEmbeddedToken";
public static final String MUST_SUPPORT_REF_THUMBPRINT = "MustSupportRefThumbprint";
public static final String MUST_SUPPORT_REF_ENCRYPTED_KEY = "MustSupportRefEncryptedkey";
// //////////////////////////////////////////////////////////////////////////////////////////////
public static final String WSS10 = "Wss10";
public static final String WSS11 = "Wss11";
// //////////////////////////////////////////////////////////////////////////////////////////////
public static final String TRUST_10 = "Trust10";
public static final String TRUST_13 = "Trust13";
public static final String MUST_SUPPORT_CLIENT_CHALLENGE = "MustSupportClientChanllenge";
public static final String MUST_SUPPORT_SERVER_CHALLENGE = "MustSupportServerChanllenge";
public static final String REQUIRE_CLIENT_ENTROPY = "RequireClientEntropy";
public static final String REQUIRE_SERVER_ENTROPY = "RequireServerEntropy";
public static final String MUST_SUPPORT_ISSUED_TOKENS = "MustSupportIssuedTokens";
public static final String REQUIRE_REQUEST_SECURITY_TOKEN_COLLECTION = "RequireRequestSecurityTokenCollection";
public static final String REQUIRE_APPLIES_TO = "RequireAppliesTo";
public static final String ISSUER = "Issuer";
public static final String REQUIRE_DERIVED_KEYS = "RequireDerivedKeys";
public static final String REQUIRE_IMPLIED_DERIVED_KEYS = "RequireImpliedDerivedKeys";
public static final String REQUIRE_EXPLICIT_DERIVED_KEYS = "RequireExplicitDerivedKeys";
public static final String REQUIRE_EXTERNAL_URI_REFERNCE = "RequireExternalUriReference";
public static final String REQUIRE_EXTERNAL_REFERNCE = "RequireExternalReference";
public static final String REQUIRE_INTERNAL_REFERNCE = "RequireInternalReference";
public static final String REQUEST_SECURITY_TOKEN_TEMPLATE = "RequestSecurityTokenTemplate";
public static final String REQUEST_SECURITY_TOKEN_TEMPLATE_TOKEN_TYPE = "TokenType";
public static final String REQUEST_SECURITY_TOKEN_TEMPLATE_CLAIMS = "Claims";
public static final String REQUEST_SECURITY_TOKEN_TEMPLATE_CLAIM_TYPE = "ClaimType";
public static final String REQUEST_SECURITY_TOKEN_TEMPLATE_CLAIM_TYPE_URI = "Uri";
public static final String REQUEST_SECURITY_TOKEN_TEMPLATE_CLAIM_TYPE_OPTIONAL = "Optional";
public static final String SC10_SECURITY_CONTEXT_TOKEN = "SC10SecurityContextToken";
public static final String BOOTSTRAP_POLICY = "BootstrapPolicy";
// //////////////////////////////////////////////////////////////////////////////////////////////
public static final String HEADER = "Header";
public static final String BODY = "Body";
public static final String ATTACHMENTS = "Attachments";
public static final QName NAME = new QName("Name");
public static final QName NAMESPACE = new QName("Namespace");
// //////////////////////////////////////////////////////////////////////////////////////////////
public static final String NO_PASSWORD = "NoPassword";
public static final String HASH_PASSWORD = "HashPassword";
// //////////////////////////////////////////////////////////////////////////////////////////////
public static final String REQUIRE_KERBEROS_V5_TOKEN_11 = "WssKerberosV5ApReqToken11";
public static final String REQUIRE_KERBEROS_GSS_V5_TOKEN_11 = "WssGssKerberosV5ApReqToken11";
}
|
|
/*
* This file is part of the Heritrix web crawler (crawler.archive.org).
*
* Licensed to the Internet Archive (IA) by one or more individual
* contributors.
*
* The IA licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.archive.io.arc;
import java.io.File;
import java.io.IOException;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import org.archive.io.ArchiveRecordHeader;
/**
 * An immutable class to hold an ARC record meta data.
 *
 * @author stack
 */
public class ARCRecordMetaData implements ArchiveRecordHeader, ARCConstants {
    /**
     * Map of record header fields.
     *
     * <p>We store all in a hashmap. This way we can hold version 1 or
     * version 2 record meta data.
     *
     * <p>Keys are lowercase.
     */
    protected Map<String, Object> headerFields = null;

    /**
     * Digest for the record.
     *
     * <p>Only available after the record has been read in totality.
     */
    private String digest = null;

    /**
     * Status for this request.
     *
     * <p>There may be no status.
     */
    private String statusCode = null;

    /**
     * The arc this metadata came out of.
     * Descriptive String, either path or URL.
     */
    private String arc = null;

    /** Offset within the record at which the content proper begins. */
    private int contentBegin = 0;

    /**
     * Shut down the default constructor.
     */
    protected ARCRecordMetaData() {
        super();
    }

    /**
     * Constructor.
     *
     * @param arc The arc file this metadata came out of.
     * @param headerFields Hash of meta fields.
     * @throws IOException If a required version-1 header field is missing.
     */
    public ARCRecordMetaData(final String arc, final Map<String, Object> headerFields)
            throws IOException {
        // Fail fast if any of the minimum required fields is absent.
        for (final String requiredField : REQUIRED_VERSION_1_HEADER_FIELDS) {
            testRequiredField(headerFields, requiredField);
        }
        this.headerFields = headerFields;
        this.arc = arc;
    }

    /**
     * Test required field is present in hash.
     *
     * @param fields Map of fields.
     * @param requiredField Field to test for.
     * @throws IOException If required field is not present.
     */
    protected void testRequiredField(final Map<String, Object> fields,
            final String requiredField) throws IOException {
        if (!fields.containsKey(requiredField)) {
            throw new IOException("Required field " + requiredField +
                " not in meta data.");
        }
    }

    /**
     * Get the time when the record was harvested.
     * <p>
     * Returns the date in Heritrix 14 digit time format (UTC). See the
     * {@link org.archive.util.ArchiveUtils} class for converting to Java
     * dates.
     *
     * @return Header date in Heritrix 14 digit format.
     * @see org.archive.util.ArchiveUtils#parse14DigitDate(String)
     */
    public String getDate() {
        return (String) this.headerFields.get(DATE_FIELD_KEY);
    }

    /**
     * @return Return length of the record.
     */
    public long getLength() {
        return Long.parseLong((String) this.headerFields.get(LENGTH_FIELD_KEY));
    }

    /**
     * @return Return Content-Length of the contents of the record
     * Same as record length for arcs? TODO
     */
    public long getContentLength() {
        return getLength();
    }

    /**
     * @return Header url.
     */
    public String getUrl() {
        return (String) this.headerFields.get(URL_FIELD_KEY);
    }

    /**
     * @return IP.
     */
    public String getIp() {
        return (String) this.headerFields.get(IP_HEADER_FIELD_KEY);
    }

    /**
     * @return mimetype The mimetype that is in the ARC metaline -- NOT the http
     * content-type content.
     */
    public String getMimetype() {
        return (String) this.headerFields.get(MIMETYPE_FIELD_KEY);
    }

    /**
     * @return Arcfile version.
     */
    public String getVersion() {
        return (String) this.headerFields.get(VERSION_FIELD_KEY);
    }

    /**
     * @return Arcfile origin code.
     */
    public String getOrigin() {
        return (String) this.headerFields.get(ORIGIN_FIELD_KEY);
    }

    /**
     * @return Offset into arcfile at which this record begins.
     */
    public long getOffset() {
        return ((Long) this.headerFields.get(ABSOLUTE_OFFSET_KEY)).longValue();
    }

    /**
     * @param key Key to use looking up field value.
     * @return value for passed key of null if no such entry.
     */
    public Object getHeaderValue(final String key) {
        return this.headerFields.get(key);
    }

    /**
     * @return Header field name keys.
     */
    public Set<String> getHeaderFieldKeys() {
        return this.headerFields.keySet();
    }

    /**
     * @return Map of header fields.
     */
    public Map<String, Object> getHeaderFields() {
        return this.headerFields;
    }

    /**
     * @return Returns identifier for ARC.
     */
    public String getArc() {
        return this.arc;
    }

    /**
     * @return Convenience method that does a
     * return new File(this.arc) (Be aware this.arc is not always
     * full path to an ARC file -- may be an URL). Test
     * returned file for existence.
     */
    public File getArcFile() {
        return new File(this.arc);
    }

    /**
     * @return Returns the digest.
     */
    public String getDigest() {
        return this.digest;
    }

    /**
     * @param d The digest to set.
     */
    public void setDigest(final String d) {
        this.digest = d;
    }

    /**
     * @return Returns the statusCode. May be null.
     */
    public String getStatusCode() {
        return this.statusCode;
    }

    /**
     * @param statusCode The statusCode to set.
     */
    public void setStatusCode(final String statusCode) {
        this.statusCode = statusCode;
    }

    @Override
    public String toString() {
        return ((this.arc != null) ? this.arc : "") +
            ": " +
            ((this.headerFields != null) ? this.headerFields.toString() : "");
    }

    /** @return Identifier of the reader (the ARC) this record came out of. */
    public String getReaderIdentifier() {
        return this.getArc();
    }

    /** @return Record identifier composed of harvest date and URL. */
    public String getRecordIdentifier() {
        return getDate() + "/" + getUrl();
    }

    /** @return Offset within the record at which content begins. */
    public int getContentBegin() {
        return this.contentBegin;
    }

    /** @param offset Offset within the record at which content begins. */
    protected void setContentBegin(final int offset) {
        this.contentBegin = offset;
    }
}
|
|
package de.ayesolutions.gogs.client.model;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Date;
/**
 * repository model class.
 *
 * <p>Jackson-bound model for a Gogs repository as returned by the REST API.
 *
 * @author Christian Aye - c.aye@aye-solutions.de
 */
public class Repository {

    /**
     * repository permission class.
     *
     * <p>Declared {@code static}: a Jackson-bound nested model type should not be a
     * non-static inner class — inner classes carry a hidden reference to the
     * enclosing instance, which jackson-databind cannot always supply during
     * deserialization and which serves no purpose here.
     */
    public static class RepositoryPermission {

        // Caller may administer the repository.
        private boolean admin;
        // Caller may push to the repository.
        private boolean push;
        // Caller may pull from the repository.
        private boolean pull;

        public boolean isAdmin() {
            return admin;
        }

        public void setAdmin(boolean admin) {
            this.admin = admin;
        }

        public boolean isPush() {
            return push;
        }

        public void setPush(boolean push) {
            this.push = push;
        }

        public boolean isPull() {
            return pull;
        }

        public void setPull(boolean pull) {
            this.pull = pull;
        }
    }

    private Long id;

    private User owner;

    private String name;

    @JsonProperty("full_name")
    private String fullName;

    private String description;

    // "private" is a Java keyword, hence the remapped property name.
    @JsonProperty("private")
    private Boolean privateRepository;

    private Boolean fork;

    @JsonProperty("html_url")
    private String htmlUrl;

    @JsonProperty("ssh_url")
    private String sshUrl;

    @JsonProperty("clone_url")
    private String cloneUrl;

    private String website;

    @JsonProperty("stars_count")
    private Integer starsCount;

    @JsonProperty("forks_count")
    private Integer forks;

    @JsonProperty("watchers_count")
    private Integer watchers;

    @JsonProperty("open_issues_count")
    private Integer openIssues;

    @JsonProperty("default_branch")
    private String defaultBranch;

    @JsonProperty("created_at")
    private Date created;

    @JsonProperty("updated_at")
    private Date updated;

    private RepositoryPermission permissions;

    public Long getId() {
        return id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    public User getOwner() {
        return owner;
    }

    public void setOwner(User owner) {
        this.owner = owner;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getFullName() {
        return fullName;
    }

    public void setFullName(String fullName) {
        this.fullName = fullName;
    }

    public String getDescription() {
        return description;
    }

    public void setDescription(String description) {
        this.description = description;
    }

    public Boolean getPrivateRepository() {
        return privateRepository;
    }

    public void setPrivateRepository(Boolean privateRepository) {
        this.privateRepository = privateRepository;
    }

    public Boolean getFork() {
        return fork;
    }

    public void setFork(Boolean fork) {
        this.fork = fork;
    }

    public String getHtmlUrl() {
        return htmlUrl;
    }

    public void setHtmlUrl(String htmlUrl) {
        this.htmlUrl = htmlUrl;
    }

    public String getSshUrl() {
        return sshUrl;
    }

    public void setSshUrl(String sshUrl) {
        this.sshUrl = sshUrl;
    }

    public String getCloneUrl() {
        return cloneUrl;
    }

    public void setCloneUrl(String cloneUrl) {
        this.cloneUrl = cloneUrl;
    }

    public String getWebsite() {
        return website;
    }

    public void setWebsite(String website) {
        this.website = website;
    }

    public Integer getStarsCount() {
        return starsCount;
    }

    public void setStarsCount(Integer starsCount) {
        this.starsCount = starsCount;
    }

    public Integer getForks() {
        return forks;
    }

    public void setForks(Integer forks) {
        this.forks = forks;
    }

    public Integer getWatchers() {
        return watchers;
    }

    public void setWatchers(Integer watchers) {
        this.watchers = watchers;
    }

    public Integer getOpenIssues() {
        return openIssues;
    }

    public void setOpenIssues(Integer openIssues) {
        this.openIssues = openIssues;
    }

    public String getDefaultBranch() {
        return defaultBranch;
    }

    public void setDefaultBranch(String defaultBranch) {
        this.defaultBranch = defaultBranch;
    }

    public Date getCreated() {
        return created;
    }

    public void setCreated(Date created) {
        this.created = created;
    }

    public Date getUpdated() {
        return updated;
    }

    public void setUpdated(Date updated) {
        this.updated = updated;
    }

    public RepositoryPermission getPermissions() {
        return permissions;
    }

    public void setPermissions(RepositoryPermission permissions) {
        this.permissions = permissions;
    }
}
|
|
/*
* Copyright 2016 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.cdap.http.internal;
import io.netty.util.concurrent.AbstractEventExecutor;
import io.netty.util.concurrent.EventExecutor;
import io.netty.util.concurrent.EventExecutorGroup;
import io.netty.util.concurrent.Future;
import io.netty.util.concurrent.OrderedEventExecutor;
import io.netty.util.concurrent.ScheduledFuture;
import io.netty.util.internal.ObjectUtil;
import io.netty.util.internal.PlatformDependent;
import io.netty.util.internal.UnstableApi;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Queue;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicInteger;
/**
* {@link EventExecutorGroup} which will preserve {@link Runnable} execution order but makes no guarantees about what
* {@link EventExecutor} (and therefore {@link Thread}) will be used to execute the {@link Runnable}s.
*
* <p>The {@link EventExecutorGroup#next()} for the wrapped {@link EventExecutorGroup} must <strong>NOT</strong> return
* executors of type {@link OrderedEventExecutor}.
*
* NOTE: This class is copied from the netty project to fix the netty bug #8230. This class should be removed
* after the fix goes in the netty library
*/
@UnstableApi
public final class NonStickyEventExecutorGroup implements EventExecutorGroup {

    // Underlying group whose (unordered) executors actually run the work.
    private final EventExecutorGroup group;
    // Upper bound on the number of queued tasks drained per run() pass; bounds
    // how long one logical "ordered" executor can monopolize an underlying thread.
    private final int maxTaskExecutePerRun;

    /**
     * Creates a new instance. Be aware that the given {@link EventExecutorGroup} <strong>MUST NOT</strong> contain
     * any {@link OrderedEventExecutor}s.
     */
    public NonStickyEventExecutorGroup(EventExecutorGroup group) {
        this(group, 1024);
    }

    /**
     * Creates a new instance. Be aware that the given {@link EventExecutorGroup} <strong>MUST NOT</strong> contain
     * any {@link OrderedEventExecutor}s.
     *
     * @param maxTaskExecutePerRun maximum tasks drained per scheduling pass; must be positive.
     */
    public NonStickyEventExecutorGroup(EventExecutorGroup group, int maxTaskExecutePerRun) {
        this.group = verify(group);
        this.maxTaskExecutePerRun = ObjectUtil.checkPositive(maxTaskExecutePerRun, "maxTaskExecutePerRun");
    }

    // Rejects a group containing OrderedEventExecutors: wrapping executors that
    // already guarantee ordering would defeat the purpose of this class.
    private static EventExecutorGroup verify(EventExecutorGroup group) {
        Iterator<EventExecutor> executors = ObjectUtil.checkNotNull(group, "group").iterator();
        while (executors.hasNext()) {
            EventExecutor executor = executors.next();
            if (executor instanceof OrderedEventExecutor) {
                throw new IllegalArgumentException("EventExecutorGroup " + group
                    + " contains OrderedEventExecutors: " + executor);
            }
        }
        return group;
    }

    // Wraps an underlying executor with the ordering adapter defined below.
    private NonStickyOrderedEventExecutor newExecutor(EventExecutor executor) {
        return new NonStickyOrderedEventExecutor(executor, maxTaskExecutePerRun);
    }

    // ---- Lifecycle and submission methods simply delegate to the wrapped group. ----

    @Override
    public boolean isShuttingDown() {
        return group.isShuttingDown();
    }

    @Override
    public Future<?> shutdownGracefully() {
        return group.shutdownGracefully();
    }

    @Override
    public Future<?> shutdownGracefully(long quietPeriod, long timeout, TimeUnit unit) {
        return group.shutdownGracefully(quietPeriod, timeout, unit);
    }

    @Override
    public Future<?> terminationFuture() {
        return group.terminationFuture();
    }

    @SuppressWarnings("deprecation")
    @Override
    public void shutdown() {
        group.shutdown();
    }

    @SuppressWarnings("deprecation")
    @Override
    public List<Runnable> shutdownNow() {
        return group.shutdownNow();
    }

    // next() and iterator() return freshly wrapped executors so each caller gets
    // its own ordered facade over the underlying (unordered) executors.
    @Override
    public EventExecutor next() {
        return newExecutor(group.next());
    }

    @Override
    public Iterator<EventExecutor> iterator() {
        final Iterator<EventExecutor> itr = group.iterator();
        return new Iterator<EventExecutor>() {
            @Override
            public boolean hasNext() {
                return itr.hasNext();
            }

            @Override
            public EventExecutor next() {
                return newExecutor(itr.next());
            }

            @Override
            public void remove() {
                itr.remove();
            }
        };
    }

    @Override
    public Future<?> submit(Runnable task) {
        return group.submit(task);
    }

    @Override
    public <T> Future<T> submit(Runnable task, T result) {
        return group.submit(task, result);
    }

    @Override
    public <T> Future<T> submit(Callable<T> task) {
        return group.submit(task);
    }

    @Override
    public ScheduledFuture<?> schedule(Runnable command, long delay, TimeUnit unit) {
        return group.schedule(command, delay, unit);
    }

    @Override
    public <V> ScheduledFuture<V> schedule(Callable<V> callable, long delay, TimeUnit unit) {
        return group.schedule(callable, delay, unit);
    }

    @Override
    public ScheduledFuture<?> scheduleAtFixedRate(Runnable command, long initialDelay, long period, TimeUnit unit) {
        return group.scheduleAtFixedRate(command, initialDelay, period, unit);
    }

    @Override
    public ScheduledFuture<?> scheduleWithFixedDelay(Runnable command, long initialDelay, long delay, TimeUnit unit) {
        return group.scheduleWithFixedDelay(command, initialDelay, delay, unit);
    }

    @Override
    public boolean isShutdown() {
        return group.isShutdown();
    }

    @Override
    public boolean isTerminated() {
        return group.isTerminated();
    }

    @Override
    public boolean awaitTermination(long timeout, TimeUnit unit) throws InterruptedException {
        return group.awaitTermination(timeout, unit);
    }

    @Override
    public <T> List<java.util.concurrent.Future<T>> invokeAll(
        Collection<? extends Callable<T>> tasks) throws InterruptedException {
        return group.invokeAll(tasks);
    }

    @Override
    public <T> List<java.util.concurrent.Future<T>> invokeAll(
        Collection<? extends Callable<T>> tasks, long timeout, TimeUnit unit) throws InterruptedException {
        return group.invokeAll(tasks, timeout, unit);
    }

    @Override
    public <T> T invokeAny(Collection<? extends Callable<T>> tasks) throws InterruptedException, ExecutionException {
        return group.invokeAny(tasks);
    }

    @Override
    public <T> T invokeAny(Collection<? extends Callable<T>> tasks, long timeout, TimeUnit unit)
        throws InterruptedException, ExecutionException, TimeoutException {
        return group.invokeAny(tasks, timeout, unit);
    }

    @Override
    public void execute(Runnable command) {
        group.execute(command);
    }

    /**
     * Adapter that provides in-order execution of its queued tasks on top of an
     * executor that gives no ordering guarantee. Tasks go into an MPSC queue and
     * a single drain pass (run()) is scheduled on the underlying executor; the
     * state machine below ensures at most one drain pass is active at a time.
     */
    private static final class NonStickyOrderedEventExecutor extends AbstractEventExecutor
        implements Runnable, OrderedEventExecutor {

        private final EventExecutor executor;
        // Multi-producer single-consumer queue: many threads enqueue, only the
        // active drain pass dequeues.
        private final Queue<Runnable> tasks = PlatformDependent.newMpscQueue();

        // State machine: NONE (idle) -> SUBMITTED (drain pass scheduled) -> RUNNING (draining).
        private static final int NONE = 0;
        private static final int SUBMITTED = 1;
        private static final int RUNNING = 2;

        private final AtomicInteger state = new AtomicInteger();
        private final int maxTaskExecutePerRun;

        NonStickyOrderedEventExecutor(EventExecutor executor, int maxTaskExecutePerRun) {
            super(executor);
            this.executor = executor;
            this.maxTaskExecutePerRun = maxTaskExecutePerRun;
        }

        @Override
        public void run() {
            // Only the pass that was actually scheduled may drain; a stale
            // invocation (state no longer SUBMITTED) is a no-op.
            if (!state.compareAndSet(SUBMITTED, RUNNING)) {
                return;
            }
            for (;;) {
                int i = 0;
                try {
                    // Drain at most maxTaskExecutePerRun tasks, then yield the
                    // underlying thread by re-scheduling ourselves.
                    for (; i < maxTaskExecutePerRun; i++) {
                        Runnable task = tasks.poll();
                        if (task == null) {
                            break;
                        }
                        safeExecute(task);
                    }
                } finally {
                    if (i == maxTaskExecutePerRun) {
                        try {
                            state.set(SUBMITTED);
                            executor.execute(this);
                            return; // done
                        } catch (Throwable ignore) {
                            // Reset the state back to running as we will keep on executing tasks.
                            state.set(RUNNING);
                            // if an error happened we should just ignore it and let the loop run again as there is not
                            // much else we can do. Most likely this was triggered by a full task queue. In this case
                            // we just will run more tasks and try again later.
                        }
                    } else {
                        state.set(NONE);
                        // After setting the state to NONE, look at the tasks queue one more time.
                        // If it is empty, then we can return from this method.
                        // Otherwise, it means the producer thread has called execute(Runnable)
                        // and enqueued a task in between the tasks.poll() above and the state.set(NONE) here.
                        // There are two possible scenarios when this happen
                        //
                        // 1. The producer thread sees state == NONE, hence the compareAndSet(NONE, SUBMITTED)
                        //    is successfully setting the state to SUBMITTED. This mean the producer
                        //    will call / has called executor.execute(this). In this case, we can just return.
                        // 2. The producer thread don't see the state change, hence the compareAndSet(NONE, SUBMITTED)
                        //    returns false. In this case, the producer thread won't call executor.execute.
                        //    In this case, we need to change the state to RUNNING and keeps running.
                        //
                        // The above cases can be distinguished by performing a
                        // compareAndSet(NONE, RUNNING). If it returns "false", it is case 1; otherwise it is case 2.
                        if (tasks.isEmpty() || !state.compareAndSet(NONE, RUNNING)) {
                            return; // done
                        }
                    }
                }
            }
        }

        // No caller thread is ever "the" event loop for this adapter: tasks may
        // run on any thread of the underlying group.
        @Override
        public boolean inEventLoop(Thread thread) {
            return false;
        }

        @Override
        public boolean inEventLoop() {
            return false;
        }

        @Override
        public boolean isShuttingDown() {
            // NOTE(review): delegates to isShutdown() rather than isShuttingDown();
            // this matches the upstream netty copy — confirm it is intended.
            return executor.isShutdown();
        }

        @Override
        public Future<?> shutdownGracefully(long quietPeriod, long timeout, TimeUnit unit) {
            return executor.shutdownGracefully(quietPeriod, timeout, unit);
        }

        @Override
        public Future<?> terminationFuture() {
            return executor.terminationFuture();
        }

        @Override
        public void shutdown() {
            executor.shutdown();
        }

        @Override
        public boolean isShutdown() {
            return executor.isShutdown();
        }

        @Override
        public boolean isTerminated() {
            return executor.isTerminated();
        }

        @Override
        public boolean awaitTermination(long timeout, TimeUnit unit) throws InterruptedException {
            return executor.awaitTermination(timeout, unit);
        }

        @Override
        public void execute(Runnable command) {
            if (!tasks.offer(command)) {
                throw new RejectedExecutionException();
            }
            if (state.compareAndSet(NONE, SUBMITTED)) {
                // Actually it could happen that the runnable was picked up in between but we not care to much and just
                // execute ourself. At worst this will be a NOOP when run() is called.
                try {
                    executor.execute(this);
                } catch (Throwable e) {
                    // Not reset the state as some other Runnable may be added to the queue already in the meantime.
                    tasks.remove(command);
                    PlatformDependent.throwException(e);
                }
            }
        }
    }
}
|
|
package alien4cloud.paas.cloudify2.generator;
import static alien4cloud.paas.cloudify2.generator.AlienEnvironmentVariables.SERVICE_NAME;
import static alien4cloud.paas.cloudify2.generator.AlienEnvironmentVariables.SOURCE_NAME;
import static alien4cloud.paas.cloudify2.generator.AlienEnvironmentVariables.SOURCE_SERVICE_NAME;
import static alien4cloud.paas.cloudify2.generator.AlienEnvironmentVariables.TARGET_NAME;
import static alien4cloud.paas.cloudify2.generator.AlienEnvironmentVariables.TARGET_SERVICE_NAME;
import static alien4cloud.paas.cloudify2.generator.RecipeGeneratorConstants.SCRIPTS;
import static alien4cloud.paas.cloudify2.generator.RecipeGeneratorConstants.SCRIPT_LIFECYCLE;
import static alien4cloud.tosca.normative.ToscaFunctionConstants.HOST;
import static alien4cloud.tosca.normative.ToscaFunctionConstants.SELF;
import java.io.IOException;
import java.nio.file.Path;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import javax.annotation.Resource;
import lombok.AllArgsConstructor;
import lombok.NoArgsConstructor;
import org.apache.commons.lang3.StringUtils;
import org.springframework.context.ApplicationContext;
import alien4cloud.common.AlienConstants;
import alien4cloud.model.components.ConcatPropertyValue;
import alien4cloud.model.components.FunctionPropertyValue;
import alien4cloud.model.components.IValue;
import alien4cloud.model.components.ImplementationArtifact;
import alien4cloud.model.components.IndexedArtifactToscaElement;
import alien4cloud.model.components.IndexedToscaElement;
import alien4cloud.model.components.Interface;
import alien4cloud.model.components.Operation;
import alien4cloud.model.components.OperationOutput;
import alien4cloud.paas.IPaaSTemplate;
import alien4cloud.paas.cloudify2.AlienExtentedConstants;
import alien4cloud.paas.cloudify2.ProviderLogLevel;
import alien4cloud.paas.cloudify2.funtion.FunctionProcessor;
import alien4cloud.paas.cloudify2.utils.CloudifyPaaSUtils;
import alien4cloud.paas.cloudify2.utils.VelocityUtil;
import alien4cloud.paas.model.PaaSNodeTemplate;
import alien4cloud.paas.model.PaaSRelationshipTemplate;
import alien4cloud.paas.plan.ToscaRelationshipLifecycleConstants;
import alien4cloud.tosca.normative.ToscaFunctionConstants;
import alien4cloud.utils.AlienUtils;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
abstract class AbstractCloudifyScriptGenerator {
@Resource
protected FunctionProcessor funtionProcessor;
@Resource
protected RecipeGeneratorArtifactCopier artifactCopier;
@Resource
protected CommandGenerator commandGenerator;
@Resource
ApplicationContext applicationContext;
private static final String MAP_TO_ADD_KEYWORD = "MAP_TO_ADD_";
/**
 * Builds the execution command for a given operation of a given interface on a node,
 * or returns {@code null} when the interface or the operation does not exist.
 *
 * @param context the recipe generation context.
 * @param nodeTemplate node whose TOSCA element declares the interface.
 * @param interfaceName name of the interface to look up.
 * @param operationName name of the operation within that interface.
 * @param envMaps environment variable maps to enrich for the command.
 * @param logLevel provider log level to apply to the generated command.
 * @return the generated command, or {@code null} if not found.
 * @throws IOException on artifact processing failure.
 */
protected String getOperationCommandFromInterface(final RecipeGeneratorServiceContext context, final PaaSNodeTemplate nodeTemplate,
        final String interfaceName, final String operationName, ExecEnvMaps envMaps, ProviderLogLevel logLevel) throws IOException {
    // Guard clauses: bail out early when the interface or operation is absent.
    final Interface declaredInterface = nodeTemplate.getIndexedToscaElement().getInterfaces().get(interfaceName);
    if (declaredInterface == null) {
        return null;
    }
    final Operation declaredOperation = declaredInterface.getOperations().get(operationName);
    if (declaredOperation == null) {
        return null;
    }
    return prepareAndGetCommand(context, nodeTemplate, interfaceName, operationName, envMaps, declaredOperation, logLevel);
}
protected String prepareAndGetCommand(final RecipeGeneratorServiceContext context, final IPaaSTemplate<? extends IndexedArtifactToscaElement> paaSTemplate,
final String interfaceName, final String operationName, ExecEnvMaps envMaps, Operation operation, ProviderLogLevel logLevel) throws IOException {
String command;
OperationResume operationResume = getOperationResume(interfaceName, operationName, operation);
command = getCommandFromOperation(context, paaSTemplate, operationResume, null, envMaps, logLevel);
if (StringUtils.isNotBlank(command)) {
this.artifactCopier.copyImplementationArtifact(context, paaSTemplate.getCsarPath(), operation.getImplementationArtifact(),
paaSTemplate.getIndexedToscaElement());
}
return command;
}
protected String getCommandFromOperation(final RecipeGeneratorServiceContext context, final IPaaSTemplate<? extends IndexedToscaElement> basePaaSTemplate,
final OperationResume operationResume, String instanceId, ExecEnvMaps envMaps, ProviderLogLevel logLevel) throws IOException {
if (operationResume.artifact == null || StringUtils.isBlank(operationResume.artifact.getArtifactRef())) {
return null;
}
// if relationship, add relationship env vars
if (basePaaSTemplate instanceof PaaSRelationshipTemplate) {
addRelationshipEnvVars(operationResume.operationName, operationResume.inputParameters, (PaaSRelationshipTemplate) basePaaSTemplate,
context.getAllNodes(), instanceId, envMaps);
} else {
addNodeEnvVars(context, (PaaSNodeTemplate) basePaaSTemplate, instanceId, operationResume.inputParameters, envMaps, SELF, HOST, SERVICE_NAME);
}
String relativePath = CloudifyPaaSUtils.getNodeTypeRelativePath(basePaaSTemplate.getIndexedToscaElement());
String scriptPath = relativePath + "/" + operationResume.artifact.getArtifactRef();
// nodeId:interface:operation
String operationFQN = AlienUtils.prefixWith(AlienConstants.OPERATION_NAME_SEPARATOR, operationResume.operationName,
new String[] { basePaaSTemplate.getId(), operationResume.interfaceName });
return commandGenerator.getCommandBasedOnArtifactType(operationFQN, operationResume.artifact, envMaps.runtimes, envMaps.strings,
operationResume.outputs, scriptPath, logLevel);
}
private void addRelationshipEnvVars(String operationName, Map<String, IValue> inputParameters, PaaSRelationshipTemplate relShipPaaSTemplate,
Map<String, PaaSNodeTemplate> builtPaaSTemplates, String instanceId, ExecEnvMaps envMaps) throws IOException {
Map<String, String> sourceAttributes = Maps.newHashMap();
Map<String, String> targetAttributes = Maps.newHashMap();
PaaSNodeTemplate sourcePaaSTemplate = builtPaaSTemplates.get(relShipPaaSTemplate.getSource());
PaaSNodeTemplate targetPaaSTemplate = builtPaaSTemplates.get(relShipPaaSTemplate.getRelationshipTemplate().getTarget());
// for some cases we need to use a value provided in the velocity template.
// for example for relationship add_source, the source ip_address and instanceId are var provided in the velocity script.
// The target ip_address and instanceId will remain unchanged and handled by the default routine
String sourceInstanceId = getProperValueForRelEnvsBuilding(operationName, ToscaFunctionConstants.SOURCE, instanceId);
String sourceIpAddrVar = getProperValueForRelEnvsBuilding(operationName, ToscaFunctionConstants.SOURCE, "ip_address");
String sourceId = CloudifyPaaSUtils.serviceIdFromNodeTemplateId(sourcePaaSTemplate.getId());
String sourceServiceName = CloudifyPaaSUtils.cfyServiceNameFromNodeTemplate(sourcePaaSTemplate);
String targetInstanceId = getProperValueForRelEnvsBuilding(operationName, ToscaFunctionConstants.TARGET, instanceId);
String targetIpAddrVar = getProperValueForRelEnvsBuilding(operationName, ToscaFunctionConstants.TARGET, "ip_address");
String targetId = CloudifyPaaSUtils.serviceIdFromNodeTemplateId(targetPaaSTemplate.getId());
String targetServiceName = CloudifyPaaSUtils.cfyServiceNameFromNodeTemplate(targetPaaSTemplate);
// separate parameters using TARGET and SOURCE keywords before processing them
if (inputParameters != null) {
Map<String, IValue> sourceAttrParams = Maps.newHashMap();
Map<String, IValue> targetAttrParams = Maps.newHashMap();
Map<String, IValue> simpleParams = Maps.newHashMap();
for (Entry<String, IValue> paramEntry : inputParameters.entrySet()) {
if (!paramEntry.getValue().isDefinition() && !(paramEntry.getValue() instanceof ConcatPropertyValue)) {
FunctionPropertyValue param = (FunctionPropertyValue) paramEntry.getValue();
switch (param.getFunction()) {
case ToscaFunctionConstants.GET_ATTRIBUTE:
case ToscaFunctionConstants.GET_OPERATION_OUTPUT:
if (ToscaFunctionConstants.TARGET.equals(param.getTemplateName())) {
targetAttrParams.put(paramEntry.getKey(), param);
targetAttributes.put(paramEntry.getKey(), param.getElementNameToFetch());
} else if (ToscaFunctionConstants.SOURCE.equals(param.getTemplateName())) {
sourceAttrParams.put(paramEntry.getKey(), param);
sourceAttributes.put(paramEntry.getKey(), param.getElementNameToFetch());
} else {
simpleParams.put(paramEntry.getKey(), param);
}
break;
default:
simpleParams.put(paramEntry.getKey(), paramEntry.getValue());
break;
}
}
}
// evaluate params
funtionProcessor.processParameters(simpleParams, envMaps.strings, envMaps.runtimes, relShipPaaSTemplate, builtPaaSTemplates, null);
funtionProcessor.processParameters(sourceAttrParams, envMaps.strings, envMaps.runtimes, relShipPaaSTemplate, builtPaaSTemplates, sourceInstanceId);
funtionProcessor.processParameters(targetAttrParams, envMaps.strings, envMaps.runtimes, relShipPaaSTemplate, builtPaaSTemplates, targetInstanceId);
// override ip attributes' way of getting if needed
overrideIpAttributesIfNeeded(sourceAttributes, envMaps.runtimes, sourceIpAddrVar);
overrideIpAttributesIfNeeded(targetAttributes, envMaps.runtimes, targetIpAddrVar);
}
// custom alien env vars
envMaps.strings.put(SOURCE_NAME, sourcePaaSTemplate.getId());
envMaps.strings.put(TARGET_NAME, targetPaaSTemplate.getId());
envMaps.strings.put(SOURCE_SERVICE_NAME, CloudifyPaaSUtils.cfyServiceNameFromNodeTemplate(sourcePaaSTemplate));
envMaps.strings.put(TARGET_SERVICE_NAME, CloudifyPaaSUtils.cfyServiceNameFromNodeTemplate(targetPaaSTemplate));
// TOSCA SOURCE/SOURCES and TARGET/TARGETS
envMaps.runtimes.put(MAP_TO_ADD_KEYWORD + ToscaFunctionConstants.SOURCE, commandGenerator.getTOSCARelationshipEnvsCommand(
ToscaFunctionConstants.SOURCE, sourceId, sourceServiceName, sourceInstanceId, sourceAttributes,
sourceAttributes.isEmpty() ? null : Lists.newArrayList(sourcePaaSTemplate.getId())));
envMaps.runtimes.put(MAP_TO_ADD_KEYWORD + ToscaFunctionConstants.TARGET, commandGenerator.getTOSCARelationshipEnvsCommand(
ToscaFunctionConstants.TARGET, targetId, targetServiceName, targetInstanceId, targetAttributes,
targetAttributes.isEmpty() ? null : Lists.newArrayList(targetPaaSTemplate.getId())));
}
private void overrideIpAttributesIfNeeded(Map<String, String> attributes, Map<String, String> evaluated, String overrideValue) {
if (overrideValue != null) {
for (Entry<String, String> attrEntry : attributes.entrySet()) {
if (attrEntry.getValue().equals(AlienExtentedConstants.IP_ADDRESS) && evaluated.containsKey(attrEntry.getKey())) {
evaluated.put(attrEntry.getKey(), overrideValue);
}
}
}
}
private String getProperValueForRelEnvsBuilding(String operationName, String member, String defaultValue) {
switch (operationName) {
case ToscaRelationshipLifecycleConstants.ADD_TARGET:
case ToscaRelationshipLifecycleConstants.REMOVE_TARGET:
return member.equals(ToscaFunctionConstants.SOURCE) ? null : defaultValue;
case ToscaRelationshipLifecycleConstants.ADD_SOURCE:
case ToscaRelationshipLifecycleConstants.REMOVE_SOURCE:
return member.equals(ToscaFunctionConstants.TARGET) ? null : defaultValue;
default:
return null;
}
}
protected void generateScriptWorkflow(final Path servicePath, final Path velocityDescriptorPath, final String lifecycle, final List<String> executions,
final Map<String, ? extends Object> additionalPropeties) throws IOException {
Path outputPath = servicePath.resolve(lifecycle + ".groovy");
Map<String, Object> properties = Maps.newHashMap();
properties.put(SCRIPT_LIFECYCLE, lifecycle);
properties.put(SCRIPTS, executions);
properties = alien4cloud.utils.CollectionUtils.merge(additionalPropeties, properties, true);
VelocityUtil.writeToOutputFile(velocityDescriptorPath, outputPath, properties);
}
private void addNodeEnvVars(final RecipeGeneratorServiceContext context, final PaaSNodeTemplate nodeTemplate, final String instanceId,
Map<String, IValue> inputParameters, ExecEnvMaps envMaps, String... envKeys) throws IOException {
funtionProcessor.processParameters(inputParameters, envMaps.strings, envMaps.runtimes, nodeTemplate, context.getAllNodes(), instanceId);
if (envKeys != null) {
for (String envKey : envKeys) {
switch (envKey) {
case SELF:
envMaps.strings.put(envKey, nodeTemplate.getId());
break;
case HOST:
envMaps.strings.put(envKey, nodeTemplate.getParent() == null ? null : nodeTemplate.getParent().getId());
break;
case SERVICE_NAME:
envMaps.strings.put(envKey, CloudifyPaaSUtils.cfyServiceNameFromNodeTemplate(nodeTemplate));
break;
default:
break;
}
}
}
}
@AllArgsConstructor
@NoArgsConstructor
protected class ExecEnvMaps {
Map<String, String> strings = Maps.newHashMap();
Map<String, String> runtimes = Maps.newHashMap();
}
@AllArgsConstructor
@NoArgsConstructor
protected class OperationResume {
String interfaceName;
String operationName;
ImplementationArtifact artifact;
Map<String, IValue> inputParameters;
Map<String, Set<String>> outputs;
OperationResume(String interfaceName, String operationName, ImplementationArtifact artifact, Map<String, IValue> inputParameters,
Set<OperationOutput> outputsSet) {
this(interfaceName, operationName, artifact, inputParameters, toOutputMap(outputsSet));
}
}
private OperationResume getOperationResume(String interfaceName, String operationName, Operation operation) {
return new OperationResume(interfaceName, operationName, operation.getImplementationArtifact(), operation.getInputParameters(),
toOutputMap(operation.getOutputs()));
}
protected Map<String, Set<String>> toOutputMap(Set<OperationOutput> outputsSet) {
Map<String, Set<String>> outputsAsMap = Maps.newHashMap();
if (outputsSet != null) {
for (OperationOutput output : outputsSet) {
outputsAsMap.put(output.getName(), output.getRelatedAttributes());
}
}
return outputsAsMap;
}
}
|
|
package org.jtheque.films.services.impl.utils.web.analyzers;
/*
* Copyright JTheque (Baptiste Wicht)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.jtheque.core.managers.Managers;
import org.jtheque.core.managers.beans.IBeansManager;
import org.jtheque.films.IFilmsModule;
import org.jtheque.films.services.able.IActorService;
import org.jtheque.films.services.able.INotesService;
import org.jtheque.films.services.able.IRealizersService;
import org.jtheque.primary.od.able.Person;
import org.jtheque.primary.od.able.SimpleData;
import org.jtheque.primary.services.able.ISimpleDataService;
import org.jtheque.primary.utils.web.analyzers.generic.GenericGenerator;
import org.jtheque.primary.utils.web.analyzers.generic.field.FieldGetter;
import org.jtheque.primary.utils.web.analyzers.generic.operation.ScannerPossessor;
import org.jtheque.utils.StringUtils;
import org.jtheque.utils.bean.DataUtils;
import javax.annotation.Resource;
import java.util.Scanner;
import java.util.regex.Pattern;
/**
* A generic film analyzer. It seems an analyzer who takes its parser information from an XML file.
*
* @author Baptiste Wicht
*/
/**
 * A generic film analyzer. It is an analyzer that takes its parsing information (field getters)
 * from an XML-driven {@link GenericGenerator}.
 *
 * @author Baptiste Wicht
 */
public final class GenericFilmAnalyzer extends AbstractFilmAnalyzer implements ScannerPossessor {
    @Resource
    private ISimpleDataService kindsService;
    @Resource
    private IRealizersService realizersService;
    @Resource
    private INotesService notesService;
    @Resource
    private ISimpleDataService countriesService;
    @Resource
    private IActorService actorService;
    @Resource
    private IFilmsModule filmsModule;

    /**
     * The generator of the field getters.
     */
    private final GenericGenerator generator;

    private FieldGetter dateGetter;
    private FieldGetter durationGetter;
    private FieldGetter imageGetter;
    private FieldGetter kindGetter;
    private FieldGetter realizerGetter;
    private FieldGetter resumeGetter;
    private FieldGetter actorsGetter;

    // Renamed from SECONDS_IN_A_MINUTE: it is used to convert hours to minutes ("1h30" -> 90).
    private static final int MINUTES_PER_HOUR = 60;

    private static final Pattern ACTOR_SEPARATOR_PATTERN = Pattern.compile("%%%");
    private static final Pattern HOUR_SEPARATOR_PATTERN = Pattern.compile("h");

    /**
     * Construct a new GenericFilmAnalyzer.
     *
     * @param generator The generator to use.
     */
    public GenericFilmAnalyzer(GenericGenerator generator) {
        super();

        this.generator = generator;

        init();

        Managers.getManager(IBeansManager.class).inject(this);
    }

    @Override
    public Scanner getScanner() {
        // This analyzer works line by line and never exposes a scanner.
        return null;
    }

    /**
     * Init the parser: resolve one field getter per supported film property.
     */
    private void init() {
        dateGetter = generator.getFieldGetter("date");
        durationGetter = generator.getFieldGetter("duration");
        imageGetter = generator.getFieldGetter("image");
        kindGetter = generator.getFieldGetter("kind");
        realizerGetter = generator.getFieldGetter("realizer");
        resumeGetter = generator.getFieldGetter("resume");
        actorsGetter = generator.getFieldGetter("actors");
    }

    @Override
    public void findDate(String line) {
        if (isDateDo()) {
            return;
        }

        if (dateGetter.mustGet(line)) {
            String transformedLine = dateGetter.performOperations(line, this);

            String value = dateGetter.getValue(transformedLine);

            if (value != null) {
                getFilm().setYear(Integer.parseInt(value));
                setDate(true);
            }
        }
    }

    @Override
    public void findDuration(String line) {
        if (isDurationDo()) {
            return;
        }

        if (durationGetter.mustGet(line)) {
            String transformedLine = durationGetter.performOperations(line, this);

            String value = durationGetter.getValue(transformedLine);

            if (value != null) {
                getFilm().setDuration(parseDurationInMinutes(value));
                setDuration(true);
            }
        }
    }

    /**
     * Parse a duration value into minutes. Supports plain minutes ("90") and the hour form
     * ("1h30"). A value like "2h" (no minutes part) is handled as 2 hours; previously it caused an
     * ArrayIndexOutOfBoundsException because the split yields a single element.
     *
     * @param value The raw duration value.
     * @return The duration in minutes.
     */
    private static int parseDurationInMinutes(String value) {
        if (value.contains("h")) {
            String[] hour = HOUR_SEPARATOR_PATTERN.split(value);

            int minutes = Integer.parseInt(hour[0]) * MINUTES_PER_HOUR;

            if (hour.length > 1) {
                minutes += Integer.parseInt(hour[1]);
            }

            return minutes;
        }

        return Integer.parseInt(value);
    }

    @Override
    public void findImage(String line) {
        if (isImageDo()) {
            return;
        }

        if (imageGetter.mustGet(line)) {
            String transformedLine = imageGetter.performOperations(line, this);

            String value = imageGetter.getValue(transformedLine);

            if (value != null) {
                AnalyzerUtils.downloadMiniature(getFilm(), value);
                setImage(true);
            }
        }
    }

    @Override
    public void findKind(String line) {
        if (isKindDo()) {
            return;
        }

        if (kindGetter.mustGet(line)) {
            String transformedLine = kindGetter.performOperations(line, this);

            String value = kindGetter.getValue(transformedLine);

            if (StringUtils.isNotEmpty(value)) {
                value = StringUtils.setFirstLetterOnlyUpper(value);

                // Reuse an existing kind when possible, otherwise create it first.
                if (kindsService.exist(value)) {
                    getFilm().addKind(kindsService.getSimpleData(value));
                } else {
                    SimpleData kind = kindsService.getEmptySimpleData();
                    kind.setName(value);

                    kindsService.create(kind);

                    getFilm().addKind(kind);
                }

                setKind(true);
            }
        }
    }

    @Override
    public void findRealizer(String line) {
        if (isRealizerDo()) {
            return;
        }

        if (realizerGetter.mustGet(line)) {
            String transformedLine = realizerGetter.performOperations(line, this);

            String value = realizerGetter.getValue(transformedLine);

            if (value != null) {
                String[] nameAndFirstName = DataUtils.getNameAndFirstName(value);

                setRealizerOfFilm(nameAndFirstName[0], nameAndFirstName[1]);

                setRealizer(true);
            }
        }
    }

    /**
     * Set the realizer of the film, creating the person (with default country and note) when it
     * does not exist yet.
     *
     * @param name      The name of the realizer.
     * @param firstName The first name of the realizer.
     */
    private void setRealizerOfFilm(String name, String firstName) {
        if (realizersService.exists(firstName, name)) {
            getFilm().setTheRealizer(realizersService.getRealizer(firstName, name));
        } else {
            Person realizer = realizersService.getEmptyRealizer();
            realizer.setName(name);
            realizer.setFirstName(firstName);
            realizer.setTheCountry(countriesService.getDefaultSimpleData());
            realizer.setNote(notesService.getDefaultNote());

            realizersService.create(realizer);

            getFilm().setTheRealizer(realizer);
        }
    }

    @Override
    public void findActors(String line) {
        if (isActorsDo()) {
            return;
        }

        if (actorsGetter.mustGet(line)) {
            String transformedLine = actorsGetter.performOperations(line, this);

            String value = actorsGetter.getValue(transformedLine);

            if (value != null) {
                value = StringUtils.removeHTMLEntities(value);

                String[] actorsTemp = ACTOR_SEPARATOR_PATTERN.split(value);

                int current = 0;

                for (String name : actorsTemp) {
                    // Stop once the configured maximum number of actors has been processed.
                    if (current++ >= filmsModule.getConfiguration().getNumberOfActors()) {
                        break;
                    }

                    String[] nameAndFirstName = DataUtils.getNameAndFirstName(name);

                    addActor(nameAndFirstName[0], nameAndFirstName[1]);

                    // Duplicate names may be merged, so also stop on the film's actual actor count.
                    if (getFilm().getActors().size() >= filmsModule.getConfiguration().getNumberOfActors()) {
                        break;
                    }
                }
            }
        }
    }

    /**
     * Add an actor to the film, creating the person (with default country and note) when it does
     * not exist yet.
     *
     * @param name      The name of the actor.
     * @param firstName The first name of the actor.
     */
    private void addActor(String name, String firstName) {
        if (actorService.exist(firstName, name)) {
            Person actor = actorService.getPerson(firstName, name);

            getFilm().addActor(actor);
        } else {
            Person actor = actorService.getEmptyPerson();
            actor.setName(name);
            actor.setFirstName(firstName);
            actor.setTheCountry(countriesService.getDefaultSimpleData());
            actor.setNote(notesService.getDefaultNote());

            actorService.create(actor);

            getFilm().addActor(actor);
        }
    }

    @Override
    public void findResume(String line) {
        if (isResumeDo()) {
            return;
        }

        if (resumeGetter.mustGet(line)) {
            String transformedLine = resumeGetter.performOperations(line, this);

            String value = resumeGetter.getValue(transformedLine);

            if (value != null) {
                getFilm().setResume(StringUtils.removeHTMLEntities(value));
                setResume(true);
            }
        }
    }
}
|
|
/*
* Copyright 2021 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.tasks.v2.stub;
import static com.google.cloud.tasks.v2.CloudTasksClient.ListQueuesPagedResponse;
import static com.google.cloud.tasks.v2.CloudTasksClient.ListTasksPagedResponse;
import com.google.api.gax.core.BackgroundResource;
import com.google.api.gax.core.BackgroundResourceAggregation;
import com.google.api.gax.grpc.GrpcCallSettings;
import com.google.api.gax.grpc.GrpcStubCallableFactory;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.cloud.tasks.v2.CreateQueueRequest;
import com.google.cloud.tasks.v2.CreateTaskRequest;
import com.google.cloud.tasks.v2.DeleteQueueRequest;
import com.google.cloud.tasks.v2.DeleteTaskRequest;
import com.google.cloud.tasks.v2.GetQueueRequest;
import com.google.cloud.tasks.v2.GetTaskRequest;
import com.google.cloud.tasks.v2.ListQueuesRequest;
import com.google.cloud.tasks.v2.ListQueuesResponse;
import com.google.cloud.tasks.v2.ListTasksRequest;
import com.google.cloud.tasks.v2.ListTasksResponse;
import com.google.cloud.tasks.v2.PauseQueueRequest;
import com.google.cloud.tasks.v2.PurgeQueueRequest;
import com.google.cloud.tasks.v2.Queue;
import com.google.cloud.tasks.v2.ResumeQueueRequest;
import com.google.cloud.tasks.v2.RunTaskRequest;
import com.google.cloud.tasks.v2.Task;
import com.google.cloud.tasks.v2.UpdateQueueRequest;
import com.google.common.collect.ImmutableMap;
import com.google.iam.v1.GetIamPolicyRequest;
import com.google.iam.v1.Policy;
import com.google.iam.v1.SetIamPolicyRequest;
import com.google.iam.v1.TestIamPermissionsRequest;
import com.google.iam.v1.TestIamPermissionsResponse;
import com.google.longrunning.stub.GrpcOperationsStub;
import com.google.protobuf.Empty;
import io.grpc.MethodDescriptor;
import io.grpc.protobuf.ProtoUtils;
import java.io.IOException;
import java.util.concurrent.TimeUnit;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
* gRPC stub implementation for the CloudTasks service API.
*
* <p>This class is for advanced usage and reflects the underlying API directly.
*/
@Generated("by gapic-generator-java")
public class GrpcCloudTasksStub extends CloudTasksStub {
  // Unary gRPC method descriptors, one per CloudTasks RPC. Each binds the fully-qualified method
  // name to protobuf request/response marshallers. Auto-generated; do not hand-edit beyond docs.
  private static final MethodDescriptor<ListQueuesRequest, ListQueuesResponse>
      listQueuesMethodDescriptor =
          MethodDescriptor.<ListQueuesRequest, ListQueuesResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.cloud.tasks.v2.CloudTasks/ListQueues")
              .setRequestMarshaller(ProtoUtils.marshaller(ListQueuesRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(ListQueuesResponse.getDefaultInstance()))
              .build();

  private static final MethodDescriptor<GetQueueRequest, Queue> getQueueMethodDescriptor =
      MethodDescriptor.<GetQueueRequest, Queue>newBuilder()
          .setType(MethodDescriptor.MethodType.UNARY)
          .setFullMethodName("google.cloud.tasks.v2.CloudTasks/GetQueue")
          .setRequestMarshaller(ProtoUtils.marshaller(GetQueueRequest.getDefaultInstance()))
          .setResponseMarshaller(ProtoUtils.marshaller(Queue.getDefaultInstance()))
          .build();

  private static final MethodDescriptor<CreateQueueRequest, Queue> createQueueMethodDescriptor =
      MethodDescriptor.<CreateQueueRequest, Queue>newBuilder()
          .setType(MethodDescriptor.MethodType.UNARY)
          .setFullMethodName("google.cloud.tasks.v2.CloudTasks/CreateQueue")
          .setRequestMarshaller(ProtoUtils.marshaller(CreateQueueRequest.getDefaultInstance()))
          .setResponseMarshaller(ProtoUtils.marshaller(Queue.getDefaultInstance()))
          .build();

  private static final MethodDescriptor<UpdateQueueRequest, Queue> updateQueueMethodDescriptor =
      MethodDescriptor.<UpdateQueueRequest, Queue>newBuilder()
          .setType(MethodDescriptor.MethodType.UNARY)
          .setFullMethodName("google.cloud.tasks.v2.CloudTasks/UpdateQueue")
          .setRequestMarshaller(ProtoUtils.marshaller(UpdateQueueRequest.getDefaultInstance()))
          .setResponseMarshaller(ProtoUtils.marshaller(Queue.getDefaultInstance()))
          .build();

  private static final MethodDescriptor<DeleteQueueRequest, Empty> deleteQueueMethodDescriptor =
      MethodDescriptor.<DeleteQueueRequest, Empty>newBuilder()
          .setType(MethodDescriptor.MethodType.UNARY)
          .setFullMethodName("google.cloud.tasks.v2.CloudTasks/DeleteQueue")
          .setRequestMarshaller(ProtoUtils.marshaller(DeleteQueueRequest.getDefaultInstance()))
          .setResponseMarshaller(ProtoUtils.marshaller(Empty.getDefaultInstance()))
          .build();

  private static final MethodDescriptor<PurgeQueueRequest, Queue> purgeQueueMethodDescriptor =
      MethodDescriptor.<PurgeQueueRequest, Queue>newBuilder()
          .setType(MethodDescriptor.MethodType.UNARY)
          .setFullMethodName("google.cloud.tasks.v2.CloudTasks/PurgeQueue")
          .setRequestMarshaller(ProtoUtils.marshaller(PurgeQueueRequest.getDefaultInstance()))
          .setResponseMarshaller(ProtoUtils.marshaller(Queue.getDefaultInstance()))
          .build();

  private static final MethodDescriptor<PauseQueueRequest, Queue> pauseQueueMethodDescriptor =
      MethodDescriptor.<PauseQueueRequest, Queue>newBuilder()
          .setType(MethodDescriptor.MethodType.UNARY)
          .setFullMethodName("google.cloud.tasks.v2.CloudTasks/PauseQueue")
          .setRequestMarshaller(ProtoUtils.marshaller(PauseQueueRequest.getDefaultInstance()))
          .setResponseMarshaller(ProtoUtils.marshaller(Queue.getDefaultInstance()))
          .build();

  private static final MethodDescriptor<ResumeQueueRequest, Queue> resumeQueueMethodDescriptor =
      MethodDescriptor.<ResumeQueueRequest, Queue>newBuilder()
          .setType(MethodDescriptor.MethodType.UNARY)
          .setFullMethodName("google.cloud.tasks.v2.CloudTasks/ResumeQueue")
          .setRequestMarshaller(ProtoUtils.marshaller(ResumeQueueRequest.getDefaultInstance()))
          .setResponseMarshaller(ProtoUtils.marshaller(Queue.getDefaultInstance()))
          .build();

  private static final MethodDescriptor<GetIamPolicyRequest, Policy> getIamPolicyMethodDescriptor =
      MethodDescriptor.<GetIamPolicyRequest, Policy>newBuilder()
          .setType(MethodDescriptor.MethodType.UNARY)
          .setFullMethodName("google.cloud.tasks.v2.CloudTasks/GetIamPolicy")
          .setRequestMarshaller(ProtoUtils.marshaller(GetIamPolicyRequest.getDefaultInstance()))
          .setResponseMarshaller(ProtoUtils.marshaller(Policy.getDefaultInstance()))
          .build();

  private static final MethodDescriptor<SetIamPolicyRequest, Policy> setIamPolicyMethodDescriptor =
      MethodDescriptor.<SetIamPolicyRequest, Policy>newBuilder()
          .setType(MethodDescriptor.MethodType.UNARY)
          .setFullMethodName("google.cloud.tasks.v2.CloudTasks/SetIamPolicy")
          .setRequestMarshaller(ProtoUtils.marshaller(SetIamPolicyRequest.getDefaultInstance()))
          .setResponseMarshaller(ProtoUtils.marshaller(Policy.getDefaultInstance()))
          .build();

  private static final MethodDescriptor<TestIamPermissionsRequest, TestIamPermissionsResponse>
      testIamPermissionsMethodDescriptor =
          MethodDescriptor.<TestIamPermissionsRequest, TestIamPermissionsResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.cloud.tasks.v2.CloudTasks/TestIamPermissions")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(TestIamPermissionsRequest.getDefaultInstance()))
              .setResponseMarshaller(
                  ProtoUtils.marshaller(TestIamPermissionsResponse.getDefaultInstance()))
              .build();

  private static final MethodDescriptor<ListTasksRequest, ListTasksResponse>
      listTasksMethodDescriptor =
          MethodDescriptor.<ListTasksRequest, ListTasksResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.cloud.tasks.v2.CloudTasks/ListTasks")
              .setRequestMarshaller(ProtoUtils.marshaller(ListTasksRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(ListTasksResponse.getDefaultInstance()))
              .build();

  private static final MethodDescriptor<GetTaskRequest, Task> getTaskMethodDescriptor =
      MethodDescriptor.<GetTaskRequest, Task>newBuilder()
          .setType(MethodDescriptor.MethodType.UNARY)
          .setFullMethodName("google.cloud.tasks.v2.CloudTasks/GetTask")
          .setRequestMarshaller(ProtoUtils.marshaller(GetTaskRequest.getDefaultInstance()))
          .setResponseMarshaller(ProtoUtils.marshaller(Task.getDefaultInstance()))
          .build();

  private static final MethodDescriptor<CreateTaskRequest, Task> createTaskMethodDescriptor =
      MethodDescriptor.<CreateTaskRequest, Task>newBuilder()
          .setType(MethodDescriptor.MethodType.UNARY)
          .setFullMethodName("google.cloud.tasks.v2.CloudTasks/CreateTask")
          .setRequestMarshaller(ProtoUtils.marshaller(CreateTaskRequest.getDefaultInstance()))
          .setResponseMarshaller(ProtoUtils.marshaller(Task.getDefaultInstance()))
          .build();

  private static final MethodDescriptor<DeleteTaskRequest, Empty> deleteTaskMethodDescriptor =
      MethodDescriptor.<DeleteTaskRequest, Empty>newBuilder()
          .setType(MethodDescriptor.MethodType.UNARY)
          .setFullMethodName("google.cloud.tasks.v2.CloudTasks/DeleteTask")
          .setRequestMarshaller(ProtoUtils.marshaller(DeleteTaskRequest.getDefaultInstance()))
          .setResponseMarshaller(ProtoUtils.marshaller(Empty.getDefaultInstance()))
          .build();

  private static final MethodDescriptor<RunTaskRequest, Task> runTaskMethodDescriptor =
      MethodDescriptor.<RunTaskRequest, Task>newBuilder()
          .setType(MethodDescriptor.MethodType.UNARY)
          .setFullMethodName("google.cloud.tasks.v2.CloudTasks/RunTask")
          .setRequestMarshaller(ProtoUtils.marshaller(RunTaskRequest.getDefaultInstance()))
          .setResponseMarshaller(ProtoUtils.marshaller(Task.getDefaultInstance()))
          .build();
  // One UnaryCallable per RPC (plus paged variants for the list methods), wired up in the
  // constructor from the transport settings above.
  private final UnaryCallable<ListQueuesRequest, ListQueuesResponse> listQueuesCallable;
  private final UnaryCallable<ListQueuesRequest, ListQueuesPagedResponse> listQueuesPagedCallable;
  private final UnaryCallable<GetQueueRequest, Queue> getQueueCallable;
  private final UnaryCallable<CreateQueueRequest, Queue> createQueueCallable;
  private final UnaryCallable<UpdateQueueRequest, Queue> updateQueueCallable;
  private final UnaryCallable<DeleteQueueRequest, Empty> deleteQueueCallable;
  private final UnaryCallable<PurgeQueueRequest, Queue> purgeQueueCallable;
  private final UnaryCallable<PauseQueueRequest, Queue> pauseQueueCallable;
  private final UnaryCallable<ResumeQueueRequest, Queue> resumeQueueCallable;
  private final UnaryCallable<GetIamPolicyRequest, Policy> getIamPolicyCallable;
  private final UnaryCallable<SetIamPolicyRequest, Policy> setIamPolicyCallable;
  private final UnaryCallable<TestIamPermissionsRequest, TestIamPermissionsResponse>
      testIamPermissionsCallable;
  private final UnaryCallable<ListTasksRequest, ListTasksResponse> listTasksCallable;
  private final UnaryCallable<ListTasksRequest, ListTasksPagedResponse> listTasksPagedCallable;
  private final UnaryCallable<GetTaskRequest, Task> getTaskCallable;
  private final UnaryCallable<CreateTaskRequest, Task> createTaskCallable;
  private final UnaryCallable<DeleteTaskRequest, Empty> deleteTaskCallable;
  private final UnaryCallable<RunTaskRequest, Task> runTaskCallable;

  // Aggregates all closeable resources of this stub so they shut down together.
  private final BackgroundResource backgroundResources;
  private final GrpcOperationsStub operationsStub;
  private final GrpcStubCallableFactory callableFactory;
public static final GrpcCloudTasksStub create(CloudTasksStubSettings settings)
throws IOException {
return new GrpcCloudTasksStub(settings, ClientContext.create(settings));
}
public static final GrpcCloudTasksStub create(ClientContext clientContext) throws IOException {
return new GrpcCloudTasksStub(CloudTasksStubSettings.newBuilder().build(), clientContext);
}
public static final GrpcCloudTasksStub create(
ClientContext clientContext, GrpcStubCallableFactory callableFactory) throws IOException {
return new GrpcCloudTasksStub(
CloudTasksStubSettings.newBuilder().build(), clientContext, callableFactory);
}
  /**
   * Constructs an instance of GrpcCloudTasksStub, using the given settings. This is protected so
   * that it is easy to make a subclass, but otherwise, the static factory methods should be
   * preferred.
   *
   * @throws IOException if the transport callables cannot be created
   */
  protected GrpcCloudTasksStub(CloudTasksStubSettings settings, ClientContext clientContext)
      throws IOException {
    // Delegates to the full constructor with the default callable factory.
    this(settings, clientContext, new GrpcCloudTasksCallableFactory());
  }
  /**
   * Constructs an instance of GrpcCloudTasksStub, using the given settings. This is protected so
   * that it is easy to make a subclass, but otherwise, the static factory methods should be
   * preferred.
   *
   * @param settings the stub settings
   * @param clientContext supplies the channel, credentials and executor
   * @param callableFactory factory used to build the stub's callables
   * @throws IOException if stub construction fails
   */
  protected GrpcCloudTasksStub(
      CloudTasksStubSettings settings,
      ClientContext clientContext,
      GrpcStubCallableFactory callableFactory)
      throws IOException {
    this.callableFactory = callableFactory;
    // The operations stub shares this client's context (channel, credentials, executor).
    this.operationsStub = GrpcOperationsStub.create(clientContext, callableFactory);
    // For each RPC, bind the method descriptor together with a params extractor; the
    // extracted key/value pairs become request routing headers sent to the backend.
    GrpcCallSettings<ListQueuesRequest, ListQueuesResponse> listQueuesTransportSettings =
        GrpcCallSettings.<ListQueuesRequest, ListQueuesResponse>newBuilder()
            .setMethodDescriptor(listQueuesMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  ImmutableMap.Builder<String, String> params = ImmutableMap.builder();
                  params.put("parent", String.valueOf(request.getParent()));
                  return params.build();
                })
            .build();
    GrpcCallSettings<GetQueueRequest, Queue> getQueueTransportSettings =
        GrpcCallSettings.<GetQueueRequest, Queue>newBuilder()
            .setMethodDescriptor(getQueueMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  ImmutableMap.Builder<String, String> params = ImmutableMap.builder();
                  params.put("name", String.valueOf(request.getName()));
                  return params.build();
                })
            .build();
    GrpcCallSettings<CreateQueueRequest, Queue> createQueueTransportSettings =
        GrpcCallSettings.<CreateQueueRequest, Queue>newBuilder()
            .setMethodDescriptor(createQueueMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  ImmutableMap.Builder<String, String> params = ImmutableMap.builder();
                  params.put("parent", String.valueOf(request.getParent()));
                  return params.build();
                })
            .build();
    GrpcCallSettings<UpdateQueueRequest, Queue> updateQueueTransportSettings =
        GrpcCallSettings.<UpdateQueueRequest, Queue>newBuilder()
            .setMethodDescriptor(updateQueueMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  ImmutableMap.Builder<String, String> params = ImmutableMap.builder();
                  // UpdateQueue routes on the nested queue resource's name.
                  params.put("queue.name", String.valueOf(request.getQueue().getName()));
                  return params.build();
                })
            .build();
    GrpcCallSettings<DeleteQueueRequest, Empty> deleteQueueTransportSettings =
        GrpcCallSettings.<DeleteQueueRequest, Empty>newBuilder()
            .setMethodDescriptor(deleteQueueMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  ImmutableMap.Builder<String, String> params = ImmutableMap.builder();
                  params.put("name", String.valueOf(request.getName()));
                  return params.build();
                })
            .build();
    GrpcCallSettings<PurgeQueueRequest, Queue> purgeQueueTransportSettings =
        GrpcCallSettings.<PurgeQueueRequest, Queue>newBuilder()
            .setMethodDescriptor(purgeQueueMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  ImmutableMap.Builder<String, String> params = ImmutableMap.builder();
                  params.put("name", String.valueOf(request.getName()));
                  return params.build();
                })
            .build();
    GrpcCallSettings<PauseQueueRequest, Queue> pauseQueueTransportSettings =
        GrpcCallSettings.<PauseQueueRequest, Queue>newBuilder()
            .setMethodDescriptor(pauseQueueMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  ImmutableMap.Builder<String, String> params = ImmutableMap.builder();
                  params.put("name", String.valueOf(request.getName()));
                  return params.build();
                })
            .build();
    GrpcCallSettings<ResumeQueueRequest, Queue> resumeQueueTransportSettings =
        GrpcCallSettings.<ResumeQueueRequest, Queue>newBuilder()
            .setMethodDescriptor(resumeQueueMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  ImmutableMap.Builder<String, String> params = ImmutableMap.builder();
                  params.put("name", String.valueOf(request.getName()));
                  return params.build();
                })
            .build();
    GrpcCallSettings<GetIamPolicyRequest, Policy> getIamPolicyTransportSettings =
        GrpcCallSettings.<GetIamPolicyRequest, Policy>newBuilder()
            .setMethodDescriptor(getIamPolicyMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  ImmutableMap.Builder<String, String> params = ImmutableMap.builder();
                  params.put("resource", String.valueOf(request.getResource()));
                  return params.build();
                })
            .build();
    GrpcCallSettings<SetIamPolicyRequest, Policy> setIamPolicyTransportSettings =
        GrpcCallSettings.<SetIamPolicyRequest, Policy>newBuilder()
            .setMethodDescriptor(setIamPolicyMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  ImmutableMap.Builder<String, String> params = ImmutableMap.builder();
                  params.put("resource", String.valueOf(request.getResource()));
                  return params.build();
                })
            .build();
    GrpcCallSettings<TestIamPermissionsRequest, TestIamPermissionsResponse>
        testIamPermissionsTransportSettings =
            GrpcCallSettings.<TestIamPermissionsRequest, TestIamPermissionsResponse>newBuilder()
                .setMethodDescriptor(testIamPermissionsMethodDescriptor)
                .setParamsExtractor(
                    request -> {
                      ImmutableMap.Builder<String, String> params = ImmutableMap.builder();
                      params.put("resource", String.valueOf(request.getResource()));
                      return params.build();
                    })
                .build();
    GrpcCallSettings<ListTasksRequest, ListTasksResponse> listTasksTransportSettings =
        GrpcCallSettings.<ListTasksRequest, ListTasksResponse>newBuilder()
            .setMethodDescriptor(listTasksMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  ImmutableMap.Builder<String, String> params = ImmutableMap.builder();
                  params.put("parent", String.valueOf(request.getParent()));
                  return params.build();
                })
            .build();
    GrpcCallSettings<GetTaskRequest, Task> getTaskTransportSettings =
        GrpcCallSettings.<GetTaskRequest, Task>newBuilder()
            .setMethodDescriptor(getTaskMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  ImmutableMap.Builder<String, String> params = ImmutableMap.builder();
                  params.put("name", String.valueOf(request.getName()));
                  return params.build();
                })
            .build();
    GrpcCallSettings<CreateTaskRequest, Task> createTaskTransportSettings =
        GrpcCallSettings.<CreateTaskRequest, Task>newBuilder()
            .setMethodDescriptor(createTaskMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  ImmutableMap.Builder<String, String> params = ImmutableMap.builder();
                  params.put("parent", String.valueOf(request.getParent()));
                  return params.build();
                })
            .build();
    GrpcCallSettings<DeleteTaskRequest, Empty> deleteTaskTransportSettings =
        GrpcCallSettings.<DeleteTaskRequest, Empty>newBuilder()
            .setMethodDescriptor(deleteTaskMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  ImmutableMap.Builder<String, String> params = ImmutableMap.builder();
                  params.put("name", String.valueOf(request.getName()));
                  return params.build();
                })
            .build();
    GrpcCallSettings<RunTaskRequest, Task> runTaskTransportSettings =
        GrpcCallSettings.<RunTaskRequest, Task>newBuilder()
            .setMethodDescriptor(runTaskMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  ImmutableMap.Builder<String, String> params = ImmutableMap.builder();
                  params.put("name", String.valueOf(request.getName()));
                  return params.build();
                })
            .build();
    // Combine each RPC's transport settings with its retry/timeout configuration
    // from `settings` to produce the final callables.
    this.listQueuesCallable =
        callableFactory.createUnaryCallable(
            listQueuesTransportSettings, settings.listQueuesSettings(), clientContext);
    this.listQueuesPagedCallable =
        callableFactory.createPagedCallable(
            listQueuesTransportSettings, settings.listQueuesSettings(), clientContext);
    this.getQueueCallable =
        callableFactory.createUnaryCallable(
            getQueueTransportSettings, settings.getQueueSettings(), clientContext);
    this.createQueueCallable =
        callableFactory.createUnaryCallable(
            createQueueTransportSettings, settings.createQueueSettings(), clientContext);
    this.updateQueueCallable =
        callableFactory.createUnaryCallable(
            updateQueueTransportSettings, settings.updateQueueSettings(), clientContext);
    this.deleteQueueCallable =
        callableFactory.createUnaryCallable(
            deleteQueueTransportSettings, settings.deleteQueueSettings(), clientContext);
    this.purgeQueueCallable =
        callableFactory.createUnaryCallable(
            purgeQueueTransportSettings, settings.purgeQueueSettings(), clientContext);
    this.pauseQueueCallable =
        callableFactory.createUnaryCallable(
            pauseQueueTransportSettings, settings.pauseQueueSettings(), clientContext);
    this.resumeQueueCallable =
        callableFactory.createUnaryCallable(
            resumeQueueTransportSettings, settings.resumeQueueSettings(), clientContext);
    this.getIamPolicyCallable =
        callableFactory.createUnaryCallable(
            getIamPolicyTransportSettings, settings.getIamPolicySettings(), clientContext);
    this.setIamPolicyCallable =
        callableFactory.createUnaryCallable(
            setIamPolicyTransportSettings, settings.setIamPolicySettings(), clientContext);
    this.testIamPermissionsCallable =
        callableFactory.createUnaryCallable(
            testIamPermissionsTransportSettings,
            settings.testIamPermissionsSettings(),
            clientContext);
    this.listTasksCallable =
        callableFactory.createUnaryCallable(
            listTasksTransportSettings, settings.listTasksSettings(), clientContext);
    this.listTasksPagedCallable =
        callableFactory.createPagedCallable(
            listTasksTransportSettings, settings.listTasksSettings(), clientContext);
    this.getTaskCallable =
        callableFactory.createUnaryCallable(
            getTaskTransportSettings, settings.getTaskSettings(), clientContext);
    this.createTaskCallable =
        callableFactory.createUnaryCallable(
            createTaskTransportSettings, settings.createTaskSettings(), clientContext);
    this.deleteTaskCallable =
        callableFactory.createUnaryCallable(
            deleteTaskTransportSettings, settings.deleteTaskSettings(), clientContext);
    this.runTaskCallable =
        callableFactory.createUnaryCallable(
            runTaskTransportSettings, settings.runTaskSettings(), clientContext);
    // Track all background resources so close()/shutdown() can release them together.
    this.backgroundResources =
        new BackgroundResourceAggregation(clientContext.getBackgroundResources());
  }
  // Returns the long-running operations stub that shares this client's context.
  public GrpcOperationsStub getOperationsStub() {
    return operationsStub;
  }
  // The accessors below expose the pre-built callables; each returns the same
  // instance on every call and performs no work of its own.
  @Override
  public UnaryCallable<ListQueuesRequest, ListQueuesResponse> listQueuesCallable() {
    return listQueuesCallable;
  }
  @Override
  public UnaryCallable<ListQueuesRequest, ListQueuesPagedResponse> listQueuesPagedCallable() {
    return listQueuesPagedCallable;
  }
  @Override
  public UnaryCallable<GetQueueRequest, Queue> getQueueCallable() {
    return getQueueCallable;
  }
  @Override
  public UnaryCallable<CreateQueueRequest, Queue> createQueueCallable() {
    return createQueueCallable;
  }
  @Override
  public UnaryCallable<UpdateQueueRequest, Queue> updateQueueCallable() {
    return updateQueueCallable;
  }
  @Override
  public UnaryCallable<DeleteQueueRequest, Empty> deleteQueueCallable() {
    return deleteQueueCallable;
  }
  @Override
  public UnaryCallable<PurgeQueueRequest, Queue> purgeQueueCallable() {
    return purgeQueueCallable;
  }
  @Override
  public UnaryCallable<PauseQueueRequest, Queue> pauseQueueCallable() {
    return pauseQueueCallable;
  }
  @Override
  public UnaryCallable<ResumeQueueRequest, Queue> resumeQueueCallable() {
    return resumeQueueCallable;
  }
  @Override
  public UnaryCallable<GetIamPolicyRequest, Policy> getIamPolicyCallable() {
    return getIamPolicyCallable;
  }
  @Override
  public UnaryCallable<SetIamPolicyRequest, Policy> setIamPolicyCallable() {
    return setIamPolicyCallable;
  }
  @Override
  public UnaryCallable<TestIamPermissionsRequest, TestIamPermissionsResponse>
      testIamPermissionsCallable() {
    return testIamPermissionsCallable;
  }
  @Override
  public UnaryCallable<ListTasksRequest, ListTasksResponse> listTasksCallable() {
    return listTasksCallable;
  }
  @Override
  public UnaryCallable<ListTasksRequest, ListTasksPagedResponse> listTasksPagedCallable() {
    return listTasksPagedCallable;
  }
  @Override
  public UnaryCallable<GetTaskRequest, Task> getTaskCallable() {
    return getTaskCallable;
  }
  @Override
  public UnaryCallable<CreateTaskRequest, Task> createTaskCallable() {
    return createTaskCallable;
  }
  @Override
  public UnaryCallable<DeleteTaskRequest, Empty> deleteTaskCallable() {
    return deleteTaskCallable;
  }
  @Override
  public UnaryCallable<RunTaskRequest, Task> runTaskCallable() {
    return runTaskCallable;
  }
  @Override
  public final void close() {
    try {
      backgroundResources.close();
    } catch (RuntimeException e) {
      // Propagate unchecked exceptions unchanged.
      throw e;
    } catch (Exception e) {
      // Wrap checked exceptions so close() satisfies AutoCloseable without a throws clause.
      throw new IllegalStateException("Failed to close resource", e);
    }
  }
  // The remaining lifecycle methods delegate to the aggregated background resources.
  @Override
  public void shutdown() {
    backgroundResources.shutdown();
  }
  @Override
  public boolean isShutdown() {
    return backgroundResources.isShutdown();
  }
  @Override
  public boolean isTerminated() {
    return backgroundResources.isTerminated();
  }
  @Override
  public void shutdownNow() {
    backgroundResources.shutdownNow();
  }
  @Override
  public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException {
    return backgroundResources.awaitTermination(duration, unit);
  }
}
|
|
/**
*/
package gluemodel.substationStandard.LNNodes.LNGroupR.util;
import gluemodel.substationStandard.LNNodes.DomainLNs.DomainLN;
import gluemodel.substationStandard.LNNodes.LNGroupR.*;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.ecore.EPackage;
import org.eclipse.emf.ecore.util.Switch;
/**
 * <!-- begin-user-doc -->
 * The <b>Switch</b> for the model's inheritance hierarchy.
 * It supports the call {@link #doSwitch(EObject) doSwitch(object)},
 * which invokes the <code>caseXXX</code> method for each class of the model,
 * starting with the actual class of the object and walking up the
 * inheritance hierarchy until a non-null result is returned;
 * that result is the result of the switch.
 * <!-- end-user-doc -->
 * @see gluemodel.substationStandard.LNNodes.LNGroupR.LNGroupRPackage
 * @generated NOT
 */
public class LNGroupRSwitch<T> extends Switch<T> {
	/**
	 * The cached model package.
	 * @generated NOT
	 */
	protected static LNGroupRPackage modelPackage;

	/**
	 * Creates an instance of the switch, caching the model package on first use.
	 * @generated NOT
	 */
	public LNGroupRSwitch() {
		if (modelPackage == null) {
			modelPackage = LNGroupRPackage.eINSTANCE;
		}
	}

	/**
	 * Checks whether this is a switch for the given package.
	 * @param ePackage the package in question.
	 * @return whether this is a switch for the given package.
	 * @generated NOT
	 */
	@Override
	protected boolean isSwitchFor(EPackage ePackage) {
		return ePackage == modelPackage;
	}

	/**
	 * Dispatches to the <code>caseXXX</code> method for the classifier and then its
	 * supertypes (GroupR, then DomainLN) until one yields a non-null result.
	 * @return the first non-null result returned by a <code>caseXXX</code> call.
	 * @generated NOT
	 */
	@Override
	protected T doSwitch(int classifierID, EObject theEObject) {
		switch (classifierID) {
			case LNGroupRPackage.GROUP_R: {
				GroupR node = (GroupR)theEObject;
				T result = caseGroupR(node);
				result = result != null ? result : caseDomainLN(node);
				return result != null ? result : defaultCase(theEObject);
			}
			case LNGroupRPackage.RDRE: {
				RDRE node = (RDRE)theEObject;
				T result = caseRDRE(node);
				result = result != null ? result : caseGroupR(node);
				result = result != null ? result : caseDomainLN(node);
				return result != null ? result : defaultCase(theEObject);
			}
			case LNGroupRPackage.RDRS: {
				RDRS node = (RDRS)theEObject;
				T result = caseRDRS(node);
				result = result != null ? result : caseGroupR(node);
				result = result != null ? result : caseDomainLN(node);
				return result != null ? result : defaultCase(theEObject);
			}
			case LNGroupRPackage.RPSB: {
				RPSB node = (RPSB)theEObject;
				T result = caseRPSB(node);
				result = result != null ? result : caseGroupR(node);
				result = result != null ? result : caseDomainLN(node);
				return result != null ? result : defaultCase(theEObject);
			}
			case LNGroupRPackage.RBRF: {
				RBRF node = (RBRF)theEObject;
				T result = caseRBRF(node);
				result = result != null ? result : caseGroupR(node);
				result = result != null ? result : caseDomainLN(node);
				return result != null ? result : defaultCase(theEObject);
			}
			case LNGroupRPackage.RADR: {
				RADR node = (RADR)theEObject;
				T result = caseRADR(node);
				result = result != null ? result : caseGroupR(node);
				result = result != null ? result : caseDomainLN(node);
				return result != null ? result : defaultCase(theEObject);
			}
			case LNGroupRPackage.RBDR: {
				RBDR node = (RBDR)theEObject;
				T result = caseRBDR(node);
				result = result != null ? result : caseGroupR(node);
				result = result != null ? result : caseDomainLN(node);
				return result != null ? result : defaultCase(theEObject);
			}
			case LNGroupRPackage.RDIR: {
				RDIR node = (RDIR)theEObject;
				T result = caseRDIR(node);
				result = result != null ? result : caseGroupR(node);
				result = result != null ? result : caseDomainLN(node);
				return result != null ? result : defaultCase(theEObject);
			}
			case LNGroupRPackage.RFLO: {
				RFLO node = (RFLO)theEObject;
				T result = caseRFLO(node);
				result = result != null ? result : caseGroupR(node);
				result = result != null ? result : caseDomainLN(node);
				return result != null ? result : defaultCase(theEObject);
			}
			case LNGroupRPackage.RREC: {
				RREC node = (RREC)theEObject;
				T result = caseRREC(node);
				result = result != null ? result : caseGroupR(node);
				result = result != null ? result : caseDomainLN(node);
				return result != null ? result : defaultCase(theEObject);
			}
			case LNGroupRPackage.RSYN: {
				RSYN node = (RSYN)theEObject;
				T result = caseRSYN(node);
				result = result != null ? result : caseGroupR(node);
				result = result != null ? result : caseDomainLN(node);
				return result != null ? result : defaultCase(theEObject);
			}
			default:
				return defaultCase(theEObject);
		}
	}

	/**
	 * Returns the result of interpreting the object as an instance of '<em>Group R</em>'.
	 * This base implementation returns null; a non-null result terminates the switch.
	 * @param object the target of the switch.
	 * @return the result of interpreting the object as an instance of '<em>Group R</em>'.
	 * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
	 * @generated NOT
	 */
	public T caseGroupR(GroupR object) {
		return null;
	}

	/**
	 * Returns the result of interpreting the object as an instance of '<em>RDRE</em>'.
	 * This base implementation returns null; a non-null result terminates the switch.
	 * @param object the target of the switch.
	 * @return the result of interpreting the object as an instance of '<em>RDRE</em>'.
	 * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
	 * @generated NOT
	 */
	public T caseRDRE(RDRE object) {
		return null;
	}

	/**
	 * Returns the result of interpreting the object as an instance of '<em>RDRS</em>'.
	 * This base implementation returns null; a non-null result terminates the switch.
	 * @param object the target of the switch.
	 * @return the result of interpreting the object as an instance of '<em>RDRS</em>'.
	 * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
	 * @generated NOT
	 */
	public T caseRDRS(RDRS object) {
		return null;
	}

	/**
	 * Returns the result of interpreting the object as an instance of '<em>RPSB</em>'.
	 * This base implementation returns null; a non-null result terminates the switch.
	 * @param object the target of the switch.
	 * @return the result of interpreting the object as an instance of '<em>RPSB</em>'.
	 * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
	 * @generated NOT
	 */
	public T caseRPSB(RPSB object) {
		return null;
	}

	/**
	 * Returns the result of interpreting the object as an instance of '<em>RBRF</em>'.
	 * This base implementation returns null; a non-null result terminates the switch.
	 * @param object the target of the switch.
	 * @return the result of interpreting the object as an instance of '<em>RBRF</em>'.
	 * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
	 * @generated NOT
	 */
	public T caseRBRF(RBRF object) {
		return null;
	}

	/**
	 * Returns the result of interpreting the object as an instance of '<em>RADR</em>'.
	 * This base implementation returns null; a non-null result terminates the switch.
	 * @param object the target of the switch.
	 * @return the result of interpreting the object as an instance of '<em>RADR</em>'.
	 * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
	 * @generated NOT
	 */
	public T caseRADR(RADR object) {
		return null;
	}

	/**
	 * Returns the result of interpreting the object as an instance of '<em>RBDR</em>'.
	 * This base implementation returns null; a non-null result terminates the switch.
	 * @param object the target of the switch.
	 * @return the result of interpreting the object as an instance of '<em>RBDR</em>'.
	 * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
	 * @generated NOT
	 */
	public T caseRBDR(RBDR object) {
		return null;
	}

	/**
	 * Returns the result of interpreting the object as an instance of '<em>RDIR</em>'.
	 * This base implementation returns null; a non-null result terminates the switch.
	 * @param object the target of the switch.
	 * @return the result of interpreting the object as an instance of '<em>RDIR</em>'.
	 * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
	 * @generated NOT
	 */
	public T caseRDIR(RDIR object) {
		return null;
	}

	/**
	 * Returns the result of interpreting the object as an instance of '<em>RFLO</em>'.
	 * This base implementation returns null; a non-null result terminates the switch.
	 * @param object the target of the switch.
	 * @return the result of interpreting the object as an instance of '<em>RFLO</em>'.
	 * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
	 * @generated NOT
	 */
	public T caseRFLO(RFLO object) {
		return null;
	}

	/**
	 * Returns the result of interpreting the object as an instance of '<em>RREC</em>'.
	 * This base implementation returns null; a non-null result terminates the switch.
	 * @param object the target of the switch.
	 * @return the result of interpreting the object as an instance of '<em>RREC</em>'.
	 * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
	 * @generated NOT
	 */
	public T caseRREC(RREC object) {
		return null;
	}

	/**
	 * Returns the result of interpreting the object as an instance of '<em>RSYN</em>'.
	 * This base implementation returns null; a non-null result terminates the switch.
	 * @param object the target of the switch.
	 * @return the result of interpreting the object as an instance of '<em>RSYN</em>'.
	 * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
	 * @generated NOT
	 */
	public T caseRSYN(RSYN object) {
		return null;
	}

	/**
	 * Returns the result of interpreting the object as an instance of '<em>Domain LN</em>'.
	 * This base implementation returns null; a non-null result terminates the switch.
	 * @param object the target of the switch.
	 * @return the result of interpreting the object as an instance of '<em>Domain LN</em>'.
	 * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
	 * @generated NOT
	 */
	public T caseDomainLN(DomainLN object) {
		return null;
	}

	/**
	 * Returns the result of interpreting the object as an instance of '<em>EObject</em>'.
	 * This implementation returns null; it is the terminal fallback of the switch.
	 * @param object the target of the switch.
	 * @return the result of interpreting the object as an instance of '<em>EObject</em>'.
	 * @see #doSwitch(org.eclipse.emf.ecore.EObject)
	 * @generated NOT
	 */
	@Override
	public T defaultCase(EObject object) {
		return null;
	}
} //LNGroupRSwitch
|
|
package examples;
import io.vertx.core.Handler;
import io.vertx.core.Vertx;
import io.vertx.core.VertxOptions;
import io.vertx.core.http.HttpMethod;
import io.vertx.core.http.HttpServer;
import io.vertx.core.http.HttpServerResponse;
import io.vertx.core.json.JsonObject;
import io.vertx.ext.auth.AuthProvider;
import io.vertx.ext.auth.jwt.JWTAuth;
import io.vertx.ext.auth.jwt.JWTOptions;
import io.vertx.ext.web.*;
import io.vertx.ext.web.handler.*;
import io.vertx.ext.web.handler.sockjs.*;
import io.vertx.ext.web.sstore.ClusteredSessionStore;
import io.vertx.ext.web.sstore.LocalSessionStore;
import io.vertx.ext.web.sstore.SessionStore;
import io.vertx.ext.web.templ.TemplateEngine;
import java.util.Set;
/**
*
* These are the examples used in the documentation.
*
* @author <a href="mailto:julien@julienviet.com">Julien Viet</a>
* @author <a href="http://tfox.org">Tim Fox</a>
*/
public class Examples {
  // Baseline: a plain Vert.x core HTTP server with a single request handler.
  public void example1(Vertx vertx) {
    HttpServer server = vertx.createHttpServer();
    server.requestHandler(request -> {
      // This handler gets called for each request that arrives on the server
      HttpServerResponse response = request.response();
      response.putHeader("content-type", "text/plain");
      // Write to the response and end it
      response.end("Hello World!");
    });
    server.listen(8080);
  }
  // Same server, but requests are dispatched through a Vert.x-Web Router.
  public void example2(Vertx vertx) {
    HttpServer server = vertx.createHttpServer();
    Router router = Router.router(vertx);
    router.route().handler(routingContext -> {
      // This handler will be called for every request
      HttpServerResponse response = routingContext.response();
      response.putHeader("content-type", "text/plain");
      // Write to the response and end it
      response.end("Hello World from Vert.x-Web!");
    });
    server.requestHandler(router::accept).listen(8080);
  }
  // Exact-path matching: trailing slashes are ignored, sub-paths are not matched.
  public void example3(Router router) {
    Route route = router.route().path("/some/path/");
    route.handler(routingContext -> {
      // This handler will be called for the following request paths:
      // `/some/path`
      // `/some/path/`
      // `/some/path//`
      //
      // but not:
      // `/some/path/subdir`
    });
  }
  // Prefix matching: a trailing `*` matches the path and everything below it.
  public void example3_1(Router router) {
    Route route = router.route().path("/some/path/*");
    route.handler(routingContext -> {
      // This handler will be called for any path that starts with
      // `/some/path/`, e.g.
      // `/some/path`
      // `/some/path/`
      // `/some/path/subdir`
      // `/some/path/subdir/blah.html`
      //
      // but not:
      // `/some/bath`
    });
  }
  // Shorthand: the path can be passed directly to route().
  public void example4(Router router) {
    Route route = router.route("/some/path/*");
    route.handler(routingContext -> {
      // This handler will be called same as previous example
    });
  }
public void example4_1(Router router) {
Route route = router.route(HttpMethod.POST, "/catalogue/products/:productype/:productid/");
route.handler(routingContext -> {
String productType = routingContext.request().getParam("producttype");
String productID = routingContext.request().getParam("productid");
// Do something with them...
});
}
  // Regex path matching via pathRegex().
  public void example5(Router router) {
    // Matches any path ending with 'foo'
    Route route = router.route().pathRegex(".*foo");
    route.handler(routingContext -> {
      // This handler will be called for:
      // /some/path/foo
      // /foo
      // /foo/bar/wibble/foo
      // /foo/bar
      // But not:
      // /bar/wibble
      // NOTE(review): "/foo/bar" does not end with "foo", so either the summary
      // above ("ending with") or this match list is inaccurate — confirm whether
      // pathRegex matches the whole path (anchored) or any substring.
    });
  }
  // Shorthand: the regex can be passed directly to routeWithRegex().
  public void example6(Router router) {
    Route route = router.routeWithRegex(".*foo");
    route.handler(routingContext -> {
      // This handler will be called same as previous example
    });
  }
  // Regex capture groups become params named param0, param1, ...
  public void example6_1(Router router) {
    Route route = router.routeWithRegex(".*foo");
    // This regular expression matches paths that start with something like:
    // "/foo/bar" - where the "foo" is captured into param0 and the "bar" is captured into
    // param1
    route.pathRegex("\\/([^\\/]+)\\/([^\\/]+)").handler(routingContext -> {
      String productType = routingContext.request().getParam("param0");
      String productID = routingContext.request().getParam("param1");
      // Do something with them...
    });
  }
  // Restricting a route to a single HTTP method.
  public void example7(Router router) {
    Route route = router.route().method(HttpMethod.POST);
    route.handler(routingContext -> {
      // This handler will be called for any POST request
    });
  }
  // Method and path can be combined in one route() call.
  public void example8(Router router) {
    Route route = router.route(HttpMethod.POST, "/some/path/");
    route.handler(routingContext -> {
      // This handler will be called for any POST request to a URI path starting with /some/path/
    });
  }
  // Per-method convenience helpers: get(), get(path), getWithRegex(), etc.
  public void example8_1(Router router) {
    router.get().handler(routingContext -> {
      // Will be called for any GET request
    });
    router.get("/some/path/").handler(routingContext -> {
      // Will be called for any GET request to a path
      // starting with /some/path
    });
    router.getWithRegex(".*foo").handler(routingContext -> {
      // Will be called for any GET request to a path
      // ending with `foo`
    });
    // There are also equivalents to the above for PUT, POST, DELETE, HEAD and OPTIONS
  }
  // A route may accept multiple HTTP methods.
  public void example9(Router router) {
    Route route = router.route().method(HttpMethod.POST).method(HttpMethod.PUT);
    route.handler(routingContext -> {
      // This handler will be called for any POST or PUT request
    });
  }
  // Chaining: next() passes the request on to the next matching route.
  public void example10(Router router) {
    Route route1 = router.route("/some/path/").handler(routingContext -> {
      HttpServerResponse response = routingContext.response();
      // enable chunked responses because we will be adding data as
      // we execute over other handlers. This is only required once and
      // only if several handlers do output.
      response.setChunked(true);
      response.write("route1\n");
      // Now call the next matching route
      routingContext.next();
    });
    Route route2 = router.route("/some/path/").handler(routingContext -> {
      HttpServerResponse response = routingContext.response();
      response.write("route2\n");
      // Now call the next matching route
      routingContext.next();
    });
    Route route3 = router.route("/some/path/").handler(routingContext -> {
      HttpServerResponse response = routingContext.response();
      response.write("route3");
      // Now end the response
      routingContext.response().end();
    });
  }
  // Route ordering: order(n) overrides the default declaration order.
  public void example11(Router router) {
    Route route1 = router.route("/some/path/").handler(routingContext -> {
      HttpServerResponse response = routingContext.response();
      response.write("route1\n");
      // Now call the next matching route
      routingContext.next();
    });
    Route route2 = router.route("/some/path/").handler(routingContext -> {
      HttpServerResponse response = routingContext.response();
      // enable chunked responses because we will be adding data as
      // we execute over other handlers. This is only required once and
      // only if several handlers do output.
      response.setChunked(true);
      response.write("route2\n");
      // Now call the next matching route
      routingContext.next();
    });
    Route route3 = router.route("/some/path/").handler(routingContext -> {
      HttpServerResponse response = routingContext.response();
      response.write("route3");
      // Now end the response
      routingContext.response().end();
    });
    // Change the order of route2 so it runs before route1
    route2.order(-1);
  }
  // Content-based routing: consumes() matches the request content-type header.
  public void example12(Router router) {
    // Exact match
    router.route().consumes("text/html").handler(routingContext -> {
      // This handler will be called for any request with
      // content-type header set to `text/html`
    });
  }
  public void example13(Router router) {
    // Multiple exact matches
    router.route().consumes("text/html").consumes("text/plain").handler(routingContext -> {
      // This handler will be called for any request with
      // content-type header set to `text/html` or `text/plain`.
    });
  }
  public void example14(Router router) {
    // Sub-type wildcard match
    router.route().consumes("text/*").handler(routingContext -> {
      // This handler will be called for any request with top level type `text`
      // e.g. content-type header set to `text/html` or `text/plain` will both match
    });
  }
  public void example15(Router router) {
    // Top level type wildcard match
    router.route().consumes("*/json").handler(routingContext -> {
      // This handler will be called for any request with sub-type json
      // e.g. content-type header set to `text/json` or `application/json` will both match
    });
  }
  // produces() matches against the client's accept header.
  public void example16(Router router, String someJSON) {
    router.route().produces("application/json").handler(routingContext -> {
      HttpServerResponse response = routingContext.response();
      response.putHeader("content-type", "application/json");
      response.write(someJSON).end();
    });
  }
  public void example17(Router router, String whatever) {
    // This route can produce two different MIME types
    router.route().produces("application/json").produces("text/html").handler(routingContext -> {
      HttpServerResponse response = routingContext.response();
      // Get the actual MIME type acceptable
      String acceptableContentType = routingContext.getAcceptableContentType();
      response.putHeader("content-type", acceptableContentType);
      response.write(whatever).end();
    });
  }
  // Method, path, consumes and produces criteria can all be combined on one route.
  public void example18(Router router) {
    Route route = router.route(HttpMethod.PUT, "myapi/orders")
        .consumes("application/json")
        .produces("application/json");
    route.handler(routingContext -> {
      // This would be match for any PUT method to paths starting with "myapi/orders" with a
      // content-type of "application/json"
      // and an accept header matching "application/json"
    });
  }
public void example20(Router router) {
    // Three handlers on the same path; each calls next() so all of them run.
    Route route1 = router.route("/some/path/").handler(ctx -> {
        HttpServerResponse resp = ctx.response();
        // Chunked mode is required because several handlers write to the
        // response; it only needs to be enabled once.
        resp.setChunked(true);
        resp.write("route1\n");
        // Hand off to the next matching route after a 5 second delay
        ctx.vertx().setTimer(5000, tid -> ctx.next());
    });
    Route route2 = router.route("/some/path/").handler(ctx -> {
        ctx.response().write("route2\n");
        // Hand off to the next matching route after a 5 second delay
        ctx.vertx().setTimer(5000, tid -> ctx.next());
    });
    Route route3 = router.route("/some/path/").handler(ctx -> {
        ctx.response().write("route3");
        // Last handler in the chain ends the response
        ctx.response().end();
    });
}
public void example20_1(Router router, SomeLegacyService service) {
    // blockingHandler() runs the body off the event loop, so synchronous
    // (blocking) calls are safe here.
    router.route().blockingHandler(ctx -> {
        service.doSomethingThatBlocks();
        // Continue with the next matching handler
        ctx.next();
    });
}
// Stand-in for a legacy, synchronous service used by the blockingHandler example above.
interface SomeLegacyService {
// Performs some blocking (non-async) work.
void doSomethingThatBlocks();
}
public void example21(Router router) {
    // Data put into the routing context is visible to later handlers in the chain.
    router.get("/some/path").handler(ctx -> {
        ctx.put("foo", "bar");
        ctx.next();
    });
    router.get("/some/path/other").handler(ctx -> {
        String bar = ctx.get("foo");
        // Use the value placed by the earlier handler, then finish
        ctx.response().end();
    });
}
public void example22(Vertx vertx, String productJSON) {
    // A small REST-style API using path parameters (':productID').
    Router restAPI = Router.router(vertx);
    restAPI.get("/products/:productID").handler(ctx -> {
        // TODO Handle the lookup of the product....
        ctx.response().write(productJSON);
    });
    restAPI.put("/products/:productID").handler(ctx -> {
        // TODO Add a new product...
        ctx.response().end();
    });
    restAPI.delete("/products/:productID").handler(ctx -> {
        // TODO delete the product...
        ctx.response().end();
    });
}
public void example23(Vertx vertx, Handler<RoutingContext> myStaticHandler, Handler<RoutingContext> myTemplateHandler) {
    Router mainRouter = Router.router(vertx);
    // Static resources are served under /static/*
    mainRouter.route("/static/*").handler(myStaticHandler);
    // Regex route: anything ending in ".templ" goes to the template handler
    mainRouter.route(".*\\.templ").handler(myTemplateHandler);
}
public void example24(Router mainRouter, Router restAPI) {
    // Mount the REST sub-router under the /productsAPI prefix.
    mainRouter.mountSubRouter("/productsAPI", restAPI);
}
public void example25(Router router) {
    Route route = router.get("/somepath/*");
    // A failure handler is invoked for failures raised while routing requests
    // to paths starting with '/somepath/'.
    route.failureHandler(failureCtx -> {
        // Handle the failure here
    });
}
public void example26(Router router) {
    // A handler may fail by throwing...
    Route route1 = router.get("/somepath/path1/");
    route1.handler(ctx -> {
        throw new RuntimeException("something happened!");
    });
    // ...or by calling fail() with an explicit status code (e.g. 403 - Forbidden).
    Route route2 = router.get("/somepath/path2");
    route2.handler(ctx -> {
        ctx.fail(403);
    });
    // A failure handler on a wildcard route catches failures from both handlers above.
    Route route3 = router.get("/somepath/*");
    route3.failureHandler(failureCtx -> {
        // 500 for the thrown RuntimeException, 403 for the explicit fail()
        int statusCode = failureCtx.statusCode();
        HttpServerResponse resp = failureCtx.response();
        resp.setStatusCode(statusCode).end("Sorry! Not today");
    });
}
public void example27(Router router) {
    // Install a body handler on the catch-all route so it runs for all requests.
    router.route().handler(BodyHandler.create());
}
public void example28(Router router) {
    // The body handler must run first so file uploads are parsed.
    router.route().handler(BodyHandler.create());
    router.post("/some/path/uploads").handler(ctx -> {
        Set<FileUpload> uploads = ctx.fileUploads();
        // Process the uploads here....
    });
}
public void example29(Router router) {
    // Install a cookie handler on the catch-all route so it runs for all requests.
    router.route().handler(CookieHandler.create());
}
public void example30(Router router) {
    // The cookie handler must be installed for all routes before cookies can be read.
    router.route().handler(CookieHandler.create());
    router.route("some/path/").handler(ctx -> {
        Cookie someCookie = ctx.getCookie("mycookie");
        String cookieValue = someCookie.getValue();
        // Use the cookie value here...
        // Adding a cookie writes it back in the response automatically
        ctx.addCookie(Cookie.cookie("othercookie", "somevalue"));
    });
}
public void example31(Vertx vertx) {
    // Local session store with default settings
    SessionStore store1 = LocalSessionStore.create(vertx);
    // Same, but naming the local shared map explicitly — useful when several
    // applications share one Vert.x instance and need separate maps
    SessionStore store2 = LocalSessionStore.create(vertx, "myapp3.sessionmap");
    // Same, additionally setting the expired-session reaper interval to 10 seconds
    SessionStore store3 = LocalSessionStore.create(vertx, "myapp3.sessionmap", 10000);
}
public void example32() {
    // Clustered session stores require a clustered Vert.x instance.
    Vertx.clusteredVertx(new VertxOptions().setClustered(true), res -> {
        Vertx vertx = res.result();
        // Clustered session store with default settings
        SessionStore store1 = ClusteredSessionStore.create(vertx);
        // Same, but naming the distributed map explicitly — useful when several
        // applications share one cluster and need separate maps
        SessionStore store2 = ClusteredSessionStore.create(vertx, "myclusteredapp3.sessionmap");
    });
}
public void example33(Vertx vertx) {
    Router router = Router.router(vertx);
    // Sessions are tracked via a cookie, so the cookie handler comes first
    router.route().handler(CookieHandler.create());
    // Back the session handler with a clustered store (defaults)
    SessionStore store = ClusteredSessionStore.create(vertx);
    SessionHandler sessionHandler = SessionHandler.create(store);
    // Route every request through the session handler as well
    router.route().handler(sessionHandler);
    // Application handlers come after the session plumbing
    router.route("/somepath/blah/").handler(ctx -> {
        Session session = ctx.session();
        session.put("foo", "bar");
        // etc
    });
}
public void example34(SessionHandler sessionHandler, Router router) {
    router.route().handler(CookieHandler.create());
    router.route().handler(sessionHandler);
    // Application handlers come after the session plumbing
    router.route("/somepath/blah").handler(ctx -> {
        Session session = ctx.session();
        // Put some data into the session
        session.put("foo", "bar");
        // Retrieve some data from the session
        int age = session.get("age");
        // Remove some data from the session
        JsonObject obj = session.remove("myobj");
    });
}
public void example37(Vertx vertx, AuthProvider authProvider, Router router) {
    // Auth handlers need cookies and sessions set up first
    router.route().handler(CookieHandler.create());
    router.route().handler(SessionHandler.create(LocalSessionStore.create(vertx)));
    AuthHandler basicAuthHandler = BasicAuthHandler.create(authProvider);
}
public void example38(Vertx vertx, AuthProvider authProvider, Router router) {
    // Cookie, session and user-session handlers must precede the auth handler
    router.route().handler(CookieHandler.create());
    router.route().handler(SessionHandler.create(LocalSessionStore.create(vertx)));
    router.route().handler(UserSessionHandler.create(authProvider));
    AuthHandler basicAuthHandler = BasicAuthHandler.create(authProvider);
    // Protect everything under '/private/'
    router.route("/private/*").handler(basicAuthHandler);
    router.route("/someotherpath").handler(ctx -> {
        // Public access - no login required
    });
    router.route("/private/somepath").handler(ctx -> {
        // Login required to get here, so this is true
        boolean isAuthenticated = ctx.user() != null;
    });
}
public void example39(Vertx vertx, AuthProvider authProvider, Router router) {
    // Cookie, session and user-session handlers must precede the auth handler
    router.route().handler(CookieHandler.create());
    router.route().handler(SessionHandler.create(LocalSessionStore.create(vertx)));
    router.route().handler(UserSessionHandler.create(authProvider));
    AuthHandler redirectAuthHandler = RedirectAuthHandler.create(authProvider);
    // Protect everything under '/private/'
    router.route("/private/*").handler(redirectAuthHandler);
    // The form login handler processes the actual login submission
    router.route("/login").handler(FormLoginHandler.create(authProvider));
    // Static handler serves static resources such as the login page
    router.route().handler(StaticHandler.create());
    router.route("/someotherpath").handler(ctx -> {
        // Public access - no login required
    });
    router.route("/private/somepath").handler(ctx -> {
        // Login required to get here, so this is true
        boolean isAuthenticated = ctx.user() != null;
    });
}
public void example40(AuthProvider authProvider, Router router) {
    // Require the "list_products" authority to list products
    AuthHandler listProductsAuthHandler = RedirectAuthHandler.create(authProvider);
    listProductsAuthHandler.addAuthority("list_products");
    router.route("/listproducts/*").handler(listProductsAuthHandler);
    // Only users with the "admin" role may reach /private/settings
    AuthHandler settingsAuthHandler = RedirectAuthHandler.create(authProvider);
    settingsAuthHandler.addAuthority("role:admin");
    router.route("/private/settings/*").handler(settingsAuthHandler);
}
public void example41(Router router) {
    // Serve static resources for everything under /static/*
    router.route("/static/*").handler(StaticHandler.create());
}
public void example41_0_1(Router router) {
    // CORS: only GET requests from origin "vertx.io" are accepted
    router.route().handler(CorsHandler.create("vertx\\.io").allowedMethod(HttpMethod.GET));
    router.route().handler(ctx -> {
        // Application handlers go here
    });
}
public void example41_2(Router router, TemplateEngine engine) {
    TemplateHandler handler = TemplateHandler.create(engine);
    // Populate the context with data for the template, then pass control on
    router.get("/dynamic").handler(ctx -> {
        ctx.put("request_path", ctx.request().path());
        ctx.put("session_data", ctx.session().data());
        ctx.next();
    });
    // The template handler renders using the data placed above
    router.get("/dynamic/").handler(handler);
}
public void example41_3(Router router) {
    // Route failures on '/somepath/' to the pretty error handler
    router.route("/somepath/").failureHandler(ErrorHandler.create());
}
public void example42(Router router) {
    // Time out requests to '/foo/' after 5000 ms
    router.route("/foo/").handler(TimeoutHandler.create(5000));
}
public void example43(Vertx vertx) {
    Router router = Router.router(vertx);
    // SockJS handler with a 2 second heartbeat interval
    SockJSHandlerOptions options = new SockJSHandlerOptions().setHeartbeatInterval(2000);
    SockJSHandler sockJSHandler = SockJSHandler.create(vertx, options);
    router.route("/myapp/*").handler(sockJSHandler);
}
public void example44(Vertx vertx) {
    Router router = Router.router(vertx);
    SockJSHandlerOptions options = new SockJSHandlerOptions().setHeartbeatInterval(2000);
    SockJSHandler sockJSHandler = SockJSHandler.create(vertx, options);
    // Echo server: every received frame is written straight back
    sockJSHandler.socketHandler(socket -> {
        socket.handler(socket::write);
    });
    router.route("/myapp/*").handler(sockJSHandler);
}
public void example45(Vertx vertx) {
    Router router = Router.router(vertx);
    SockJSHandler sockJSHandler = SockJSHandler.create(vertx);
    // Default bridge options: nothing is permitted in either direction
    BridgeOptions options = new BridgeOptions();
    sockJSHandler.bridge(options);
    router.route("/eventbus/*").handler(sockJSHandler);
}
public void example46(Vertx vertx) {
    Router router = Router.router(vertx);
    SockJSHandler sockJSHandler = SockJSHandler.create(vertx);
    // Let through any messages sent to 'demo.orderMgr' from the client
    PermittedOptions inboundPermitted1 = new PermittedOptions().setAddress("demo.orderMgr");
    // Allow calls to the address 'demo.persistor' from the client as long as the messages
    // have an action field with value 'find' and a collection field with value
    // 'albums'
    PermittedOptions inboundPermitted2 = new PermittedOptions().setAddress("demo.persistor")
        .setMatch(new JsonObject().put("action", "find")
            .put("collection", "albums"));
    // Allow through any message with a field `wibble` with value `foo`.
    PermittedOptions inboundPermitted3 = new PermittedOptions().setMatch(new JsonObject().put("wibble", "foo"));
    // First let's define what we're going to allow from server -> client
    // Let through any messages coming from address 'ticker.mystock'
    PermittedOptions outboundPermitted1 = new PermittedOptions().setAddress("ticker.mystock");
    // Let through any messages from addresses starting with "news." (e.g. news.europe, news.usa, etc)
    PermittedOptions outboundPermitted2 = new PermittedOptions().setAddressRegex("news\\..+");
    // Let's define what we're going to allow from client -> server
    // Fix: inboundPermitted1 was previously registered twice while
    // inboundPermitted2 (described above) was never registered at all;
    // each permitted-options instance is now added exactly once.
    BridgeOptions options = new BridgeOptions().
        addInboundPermitted(inboundPermitted1).
        addInboundPermitted(inboundPermitted2).
        addInboundPermitted(inboundPermitted3).
        addOutboundPermitted(outboundPermitted1).
        addOutboundPermitted(outboundPermitted2);
    sockJSHandler.bridge(options);
    router.route("/eventbus/*").handler(sockJSHandler);
}
public void example47() {
    // Let through any messages sent to 'demo.orderService' from the client,
    // but only when the user is logged in with the "place_orders" authority.
    PermittedOptions inboundPermitted = new PermittedOptions().setAddress("demo.orderService");
    inboundPermitted.setRequiredAuthority("place_orders");
    BridgeOptions options = new BridgeOptions().addInboundPermitted(inboundPermitted);
}
public void example48(Vertx vertx, AuthProvider authProvider) {
    Router router = Router.router(vertx);
    // Let through any messages sent to 'demo.orderService' from the client,
    // but only when the user is logged in with the "place_orders" authority.
    PermittedOptions inboundPermitted = new PermittedOptions().setAddress("demo.orderService");
    inboundPermitted.setRequiredAuthority("place_orders");
    SockJSHandler sockJSHandler = SockJSHandler.create(vertx);
    sockJSHandler.bridge(new BridgeOptions().
        addInboundPermitted(inboundPermitted));
    // Basic auth plumbing: cookies, sessions, then the auth handler itself
    router.route().handler(CookieHandler.create());
    router.route().handler(SessionHandler.create(LocalSessionStore.create(vertx)));
    AuthHandler basicAuthHandler = BasicAuthHandler.create(authProvider);
    router.route("/eventbus/*").handler(basicAuthHandler);
    router.route("/eventbus/*").handler(sockJSHandler);
}
public void example48_1(Vertx vertx) {
    Router router = Router.router(vertx);
    // Let through any messages sent to 'demo.orderService' from the client
    PermittedOptions inboundPermitted = new PermittedOptions().setAddress("demo.orderService");
    SockJSHandler sockJSHandler = SockJSHandler.create(vertx);
    BridgeOptions options = new BridgeOptions().addInboundPermitted(inboundPermitted);
    // The bridge event hook lets us decorate messages as they cross the bridge
    sockJSHandler.bridge(options, event -> {
        if (event.type() == BridgeEventType.PUBLISH || event.type() == BridgeEventType.SEND) {
            // Attach extra headers to the raw message
            JsonObject headers = new JsonObject().put("header1", "val").put("header2", "val2");
            event.rawMessage().put("headers", headers);
        }
        event.complete(true);
    });
    router.route("/eventbus").handler(sockJSHandler);
}
public void example49(Vertx vertx) {
Router router = Router.router(vertx);
// Let through any messages sent to 'demo.someService' from the client
PermittedOptions inboundPermitted = new PermittedOptions().setAddress("demo.someService");
SockJSHandler sockJSHandler = SockJSHandler.create(vertx);
BridgeOptions options = new BridgeOptions().addInboundPermitted(inboundPermitted);
// The bridge event hook can veto individual messages by completing with false
sockJSHandler.bridge(options, be -> {
if (be.type() == BridgeEventType.PUBLISH || be.type() == BridgeEventType.RECEIVE) {
// NOTE(review): assumes the raw message always carries a "body" string;
// a message without one would NPE here — confirm against the bridge protocol
if (be.rawMessage().getString("body").equals("armadillos")) {
// Reject it
be.complete(false);
return;
}
}
// Everything else is allowed through
be.complete(true);
});
router.route("/eventbus").handler(sockJSHandler);
}
public void example50(Vertx vertx) {
    Router router = Router.router(vertx);
    // JWT auth backed by a JCEKS keystore
    JsonObject authConfig = new JsonObject().put("keyStore", new JsonObject()
        .put("type", "jceks")
        .put("path", "keystore.jceks")
        .put("password", "secret"));
    JWTAuth authProvider = JWTAuth.create(vertx, authConfig);
    router.route("/login").handler(ctx -> {
        // this is an example, authentication should be done with another provider...
        boolean ok = "paulo".equals(ctx.request().getParam("username"))
            && "secret".equals(ctx.request().getParam("password"));
        if (ok) {
            // Issue a token whose subject is the authenticated user
            ctx.response().end(authProvider.generateToken(new JsonObject().put("sub", "paulo"), new JWTOptions()));
        } else {
            ctx.fail(401);
        }
    });
}
public void example51(Vertx vertx) {
    Router router = Router.router(vertx);
    // JWT auth backed by a JCEKS keystore
    JsonObject authConfig = new JsonObject().put("keyStore", new JsonObject()
        .put("type", "jceks")
        .put("path", "keystore.jceks")
        .put("password", "secret"));
    JWTAuth authProvider = JWTAuth.create(vertx, authConfig);
    // Everything under /protected/ requires a valid JWT
    router.route("/protected/*").handler(JWTAuthHandler.create(authProvider));
    router.route("/protected/somepage").handler(ctx -> {
        // some handle code...
    });
}
public void example52(Vertx vertx) {
    // Generate a token carrying arbitrary claims alongside the subject
    JsonObject authConfig = new JsonObject().put("keyStore", new JsonObject()
        .put("type", "jceks")
        .put("path", "keystore.jceks")
        .put("password", "secret"));
    JWTAuth authProvider = JWTAuth.create(vertx, authConfig);
    authProvider.generateToken(new JsonObject().put("sub", "paulo").put("someKey", "some value"), new JWTOptions());
}
public void example53(Vertx vertx) {
    // JWT claims are exposed on the user principal
    Handler<RoutingContext> handler = ctx -> {
        String subject = ctx.user().principal().getString("sub");
        String someValue = ctx.user().principal().getString("someKey");
    };
}
public void example54(Router router) {
    // CSRF protection needs cookies; the secret seeds the token generator
    router.route().handler(CookieHandler.create());
    router.route().handler(CSRFHandler.create("abracadabra"));
    router.route().handler(ctx -> {
    });
}
public void example55(Router router) {
    router.get("/some/path").handler(ctx -> {
        ctx.put("foo", "bar");
        ctx.next();
    });
    router.get("/some/path/B").handler(ctx -> {
        ctx.response().end();
    });
    // reroute() restarts routing from the top using the new path
    router.get("/some/path").handler(ctx -> {
        ctx.reroute("/some/path/B");
    });
}
public void example56(Router router) {
    // Only requests whose Host header matches *.vertx.io reach the inner handler
    router.route().handler(VirtualHostHandler.create("*.vertx.io", ctx -> {
        // Handle *.vertx.io requests here
    }));
}
}
|
|
/*
* JBoss, Home of Professional Open Source.
* Copyright 2014 Red Hat, Inc., and individual contributors
* as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.undertow.servlet.spec;
import io.undertow.security.api.SecurityContext;
import io.undertow.security.idm.Account;
import io.undertow.server.HttpServerExchange;
import io.undertow.server.RequestTooBigException;
import io.undertow.server.handlers.form.FormData;
import io.undertow.server.handlers.form.FormDataParser;
import io.undertow.server.handlers.form.MultiPartParserDefinition;
import io.undertow.server.protocol.http.HttpAttachments;
import io.undertow.server.session.Session;
import io.undertow.server.session.SessionConfig;
import io.undertow.servlet.UndertowServletMessages;
import io.undertow.servlet.api.AuthorizationManager;
import io.undertow.servlet.api.Deployment;
import io.undertow.servlet.api.InstanceFactory;
import io.undertow.servlet.api.InstanceHandle;
import io.undertow.servlet.core.ManagedServlet;
import io.undertow.servlet.core.ServletUpgradeListener;
import io.undertow.servlet.handlers.ServletChain;
import io.undertow.servlet.handlers.ServletPathMatch;
import io.undertow.servlet.handlers.ServletRequestContext;
import io.undertow.servlet.util.EmptyEnumeration;
import io.undertow.servlet.util.IteratorEnumeration;
import io.undertow.util.AttachmentKey;
import io.undertow.util.CanonicalPathUtils;
import io.undertow.util.DateUtils;
import io.undertow.util.HeaderMap;
import io.undertow.util.HeaderValues;
import io.undertow.util.Headers;
import io.undertow.util.HttpString;
import io.undertow.util.LocaleUtils;
import io.undertow.util.Methods;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.UnsupportedEncodingException;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.nio.charset.Charset;
import java.nio.charset.UnsupportedCharsetException;
import java.security.AccessController;
import java.security.Principal;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.Deque;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import javax.servlet.AsyncContext;
import javax.servlet.DispatcherType;
import javax.servlet.MultipartConfigElement;
import javax.servlet.RequestDispatcher;
import javax.servlet.ServletException;
import javax.servlet.ServletInputStream;
import javax.servlet.ServletRequest;
import javax.servlet.ServletRequestWrapper;
import javax.servlet.ServletResponse;
import javax.servlet.ServletResponseWrapper;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletMapping;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import javax.servlet.http.HttpUpgradeHandler;
import javax.servlet.http.Part;
import javax.servlet.http.PushBuilder;
/**
* The http servlet request implementation. This class is not thread safe
*
* @author Stuart Douglas
*/
public final class HttpServletRequestImpl implements HttpServletRequest {
@Deprecated
public static final AttachmentKey<Boolean> SECURE_REQUEST = HttpServerExchange.SECURE_REQUEST;
// The underlying Undertow exchange this request wraps
private final HttpServerExchange exchange;
// Context the request was originally dispatched to; survives cross-context dispatches
private final ServletContextImpl originalServletContext;
// Current (possibly cross-context) servlet context
private ServletContextImpl servletContext;
// Lazily created request-attribute map
private Map<String, Object> attributes = null;
private ServletInputStream servletInputStream;
private BufferedReader reader;
// Lazily converted and cached servlet cookies (see getCookies())
private Cookie[] cookies;
// Lazily loaded multipart parts (see getParts()/getPart())
private List<Part> parts = null;
private volatile boolean asyncStarted = false;
private volatile AsyncContextImpl asyncContext = null;
private Map<String, Deque<String>> queryParameters;
private FormData parsedFormData;
// Deferred exception from form parsing, rethrown when the body is accessed
private RuntimeException formParsingException;
private Charset characterEncoding;
// True once the request body has started being read
private boolean readStarted;
// Cached source (cookie/URL/none) of the requested session id
private SessionConfig.SessionCookieSource sessionCookieSource;
/**
 * Creates a request wrapper around the given exchange. The supplied context is
 * recorded both as the current and the original servlet context.
 */
public HttpServletRequestImpl(final HttpServerExchange exchange, final ServletContextImpl servletContext) {
this.exchange = exchange;
this.servletContext = servletContext;
this.originalServletContext = servletContext;
}
/** Returns the underlying Undertow exchange this request wraps. */
public HttpServerExchange getExchange() {
return exchange;
}
@Override
public String getAuthType() {
    // The auth type is the mechanism name of the exchange's security context,
    // or null when no security context is present.
    final SecurityContext securityContext = exchange.getSecurityContext();
    if (securityContext == null) {
        return null;
    }
    return securityContext.getMechanismName();
}
/**
 * Lazily converts the exchange's Undertow cookies into servlet {@link Cookie}s
 * and caches the result. Returns {@code null} when the request carries no
 * cookies. Cookies whose name/value are rejected by the servlet Cookie
 * constructor (IllegalArgumentException) are silently skipped, and the cached
 * array is shrunk accordingly.
 */
@Override
public Cookie[] getCookies() {
if (cookies == null) {
Map<String, io.undertow.server.handlers.Cookie> cookies = exchange.getRequestCookies();
if (cookies.isEmpty()) {
return null;
}
int count = cookies.size();
Cookie[] value = new Cookie[count];
int i = 0;
for (Map.Entry<String, io.undertow.server.handlers.Cookie> entry : cookies.entrySet()) {
io.undertow.server.handlers.Cookie cookie = entry.getValue();
try {
// Copy only the attributes the Undertow cookie actually set
Cookie c = new Cookie(cookie.getName(), cookie.getValue());
if (cookie.getDomain() != null) {
c.setDomain(cookie.getDomain());
}
c.setHttpOnly(cookie.isHttpOnly());
if (cookie.getMaxAge() != null) {
c.setMaxAge(cookie.getMaxAge());
}
if (cookie.getPath() != null) {
c.setPath(cookie.getPath());
}
c.setSecure(cookie.isSecure());
c.setVersion(cookie.getVersion());
value[i++] = c;
} catch (IllegalArgumentException e) {
// Ignore bad cookie
}
}
// Some cookies were skipped — trim the array to the valid entries
if( i < count ) {
Cookie[] shrunkCookies = new Cookie[i];
System.arraycopy(value, 0, shrunkCookies, 0, i);
value = shrunkCookies;
}
this.cookies = value;
}
return cookies;
}
/**
 * Returns the named header parsed as an HTTP date (millis since epoch),
 * -1 when the header is absent, or throws when the value cannot be parsed
 * as a date — per the HttpServletRequest contract.
 */
@Override
public long getDateHeader(final String name) {
String header = exchange.getRequestHeaders().getFirst(name);
if (header == null) {
return -1;
}
Date date = DateUtils.parseDate(header);
if (date == null) {
throw UndertowServletMessages.MESSAGES.headerCannotBeConvertedToDate(header);
}
return date.getTime();
}
@Override
public String getHeader(final String name) {
    // First value of the named request header, or null when absent
    return exchange.getRequestHeaders().getFirst(name);
}
/** Overload taking Undertow's case-insensitive {@link HttpString} header name. */
public String getHeader(final HttpString name) {
HeaderMap headers = exchange.getRequestHeaders();
return headers.getFirst(name);
}
@Override
public Enumeration<String> getHeaders(final String name) {
    // All values of the named header; an empty enumeration when it is absent
    final List<String> values = exchange.getRequestHeaders().get(name);
    if (values == null) {
        return EmptyEnumeration.instance();
    }
    return new IteratorEnumeration<>(values.iterator());
}
@Override
public Enumeration<String> getHeaderNames() {
    // Collect the distinct header names as plain strings
    final Set<String> names = new HashSet<>();
    for (final HttpString header : exchange.getRequestHeaders().getHeaderNames()) {
        names.add(header.toString());
    }
    return new IteratorEnumeration<>(names.iterator());
}
/**
 * Builds the servlet-4.0 {@link HttpServletMapping} describing how this request
 * was matched. Uses the original path match, except for FORWARD dispatches
 * where the current match applies. The match value is derived per mapping
 * kind, with any leading '/' stripped.
 */
@Override
public HttpServletMapping getHttpServletMapping() {
ServletRequestContext src = exchange.getAttachment(ServletRequestContext.ATTACHMENT_KEY);
ServletPathMatch match = src.getOriginalServletPathMatch();
if(getDispatcherType() == DispatcherType.FORWARD) {
match = src.getServletPathMatch();
}
String matchValue;
switch (match.getMappingMatch()) {
case EXACT:
matchValue = match.getMatched();
if(matchValue.startsWith("/")) {
matchValue = matchValue.substring(1);
}
break;
case DEFAULT:
case CONTEXT_ROOT:
matchValue = "";
break;
case PATH:
matchValue = match.getRemaining();
if(matchValue.startsWith("/")) {
matchValue = matchValue.substring(1);
}
break;
case EXTENSION:
// Strip the extension (the match string minus its leading '*') from the matched path
matchValue = match.getMatched().substring(0, match.getMatched().length() - match.getMatchString().length() + 1);
if(matchValue.startsWith("/")) {
matchValue = matchValue.substring(1);
}
break;
default:
matchValue = match.getRemaining();
}
return new MappingImpl(matchValue, match.getMatchString(), match.getMappingMatch(), match.getServletChain().getManagedServlet().getServletInfo().getName());
}
@Override
public int getIntHeader(final String name) {
    // -1 when the header is absent; otherwise parse it (parse failures
    // propagate as NumberFormatException, per the servlet contract)
    final String value = getHeader(name);
    if (value == null) {
        return -1;
    }
    return Integer.parseInt(value);
}
/** Returns the HTTP method (GET, POST, ...) of the underlying exchange. */
@Override
public String getMethod() {
return exchange.getRequestMethod().toString();
}
@Override
public String getPathInfo() {
    // The path info is the part of the path left over after the servlet match,
    // or null when there is no match attached to the exchange.
    final ServletPathMatch match = exchange.getAttachment(ServletRequestContext.ATTACHMENT_KEY).getServletPathMatch();
    return match == null ? null : match.getRemaining();
}
/** Translates the path info to a real filesystem path via getRealPath. */
@Override
public String getPathTranslated() {
return getRealPath(getPathInfo());
}
/** Returns the context path of the current (possibly dispatched-to) servlet context. */
@Override
public String getContextPath() {
return servletContext.getContextPath();
}
@Override
public String getQueryString() {
    // Servlet contract: null rather than "" when there is no query string
    final String query = exchange.getQueryString();
    return query.isEmpty() ? null : query;
}
@Override
public String getRemoteUser() {
    // Name of the authenticated principal, or null when not authenticated
    final Principal principal = getUserPrincipal();
    return principal == null ? null : principal.getName();
}
/**
 * Role check per the servlet spec: null and the literal "*" are never a match;
 * "**" matches any authenticated user unless "**" is itself declared as an
 * application security role; everything else is delegated to the deployment's
 * {@link AuthorizationManager}.
 */
@Override
public boolean isUserInRole(final String role) {
if (role == null) {
return false;
}
//according to the servlet spec this aways returns false
if (role.equals("*")) {
return false;
}
SecurityContext sc = exchange.getSecurityContext();
Account account = sc.getAuthenticatedAccount();
if (account == null) {
// Unauthenticated users are in no role
return false;
}
ServletRequestContext servletRequestContext = exchange.getAttachment(ServletRequestContext.ATTACHMENT_KEY);
if (role.equals("**")) {
Set<String> roles = servletRequestContext.getDeployment().getDeploymentInfo().getSecurityRoles();
if (!roles.contains("**")) {
// "**" not declared as an explicit role: any authenticated user matches
return true;
}
}
final ServletChain servlet = servletRequestContext.getCurrentServlet();
final Deployment deployment = servletContext.getDeployment();
final AuthorizationManager authorizationManager = deployment.getDeploymentInfo().getAuthorizationManager();
return authorizationManager.isUserInRole(role, account, servlet.getManagedServlet().getServletInfo(), this, deployment);
}
@Override
public Principal getUserPrincipal() {
    // Principal of the authenticated account, or null when there is no
    // security context or no authenticated account.
    final SecurityContext securityContext = exchange.getSecurityContext();
    if (securityContext == null) {
        return null;
    }
    final Account account = securityContext.getAuthenticatedAccount();
    return account == null ? null : account.getPrincipal();
}
/**
 * Returns the session id the client sent with the request, using the
 * ORIGINAL context's session config. When that config is the servlet-context
 * wrapper, the lookup goes through its delegate directly.
 */
@Override
public String getRequestedSessionId() {
SessionConfig config = originalServletContext.getSessionConfig();
if(config instanceof ServletContextImpl.ServletContextSessionConfig) {
return ((ServletContextImpl.ServletContextSessionConfig)config).getDelegate().findSessionId(exchange);
}
return config.findSessionId(exchange);
}
/**
 * Changes the id of the existing session (servlet 3.1). Throws when there is
 * no current session. Notifies HttpSessionIdListeners with the old id and
 * returns the new one.
 */
@Override
public String changeSessionId() {
HttpSessionImpl session = servletContext.getSession(originalServletContext, exchange, false);
if (session == null) {
throw UndertowServletMessages.MESSAGES.noSession();
}
String oldId = session.getId();
Session underlyingSession;
// Unwrap the Undertow session; go through a privileged action when a
// security manager is installed
if(System.getSecurityManager() == null) {
underlyingSession = session.getSession();
} else {
underlyingSession = AccessController.doPrivileged(new HttpSessionImpl.UnwrapSessionAction(session));
}
String newId = underlyingSession.changeSessionId(exchange, originalServletContext.getSessionConfig());
servletContext.getDeployment().getApplicationListeners().httpSessionIdChanged(session, oldId);
return newId;
}
/**
 * Returns the undecoded request URI. When the exchange's URI includes the
 * scheme and host (absolute-form request line), the host part is stripped by
 * skipping up to the third '/' (e.g. "http://host/path" -> "/path"); if no
 * third slash exists, "/" is returned.
 */
@Override
public String getRequestURI() {
//we need the non-decoded string, which means we need to use exchange.getRequestURI()
if(exchange.isHostIncludedInRequestURI()) {
//we need to strip out the host part
String uri = exchange.getRequestURI();
int slashes =0;
for(int i = 0; i < uri.length(); ++i) {
if(uri.charAt(i) == '/') {
if(++slashes == 3) {
return uri.substring(i);
}
}
}
return "/";
} else {
return exchange.getRequestURI();
}
}
/** Returns the full request URL as a mutable StringBuffer, per the servlet API. */
@Override
public StringBuffer getRequestURL() {
return new StringBuffer(exchange.getRequestURL());
}
@Override
public String getServletPath() {
    // The matched portion of the path, or "" when no match is attached
    final ServletPathMatch match = exchange.getAttachment(ServletRequestContext.ATTACHMENT_KEY).getServletPathMatch();
    return match == null ? "" : match.getMatched();
}
/** Returns the session, creating one only when {@code create} is true; may return null. */
@Override
public HttpSession getSession(final boolean create) {
return servletContext.getSession(originalServletContext, exchange, create);
}
/** Returns the session, creating one if necessary. */
@Override
public HttpSession getSession() {
return getSession(true);
}
@Override
public boolean isRequestedSessionIdValid() {
    // The requested id is valid only when a live (non-invalidated) session
    // exists and its id equals the id the client sent.
    final HttpSessionImpl session = servletContext.getSession(originalServletContext, exchange, false);
    return session != null
            && !session.isInvalid()
            && session.getId().equals(getRequestedSessionId());
}
/** True when the requested session id arrived via a cookie. */
@Override
public boolean isRequestedSessionIdFromCookie() {
return sessionCookieSource() == SessionConfig.SessionCookieSource.COOKIE;
}
/** True when the requested session id arrived via URL rewriting. */
@Override
public boolean isRequestedSessionIdFromURL() {
return sessionCookieSource() == SessionConfig.SessionCookieSource.URL;
}
/** Deprecated alias for {@link #isRequestedSessionIdFromURL()}. */
@Override
public boolean isRequestedSessionIdFromUrl() {
return isRequestedSessionIdFromURL();
}
/**
 * Triggers container authentication (servlet 3.0). Throws when the response
 * is already committed. Returns true on successful authentication; returns
 * false when the mechanism has taken over the response (e.g. sent a
 * challenge); otherwise throws authenticationFailed.
 */
@Override
public boolean authenticate(final HttpServletResponse response) throws IOException, ServletException {
if (response.isCommitted()) {
throw UndertowServletMessages.MESSAGES.responseAlreadyCommited();
}
SecurityContext sc = exchange.getSecurityContext();
sc.setAuthenticationRequired();
// TODO: this will set the status code and headers without going through any potential
// wrappers, is this a problem?
if (sc.authenticate()) {
if (sc.isAuthenticated()) {
return true;
} else {
throw UndertowServletMessages.MESSAGES.authenticationFailed();
}
} else {
// Auth did not complete; if the mechanism produced no response either,
// that is a failure — otherwise the challenge is in flight
if(!exchange.isResponseStarted() && exchange.getStatusCode() == 200) {
throw UndertowServletMessages.MESSAGES.authenticationFailed();
} else {
return false;
}
}
}
@Override
public void login(final String username, final String password) throws ServletException {
if (username == null || password == null) {
throw UndertowServletMessages.MESSAGES.loginFailed();
}
SecurityContext sc = exchange.getSecurityContext();
if (sc.isAuthenticated()) {
throw UndertowServletMessages.MESSAGES.userAlreadyLoggedIn();
}
boolean login = false;
try {
login = sc.login(username, password);
}
catch (SecurityException se) {
if (se.getCause() instanceof ServletException)
throw (ServletException) se.getCause();
throw new ServletException(se);
}
if (!login) {
throw UndertowServletMessages.MESSAGES.loginFailed();
}
}
@Override
public void logout() throws ServletException {
SecurityContext sc = exchange.getSecurityContext();
sc.logout();
if(servletContext.getDeployment().getDeploymentInfo().isInvalidateSessionOnLogout()) {
HttpSession session = getSession(false);
if(session != null) {
session.invalidate();
}
}
}
@Override
public Collection<Part> getParts() throws IOException, ServletException {
verifyMultipartServlet();
if (parts == null) {
loadParts();
}
return parts;
}
private void verifyMultipartServlet() {
ServletRequestContext src = exchange.getAttachment(ServletRequestContext.ATTACHMENT_KEY);
MultipartConfigElement multipart = src.getServletPathMatch().getServletChain().getManagedServlet().getMultipartConfig();
if(multipart == null) {
throw UndertowServletMessages.MESSAGES.multipartConfigNotPresent();
}
}
@Override
public Part getPart(final String name) throws IOException, ServletException {
verifyMultipartServlet();
if (parts == null) {
loadParts();
}
for (Part part : parts) {
if (part.getName().equals(name)) {
return part;
}
}
return null;
}
@Override
public <T extends HttpUpgradeHandler> T upgrade(final Class<T> handlerClass) throws IOException {
try {
InstanceFactory<T> factory = servletContext.getDeployment().getDeploymentInfo().getClassIntrospecter().createInstanceFactory(handlerClass);
final InstanceHandle<T> instance = factory.createInstance();
exchange.upgradeChannel(new ServletUpgradeListener<>(instance, servletContext.getDeployment(), exchange));
return instance.getInstance();
} catch (InstantiationException e) {
throw new RuntimeException(e);
} catch (NoSuchMethodException e) {
throw new RuntimeException(e);
}
}
    /**
     * Parses the request body into multipart {@link Part}s and caches them.
     * Only valid for multipart/form-data requests; otherwise throws.
     */
    private void loadParts() throws IOException, ServletException {
        final ServletRequestContext requestContext = exchange.getAttachment(ServletRequestContext.ATTACHMENT_KEY);
        if (parts == null) {
            final List<Part> parts = new ArrayList<>();
            String mimeType = exchange.getRequestHeaders().getFirst(Headers.CONTENT_TYPE);
            if (mimeType != null && mimeType.startsWith(MultiPartParserDefinition.MULTIPART_FORM_DATA)) {
                FormData formData = parseFormData();
                if(formData != null) {
                    // Flatten every value of every named field into a Part.
                    for (final String namedPart : formData) {
                        for (FormData.FormValue part : formData.get(namedPart)) {
                            parts.add(new PartImpl(namedPart,
                                    part,
                                    requestContext.getOriginalServletPathMatch().getServletChain().getManagedServlet().getMultipartConfig(),
                                    servletContext, this));
                        }
                    }
                }
            } else {
                throw UndertowServletMessages.MESSAGES.notAMultiPartRequest();
            }
            this.parts = parts;
        }
    }
    @Override
    public Object getAttribute(final String name) {
        // Attribute map is allocated lazily; null map means no attributes set.
        if (attributes == null) {
            return null;
        }
        return attributes.get(name);
    }
    @Override
    public Enumeration<String> getAttributeNames() {
        if (attributes == null) {
            return EmptyEnumeration.instance();
        }
        return new IteratorEnumeration<>(attributes.keySet().iterator());
    }
    @Override
    public String getCharacterEncoding() {
        // Resolution order: explicit setCharacterEncoding() -> charset from
        // the Content-Type header -> deployment default -> null.
        if (characterEncoding != null) {
            return characterEncoding.name();
        }
        String characterEncodingFromHeader = getCharacterEncodingFromHeader();
        if (characterEncodingFromHeader != null) {
            return characterEncodingFromHeader;
        }
        if (servletContext.getDeployment().getDeploymentInfo().getDefaultRequestEncoding() != null ||
                servletContext.getDeployment().getDeploymentInfo().getDefaultEncoding() != null) {
            return servletContext.getDeployment().getDefaultRequestCharset().name();
        }
        return null;
    }
    private String getCharacterEncodingFromHeader() {
        // Extracts the (possibly quoted) charset parameter of Content-Type.
        String contentType = exchange.getRequestHeaders().getFirst(Headers.CONTENT_TYPE);
        if (contentType == null) {
            return null;
        }
        return Headers.extractQuotedValueFromHeader(contentType, "charset");
    }
    /**
     * Sets the request character encoding. Per the servlet API this is a
     * no-op once the body has started being read. The encoding is also pushed
     * down to the form parser so form decoding uses it.
     *
     * @throws UnsupportedEncodingException if the charset name is unknown
     */
    @Override
    public void setCharacterEncoding(final String env) throws UnsupportedEncodingException {
        if (readStarted) {
            return;
        }
        try {
            characterEncoding = Charset.forName(env);
            final ManagedServlet originalServlet = exchange.getAttachment(ServletRequestContext.ATTACHMENT_KEY).getOriginalServletPathMatch().getServletChain().getManagedServlet();
            final FormDataParser parser = originalServlet.getFormParserFactory().createParser(exchange);
            if (parser != null) {
                parser.setCharacterEncoding(env);
            }
        } catch (UnsupportedCharsetException e) {
            // Translate the NIO exception into the checked one the API declares.
            throw new UnsupportedEncodingException();
        }
    }
@Override
public int getContentLength() {
long length = getContentLengthLong();
if(length > Integer.MAX_VALUE) {
return -1;
}
return (int)length;
}
@Override
public long getContentLengthLong() {
final String contentLength = getHeader(Headers.CONTENT_LENGTH);
if (contentLength == null || contentLength.isEmpty()) {
return -1;
}
return Long.parseLong(contentLength);
}
    @Override
    public String getContentType() {
        return getHeader(Headers.CONTENT_TYPE);
    }
    /**
     * Returns the request body stream. Mutually exclusive with getReader():
     * once a reader exists this throws. Marks the body as "read started",
     * which disables setCharacterEncoding() and form re-parsing.
     */
    @Override
    public ServletInputStream getInputStream() throws IOException {
        if (reader != null) {
            throw UndertowServletMessages.MESSAGES.getReaderAlreadyCalled();
        }
        if(servletInputStream == null) {
            servletInputStream = new ServletInputStreamImpl(this);
        }
        readStarted = true;
        return servletInputStream;
    }
public void closeAndDrainRequest() throws IOException {
if(reader != null) {
reader.close();
}
if(servletInputStream == null) {
servletInputStream = new ServletInputStreamImpl(this);
}
servletInputStream.close();
}
/**
* Frees any resources (namely buffers) that may be associated with this request.
*
*/
public void freeResources() throws IOException {
if(reader != null) {
reader.close();
}
if(servletInputStream != null) {
servletInputStream.close();
}
}
    /**
     * Returns a single parameter value: query-string parameters win; form
     * body values are consulted only when the query string has no entry.
     * File-upload form fields are never exposed as parameters.
     */
    @Override
    public String getParameter(final String name) {
        if(queryParameters == null) {
            queryParameters = exchange.getQueryParameters();
        }
        Deque<String> params = queryParameters.get(name);
        if (params == null) {
            final FormData parsedFormData = parseFormData();
            if (parsedFormData != null) {
                FormData.FormValue res = parsedFormData.getFirst(name);
                if (res == null || res.isFileItem()) {
                    return null;
                } else {
                    return res.getValue();
                }
            }
            return null;
        }
        return params.getFirst();
    }
    /**
     * Union of query-string parameter names and (for POST) non-file form
     * field names.
     */
    @Override
    public Enumeration<String> getParameterNames() {
        if (queryParameters == null) {
            queryParameters = exchange.getQueryParameters();
        }
        final Set<String> parameterNames = new HashSet<>(queryParameters.keySet());
        if (exchange.getRequestMethod().equals(Methods.POST)) {
            final FormData parsedFormData = parseFormData();
            if (parsedFormData != null) {
                Iterator<String> it = parsedFormData.iterator();
                while (it.hasNext()) {
                    String name = it.next();
                    // Include the name only if at least one of its values is
                    // a plain field (not a file upload).
                    for(FormData.FormValue param : parsedFormData.get(name)) {
                        if(!param.isFileItem()) {
                            parameterNames.add(name);
                            break;
                        }
                    }
                }
            }
        }
        return new IteratorEnumeration<>(parameterNames.iterator());
    }
    /**
     * All values for one parameter: query-string values first, then (for
     * POST) non-file form values. Returns null when there are none, per the
     * servlet API.
     */
    @Override
    public String[] getParameterValues(final String name) {
        if (queryParameters == null) {
            queryParameters = exchange.getQueryParameters();
        }
        final List<String> ret = new ArrayList<>();
        Deque<String> params = queryParameters.get(name);
        if (params != null) {
            for (String param : params) {
                ret.add(param);
            }
        }
        if (exchange.getRequestMethod().equals(Methods.POST)) {
            final FormData parsedFormData = parseFormData();
            if (parsedFormData != null) {
                Deque<FormData.FormValue> res = parsedFormData.get(name);
                if (res != null) {
                    for (FormData.FormValue value : res) {
                        if(!value.isFileItem()) {
                            ret.add(value.getValue());
                        }
                    }
                }
            }
        }
        if (ret.isEmpty()) {
            return null;
        }
        return ret.toArray(new String[ret.size()]);
    }
    /**
     * Full parameter map, merging query-string values with (for POST)
     * non-file form values; query-string values come first in each array.
     */
    @Override
    public Map<String, String[]> getParameterMap() {
        if (queryParameters == null) {
            queryParameters = exchange.getQueryParameters();
        }
        final Map<String, ArrayList<String>> arrayMap = new HashMap<>();
        for (Map.Entry<String, Deque<String>> entry : queryParameters.entrySet()) {
            arrayMap.put(entry.getKey(), new ArrayList<>(entry.getValue()));
        }
        if (exchange.getRequestMethod().equals(Methods.POST)) {
            final FormData parsedFormData = parseFormData();
            if (parsedFormData != null) {
                Iterator<String> it = parsedFormData.iterator();
                while (it.hasNext()) {
                    final String name = it.next();
                    Deque<FormData.FormValue> val = parsedFormData.get(name);
                    if (arrayMap.containsKey(name)) {
                        // Append form values after the query-string values.
                        ArrayList<String> existing = arrayMap.get(name);
                        for (final FormData.FormValue v : val) {
                            if(!v.isFileItem()) {
                                existing.add(v.getValue());
                            }
                        }
                    } else {
                        final ArrayList<String> values = new ArrayList<>();
                        for (final FormData.FormValue v : val) {
                            if(!v.isFileItem()) {
                                values.add(v.getValue());
                            }
                        }
                        arrayMap.put(name, values);
                    }
                }
            }
        }
        final Map<String, String[]> ret = new HashMap<>();
        for(Map.Entry<String, ArrayList<String>> entry : arrayMap.entrySet()) {
            ret.put(entry.getKey(), entry.getValue().toArray(new String[entry.getValue().size()]));
        }
        return ret;
    }
    /**
     * Lazily parses the request body as form data, caching the result.
     * Returns null when the body has already been consumed or no parser
     * applies. A parse failure is cached (formParsingException) and rethrown
     * on every subsequent call so repeated lookups fail consistently.
     */
    private FormData parseFormData() {
        if(formParsingException != null) {
            throw formParsingException;
        }
        if (parsedFormData == null) {
            // Once the stream/reader has been used we must not parse the body.
            if (readStarted) {
                return null;
            }
            final ManagedServlet originalServlet = exchange.getAttachment(ServletRequestContext.ATTACHMENT_KEY).getCurrentServlet().getManagedServlet();
            final FormDataParser parser = originalServlet.getFormParserFactory().createParser(exchange);
            if (parser == null) {
                return null;
            }
            readStarted = true;
            try {
                return parsedFormData = parser.parseBlocking();
            } catch (RequestTooBigException | MultiPartParserDefinition.FileTooLargeException e) {
                throw formParsingException = new IllegalStateException(e);
            } catch (RuntimeException e) {
                throw formParsingException = e;
            } catch (IOException e) {
                throw formParsingException = new RuntimeException(e);
            }
        }
        return parsedFormData;
    }
    // Simple delegations to the underlying exchange.
    @Override
    public String getProtocol() {
        return exchange.getProtocol().toString();
    }
    @Override
    public String getScheme() {
        return exchange.getRequestScheme();
    }
    @Override
    public String getServerName() {
        return exchange.getHostName();
    }
    @Override
    public int getServerPort() {
        return exchange.getHostPort();
    }
    /**
     * Returns a reader over the request body. Mutually exclusive with
     * getInputStream(). Charset resolution: explicit setCharacterEncoding()
     * -> Content-Type charset -> deployment default. Marks the body as read.
     *
     * @throws UnsupportedEncodingException if the header charset is unknown
     */
    @Override
    public BufferedReader getReader() throws IOException {
        if (reader == null) {
            if (servletInputStream != null) {
                throw UndertowServletMessages.MESSAGES.getInputStreamAlreadyCalled();
            }
            Charset charSet = servletContext.getDeployment().getDefaultRequestCharset();
            if (characterEncoding != null) {
                charSet = characterEncoding;
            } else {
                String c = getCharacterEncodingFromHeader();
                if (c != null) {
                    try {
                        charSet = Charset.forName(c);
                    } catch (UnsupportedCharsetException e) {
                        throw new UnsupportedEncodingException();
                    }
                }
            }
            reader = new BufferedReader(new InputStreamReader(exchange.getInputStream(), charSet));
        }
        readStarted = true;
        return reader;
    }
    @Override
    public String getRemoteAddr() {
        InetSocketAddress sourceAddress = exchange.getSourceAddress();
        if(sourceAddress == null) {
            return "";
        }
        InetAddress address = sourceAddress.getAddress();
        if(address == null) {
            //this is unresolved, so we just return the host name
            //not exactly spec, but if the name should be resolved then a PeerNameResolvingHandler should be used
            //and this is probably better than just returning null
            return sourceAddress.getHostString();
        }
        return address.getHostAddress();
    }
    @Override
    public String getRemoteHost() {
        // getHostString() avoids a reverse DNS lookup; returns the literal
        // address when the name is not already known.
        InetSocketAddress sourceAddress = exchange.getSourceAddress();
        if(sourceAddress == null) {
            return "";
        }
        return sourceAddress.getHostString();
    }
    /**
     * Sets a request attribute. A null value is defined by the servlet API to
     * behave like removeAttribute(). Fires the replaced/added listener
     * notification depending on whether a previous value existed.
     */
    @Override
    public void setAttribute(final String name, final Object object) {
        if(object == null) {
            removeAttribute(name);
            return;
        }
        // Attribute map is allocated lazily on first write.
        if (attributes == null) {
            attributes = new HashMap<>();
        }
        Object existing = attributes.put(name, object);
        if (existing != null) {
            servletContext.getDeployment().getApplicationListeners().servletRequestAttributeReplaced(this, name, existing);
        } else {
            servletContext.getDeployment().getApplicationListeners().servletRequestAttributeAdded(this, name, object);
        }
    }
@Override
public void removeAttribute(final String name) {
if (attributes == null) {
return;
}
Object exiting = attributes.remove(name);
servletContext.getDeployment().getApplicationListeners().servletRequestAttributeRemoved(this, name, exiting);
}
    @Override
    public Locale getLocale() {
        // Safe: getLocales() always yields at least the platform default.
        return getLocales().nextElement();
    }
    @Override
    public Enumeration<Locale> getLocales() {
        // Parses Accept-Language; falls back to the JVM default locale when
        // the header yields nothing.
        final List<String> acceptLanguage = exchange.getRequestHeaders().get(Headers.ACCEPT_LANGUAGE);
        List<Locale> ret = LocaleUtils.getLocalesFromHeader(acceptLanguage);
        if(ret.isEmpty()) {
            return new IteratorEnumeration<>(Collections.singletonList(Locale.getDefault()).iterator());
        }
        return new IteratorEnumeration<>(ret.iterator());
    }
    @Override
    public boolean isSecure() {
        return exchange.isSecure();
    }
    /**
     * Resolves a dispatcher for the given path. Relative paths are resolved
     * against the directory of the current relative path and canonicalized.
     */
    @Override
    public RequestDispatcher getRequestDispatcher(final String path) {
        String realPath;
        if (path.startsWith("/")) {
            realPath = path;
        } else {
            String current = exchange.getRelativePath();
            int lastSlash = current.lastIndexOf("/");
            if (lastSlash != -1) {
                // Keep the trailing slash so the sibling path resolves correctly.
                current = current.substring(0, lastSlash + 1);
            }
            realPath = CanonicalPathUtils.canonicalize(current + path);
        }
        return new RequestDispatcherImpl(realPath, servletContext);
    }
    @Override
    public String getRealPath(final String path) {
        return servletContext.getRealPath(path);
    }
    @Override
    public int getRemotePort() {
        // NOTE(review): unlike getRemoteAddr(), no null check on the source
        // address here — TODO confirm the exchange guarantees it is non-null.
        return exchange.getSourceAddress().getPort();
    }
    @Override
    public String getLocalName() {
        // NOTE(review): same unchecked access as getRemotePort(); contrast
        // with the null-guarded getLocalAddr() below.
        return exchange.getDestinationAddress().getHostString();
    }
    @Override
    public String getLocalAddr() {
        InetSocketAddress destinationAddress = exchange.getDestinationAddress();
        if (destinationAddress == null) {
            return "";
        }
        InetAddress address = destinationAddress.getAddress();
        if (address == null) {
            //this is unresolved, so we just return the host name
            return destinationAddress.getHostString();
        }
        return address.getHostAddress();
    }
    @Override
    public int getLocalPort() {
        return exchange.getDestinationAddress().getPort();
    }
    @Override
    public ServletContextImpl getServletContext() {
        return servletContext;
    }
    /**
     * Starts async processing with the original request/response. Rejects a
     * second start and servlets not marked async-supported.
     */
    @Override
    public AsyncContext startAsync() throws IllegalStateException {
        if (!isAsyncSupported()) {
            throw UndertowServletMessages.MESSAGES.startAsyncNotAllowed();
        } else if (asyncStarted) {
            throw UndertowServletMessages.MESSAGES.asyncAlreadyStarted();
        }
        asyncStarted = true;
        final ServletRequestContext servletRequestContext = exchange.getAttachment(ServletRequestContext.ATTACHMENT_KEY);
        return asyncContext = new AsyncContextImpl(exchange, servletRequestContext.getServletRequest(), servletRequestContext.getServletResponse(), servletRequestContext, false, asyncContext);
    }
    /**
     * Starts async processing with caller-supplied request/response wrappers.
     * Unless the deployment allows non-standard wrappers, the supplied
     * objects must be the originals or standard ServletRequest/ResponseWrapper
     * instances around them.
     */
    @Override
    public AsyncContext startAsync(final ServletRequest servletRequest, final ServletResponse servletResponse) throws IllegalStateException {
        final ServletRequestContext servletRequestContext = exchange.getAttachment(ServletRequestContext.ATTACHMENT_KEY);
        if (!servletContext.getDeployment().getDeploymentInfo().isAllowNonStandardWrappers()) {
            if (servletRequestContext.getOriginalRequest() != servletRequest) {
                if (!(servletRequest instanceof ServletRequestWrapper)) {
                    throw UndertowServletMessages.MESSAGES.requestWasNotOriginalOrWrapper(servletRequest);
                }
            }
            if (servletRequestContext.getOriginalResponse() != servletResponse) {
                if (!(servletResponse instanceof ServletResponseWrapper)) {
                    throw UndertowServletMessages.MESSAGES.responseWasNotOriginalOrWrapper(servletResponse);
                }
            }
        }
        if (!isAsyncSupported()) {
            throw UndertowServletMessages.MESSAGES.startAsyncNotAllowed();
        } else if (asyncStarted) {
            throw UndertowServletMessages.MESSAGES.asyncAlreadyStarted();
        }
        asyncStarted = true;
        servletRequestContext.setServletRequest(servletRequest);
        servletRequestContext.setServletResponse(servletResponse);
        return asyncContext = new AsyncContextImpl(exchange, servletRequest, servletResponse, servletRequestContext, true, asyncContext);
    }
    @Override
    public boolean isAsyncStarted() {
        return asyncStarted;
    }
    @Override
    public boolean isAsyncSupported() {
        return exchange.getAttachment(ServletRequestContext.ATTACHMENT_KEY).isAsyncSupported();
    }
    @Override
    public AsyncContextImpl getAsyncContext() {
        // Per the servlet API, illegal to call before startAsync().
        if (!isAsyncStarted()) {
            throw UndertowServletMessages.MESSAGES.asyncNotStarted();
        }
        return asyncContext;
    }
    // Internal accessor: returns the context without the started check.
    public AsyncContextImpl getAsyncContextInternal() {
        return asyncContext;
    }
    @Override
    public DispatcherType getDispatcherType() {
        return exchange.getAttachment(ServletRequestContext.ATTACHMENT_KEY).getDispatcherType();
    }
    // Lazily-initialized view of the exchange's query parameters; can be
    // overridden by dispatches via setQueryParameters().
    public Map<String, Deque<String>> getQueryParameters() {
        if (queryParameters == null) {
            queryParameters = exchange.getQueryParameters();
        }
        return queryParameters;
    }
    public void setQueryParameters(final Map<String, Deque<String>> queryParameters) {
        this.queryParameters = queryParameters;
    }
    public void setServletContext(final ServletContextImpl servletContext) {
        this.servletContext = servletContext;
    }
    // Resets the async flag when an async dispatch takes over the request.
    void asyncRequestDispatched() {
        asyncStarted = false;
    }
    // The getOriginal* accessors below all follow the same pattern: prefer
    // the value saved by a forward dispatch, then the one saved by an async
    // dispatch, and finally fall back to the current request's value.
    public String getOriginalRequestURI() {
        String uri = (String) getAttribute(RequestDispatcher.FORWARD_REQUEST_URI);
        if(uri != null) {
            return uri;
        }
        uri = (String) getAttribute(AsyncContext.ASYNC_REQUEST_URI);
        if(uri != null) {
            return uri;
        }
        return getRequestURI();
    }
    public String getOriginalServletPath() {
        String uri = (String) getAttribute(RequestDispatcher.FORWARD_SERVLET_PATH);
        if(uri != null) {
            return uri;
        }
        uri = (String) getAttribute(AsyncContext.ASYNC_SERVLET_PATH);
        if(uri != null) {
            return uri;
        }
        return getServletPath();
    }
    public String getOriginalPathInfo() {
        String uri = (String) getAttribute(RequestDispatcher.FORWARD_PATH_INFO);
        if(uri != null) {
            return uri;
        }
        uri = (String) getAttribute(AsyncContext.ASYNC_PATH_INFO);
        if(uri != null) {
            return uri;
        }
        return getPathInfo();
    }
    public String getOriginalContextPath() {
        String uri = (String) getAttribute(RequestDispatcher.FORWARD_CONTEXT_PATH);
        if(uri != null) {
            return uri;
        }
        uri = (String) getAttribute(AsyncContext.ASYNC_CONTEXT_PATH);
        if(uri != null) {
            return uri;
        }
        return getContextPath();
    }
    public String getOriginalQueryString() {
        String uri = (String) getAttribute(RequestDispatcher.FORWARD_QUERY_STRING);
        if(uri != null) {
            return uri;
        }
        uri = (String) getAttribute(AsyncContext.ASYNC_QUERY_STRING);
        if(uri != null) {
            return uri;
        }
        return getQueryString();
    }
    /**
     * Determines whether the session id arrived via cookie or URL. Returns
     * NONE when no session exists; the computed source is cached per request.
     */
    private SessionConfig.SessionCookieSource sessionCookieSource() {
        HttpSession session = getSession(false);
        if(session == null) {
            return SessionConfig.SessionCookieSource.NONE;
        }
        if(sessionCookieSource == null) {
            sessionCookieSource = originalServletContext.getSessionConfig().sessionCookieSource(exchange);
        }
        return sessionCookieSource;
    }
    @Override
    public String toString() {
        return "HttpServletRequestImpl [ " + getMethod() + ' ' + getRequestURI() + " ]";
    }
    // Drops all request attributes without firing listener notifications.
    public void clearAttributes() {
        if(attributes != null) {
            this.attributes.clear();
        }
    }
    @Override
    public PushBuilder newPushBuilder() {
        // Server push is only available when the connection supports it
        // (e.g. HTTP/2); the API mandates null otherwise.
        if(exchange.getConnection().isPushSupported()) {
            return new PushBuilderImpl(this);
        }
        return null;
    }
    /**
     * Returns received trailer headers, keyed by lower-cased name; empty map
     * when no trailers were attached to the exchange.
     */
    @Override
    public Map<String, String> getTrailerFields() {
        HeaderMap trailers = exchange.getAttachment(HttpAttachments.REQUEST_TRAILERS);
        if(trailers == null) {
            return Collections.emptyMap();
        }
        Map<String, String> ret = new HashMap<>();
        for(HeaderValues entry : trailers) {
            ret.put(entry.getHeaderName().toString().toLowerCase(Locale.ENGLISH), entry.getFirst());
        }
        return ret;
    }
    @Override
    public boolean isTrailerFieldsReady() {
        // Trailers are ready once the body is fully read, or trivially when
        // the connection can never deliver any.
        if(exchange.isRequestComplete()) {
            return true;
        }
        return !exchange.getConnection().isRequestTrailerFieldsSupported();
    }
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.cassandra.db;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.charset.CharacterCodingException;
import java.util.*;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import org.apache.cassandra.CleanupHelper;
import org.apache.cassandra.Util;
import org.apache.cassandra.config.ColumnDefinition;
import org.apache.cassandra.config.ConfigurationException;
import org.apache.cassandra.config.DatabaseDescriptor;
import org.apache.cassandra.db.columniterator.IdentityQueryFilter;
import org.apache.cassandra.db.filter.*;
import org.apache.cassandra.db.index.SecondaryIndex;
import org.apache.cassandra.db.marshal.LongType;
import org.apache.cassandra.db.marshal.LexicalUUIDType;
import org.apache.cassandra.dht.IPartitioner;
import org.apache.cassandra.dht.Range;
import org.apache.cassandra.io.sstable.Component;
import org.apache.cassandra.io.sstable.Descriptor;
import org.apache.cassandra.io.sstable.SSTableReader;
import org.apache.cassandra.io.sstable.SSTable;
import org.apache.cassandra.service.StorageService;
import org.apache.cassandra.thrift.*;
import org.apache.cassandra.utils.ByteBufferUtil;
import org.apache.cassandra.utils.WrappedRunnable;
import org.apache.commons.lang.ArrayUtils;
import org.apache.commons.lang.StringUtils;
import static junit.framework.Assert.assertEquals;
import static junit.framework.Assert.assertTrue;
import static org.apache.cassandra.Util.column;
import static org.apache.cassandra.Util.getBytes;
import static org.apache.cassandra.db.TableTest.assertColumns;
import static org.junit.Assert.assertNull;
import org.junit.Test;
public class ColumnFamilyStoreTest extends CleanupHelper
{
    // Shared random payloads used as column values by the tests below.
    static byte[] bytes1, bytes2;
    static
    {
        Random random = new Random();
        bytes1 = new byte[1024];
        bytes2 = new byte[128];
        random.nextBytes(bytes1);
        random.nextBytes(bytes2);
    }
    @Test
    // create two sstables, and verify that we only deserialize data from the most recent one
    public void testTimeSortedQuery() throws IOException, ExecutionException, InterruptedException
    {
        Table table = Table.open("Keyspace1");
        ColumnFamilyStore cfs = table.getColumnFamilyStore("Standard1");
        cfs.truncate().get();
        RowMutation rm;
        // Write the same column twice with increasing timestamps, flushing
        // after each write so each version lands in its own sstable.
        rm = new RowMutation("Keyspace1", ByteBufferUtil.bytes("key1"));
        rm.add(new QueryPath("Standard1", null, ByteBufferUtil.bytes("Column1")), ByteBufferUtil.bytes("asdf"), 0);
        rm.apply();
        cfs.forceBlockingFlush();
        rm = new RowMutation("Keyspace1", ByteBufferUtil.bytes("key1"));
        rm.add(new QueryPath("Standard1", null, ByteBufferUtil.bytes("Column1")), ByteBufferUtil.bytes("asdf"), 1);
        rm.apply();
        cfs.forceBlockingFlush();
        cfs.getRecentSSTablesPerReadHistogram(); // resets counts
        cfs.getColumnFamily(QueryFilter.getNamesFilter(Util.dk("key1"), new QueryPath("Standard1", null), ByteBufferUtil.bytes("Column1")));
        // The read should have touched exactly one sstable (the newest).
        assertEquals(1, cfs.getRecentSSTablesPerReadHistogram()[0]);
    }
    @Test
    public void testGetColumnWithWrongBF() throws IOException, ExecutionException, InterruptedException
    {
        // Force bloom-filter false positives and verify a read for a missing
        // key still (correctly) returns no data.
        Table table = Table.open("Keyspace1");
        ColumnFamilyStore cfs = table.getColumnFamilyStore("Standard1");
        cfs.truncate().get();
        List<IMutation> rms = new LinkedList<IMutation>();
        RowMutation rm;
        rm = new RowMutation("Keyspace1", ByteBufferUtil.bytes("key1"));
        rm.add(new QueryPath("Standard1", null, ByteBufferUtil.bytes("Column1")), ByteBufferUtil.bytes("asdf"), 0);
        rm.add(new QueryPath("Standard1", null, ByteBufferUtil.bytes("Column2")), ByteBufferUtil.bytes("asdf"), 0);
        rms.add(rm);
        Util.writeColumnFamily(rms);
        List<SSTableReader> ssTables = table.getAllSSTables();
        assertEquals(1, ssTables.size());
        // Make the bloom filter claim every key is present.
        ssTables.get(0).forceFilterFailures();
        ColumnFamily cf = cfs.getColumnFamily(QueryFilter.getIdentityFilter(Util.dk("key2"), new QueryPath("Standard1", null, ByteBufferUtil.bytes("Column1"))));
        assertNull(cf);
    }
    @Test
    public void testEmptyRow() throws Exception
    {
        // A row-level delete should produce a column family that is marked
        // for delete yet contains no columns, for both slice and name queries.
        Table table = Table.open("Keyspace1");
        final ColumnFamilyStore store = table.getColumnFamilyStore("Standard2");
        RowMutation rm;
        rm = new RowMutation("Keyspace1", ByteBufferUtil.bytes("key1"));
        rm.delete(new QueryPath("Standard2", null, null), System.currentTimeMillis());
        rm.apply();
        Runnable r = new WrappedRunnable()
        {
            public void runMayThrow() throws IOException
            {
                QueryFilter sliceFilter = QueryFilter.getSliceFilter(Util.dk("key1"), new QueryPath("Standard2", null, null), ByteBufferUtil.EMPTY_BYTE_BUFFER, ByteBufferUtil.EMPTY_BYTE_BUFFER, false, 1);
                ColumnFamily cf = store.getColumnFamily(sliceFilter);
                assert cf.isMarkedForDelete();
                assert cf.isEmpty();
                QueryFilter namesFilter = QueryFilter.getNamesFilter(Util.dk("key1"), new QueryPath("Standard2", null, null), ByteBufferUtil.bytes("a"));
                cf = store.getColumnFamily(namesFilter);
                assert cf.isMarkedForDelete();
                assert cf.isEmpty();
            }
        };
        // reTest runs the checks both before and after flushing the store.
        TableTest.reTest(store, r);
    }
    @Test
    public void testSkipStartKey() throws IOException, ExecutionException, InterruptedException
    {
        // A range slice over (key1, key2] must exclude the start key and
        // return only key2.
        ColumnFamilyStore cfs = insertKey1Key2();
        IPartitioner p = StorageService.getPartitioner();
        List<Row> result = cfs.getRangeSlice(ByteBufferUtil.EMPTY_BYTE_BUFFER,
                                             Util.range(p, "key1", "key2"),
                                             10,
                                             new NamesQueryFilter(ByteBufferUtil.bytes("asdf")),
                                             null);
        assertEquals(1, result.size());
        assert result.get(0).key.key.equals(ByteBufferUtil.bytes("key2"));
    }
@Test
public void testIndexScan() throws IOException
{
RowMutation rm;
rm = new RowMutation("Keyspace1", ByteBufferUtil.bytes("k1"));
rm.add(new QueryPath("Indexed1", null, ByteBufferUtil.bytes("notbirthdate")), ByteBufferUtil.bytes(1L), 0);
rm.add(new QueryPath("Indexed1", null, ByteBufferUtil.bytes("birthdate")), ByteBufferUtil.bytes(1L), 0);
rm.apply();
rm = new RowMutation("Keyspace1", ByteBufferUtil.bytes("k2"));
rm.add(new QueryPath("Indexed1", null, ByteBufferUtil.bytes("notbirthdate")), ByteBufferUtil.bytes(2L), 0);
rm.add(new QueryPath("Indexed1", null, ByteBufferUtil.bytes("birthdate")), ByteBufferUtil.bytes(2L), 0);
rm.apply();
rm = new RowMutation("Keyspace1", ByteBufferUtil.bytes("k3"));
rm.add(new QueryPath("Indexed1", null, ByteBufferUtil.bytes("notbirthdate")), ByteBufferUtil.bytes(2L), 0);
rm.add(new QueryPath("Indexed1", null, ByteBufferUtil.bytes("birthdate")), ByteBufferUtil.bytes(1L), 0);
rm.apply();
rm = new RowMutation("Keyspace1", ByteBufferUtil.bytes("k4aaaa"));
rm.add(new QueryPath("Indexed1", null, ByteBufferUtil.bytes("notbirthdate")), ByteBufferUtil.bytes(2L), 0);
rm.add(new QueryPath("Indexed1", null, ByteBufferUtil.bytes("birthdate")), ByteBufferUtil.bytes(3L), 0);
rm.apply();
// basic single-expression query
IndexExpression expr = new IndexExpression(ByteBufferUtil.bytes("birthdate"), IndexOperator.EQ, ByteBufferUtil.bytes(1L));
List<IndexExpression> clause = Arrays.asList(expr);
IFilter filter = new IdentityQueryFilter();
IPartitioner p = StorageService.getPartitioner();
Range<RowPosition> range = Util.range("", "");
List<Row> rows = Table.open("Keyspace1").getColumnFamilyStore("Indexed1").search(clause, range, 100, filter);
assert rows != null;
assert rows.size() == 2 : StringUtils.join(rows, ",");
String key = new String(rows.get(0).key.key.array(),rows.get(0).key.key.position(),rows.get(0).key.key.remaining());
assert "k1".equals( key ) : key;
key = new String(rows.get(1).key.key.array(),rows.get(1).key.key.position(),rows.get(1).key.key.remaining());
assert "k3".equals(key) : key;
assert ByteBufferUtil.bytes(1L).equals( rows.get(0).cf.getColumn(ByteBufferUtil.bytes("birthdate")).value());
assert ByteBufferUtil.bytes(1L).equals( rows.get(1).cf.getColumn(ByteBufferUtil.bytes("birthdate")).value());
// add a second expression
IndexExpression expr2 = new IndexExpression(ByteBufferUtil.bytes("notbirthdate"), IndexOperator.GTE, ByteBufferUtil.bytes(2L));
clause = Arrays.asList(expr, expr2);
rows = Table.open("Keyspace1").getColumnFamilyStore("Indexed1").search(clause, range, 100, filter);
assert rows.size() == 1 : StringUtils.join(rows, ",");
key = new String(rows.get(0).key.key.array(),rows.get(0).key.key.position(),rows.get(0).key.key.remaining());
assert "k3".equals( key );
// same query again, but with resultset not including the subordinate expression
rows = Table.open("Keyspace1").getColumnFamilyStore("Indexed1").search(clause, range, 100, new NamesQueryFilter(ByteBufferUtil.bytes("birthdate")));
assert rows.size() == 1 : StringUtils.join(rows, ",");
key = new String(rows.get(0).key.key.array(),rows.get(0).key.key.position(),rows.get(0).key.key.remaining());
assert "k3".equals( key );
assert rows.get(0).cf.getColumnCount() == 1 : rows.get(0).cf;
// once more, this time with a slice rowset that needs to be expanded
SliceQueryFilter emptyFilter = new SliceQueryFilter(ByteBufferUtil.EMPTY_BYTE_BUFFER, ByteBufferUtil.EMPTY_BYTE_BUFFER, false, 0);
rows = Table.open("Keyspace1").getColumnFamilyStore("Indexed1").search(clause, range, 100, emptyFilter);
assert rows.size() == 1 : StringUtils.join(rows, ",");
key = new String(rows.get(0).key.key.array(),rows.get(0).key.key.position(),rows.get(0).key.key.remaining());
assert "k3".equals( key );
assert rows.get(0).cf.getColumnCount() == 0;
// query with index hit but rejected by secondary clause, with a small enough count that just checking count
// doesn't tell the scan loop that it's done
IndexExpression expr3 = new IndexExpression(ByteBufferUtil.bytes("notbirthdate"), IndexOperator.EQ, ByteBufferUtil.bytes(-1L));
clause = Arrays.asList(expr, expr3);
rows = Table.open("Keyspace1").getColumnFamilyStore("Indexed1").search(clause, range, 100, filter);
assert rows.isEmpty();
}
@Test
public void testLargeScan() throws IOException
{
RowMutation rm;
for (int i = 0; i < 100; i++)
{
rm = new RowMutation("Keyspace1", ByteBufferUtil.bytes("key" + i));
rm.add(new QueryPath("Indexed1", null, ByteBufferUtil.bytes("birthdate")), ByteBufferUtil.bytes(34L), 0);
rm.add(new QueryPath("Indexed1", null, ByteBufferUtil.bytes("notbirthdate")), ByteBufferUtil.bytes((long) (i % 2)), 0);
rm.applyUnsafe();
}
IndexExpression expr = new IndexExpression(ByteBufferUtil.bytes("birthdate"), IndexOperator.EQ, ByteBufferUtil.bytes(34L));
IndexExpression expr2 = new IndexExpression(ByteBufferUtil.bytes("notbirthdate"), IndexOperator.EQ, ByteBufferUtil.bytes(1L));
List<IndexExpression> clause = Arrays.asList(expr, expr2);
IFilter filter = new IdentityQueryFilter();
IPartitioner p = StorageService.getPartitioner();
Range<RowPosition> range = Util.range("", "");
List<Row> rows = Table.open("Keyspace1").getColumnFamilyStore("Indexed1").search(clause, range, 100, filter);
assert rows != null;
assert rows.size() == 50 : rows.size();
Set<DecoratedKey> keys = new HashSet<DecoratedKey>();
// extra check that there are no duplicate results -- see https://issues.apache.org/jira/browse/CASSANDRA-2406
for (Row row : rows)
keys.add(row.key);
assert rows.size() == keys.size();
}
/**
 * Verifies secondary-index maintenance under deletions: a deleted column must
 * disappear from the index, older-timestamp deletes must be ignored, and
 * mixed insert+delete mutations must resolve by timestamp.
 * (Removed an unused {@code IPartitioner} local that was never read.)
 */
@Test
public void testIndexDeletions() throws IOException
{
    ColumnFamilyStore cfs = Table.open("Keyspace3").getColumnFamilyStore("Indexed1");
    RowMutation rm;
    rm = new RowMutation("Keyspace3", ByteBufferUtil.bytes("k1"));
    rm.add(new QueryPath("Indexed1", null, ByteBufferUtil.bytes("birthdate")), ByteBufferUtil.bytes(1L), 0);
    rm.apply();

    IndexExpression expr = new IndexExpression(ByteBufferUtil.bytes("birthdate"), IndexOperator.EQ, ByteBufferUtil.bytes(1L));
    List<IndexExpression> clause = Arrays.asList(expr);
    IFilter filter = new IdentityQueryFilter();
    Range<RowPosition> range = Util.range("", "");
    List<Row> rows = cfs.search(clause, range, 100, filter);
    assert rows.size() == 1 : StringUtils.join(rows, ",");
    String key = ByteBufferUtil.string(rows.get(0).key.key);
    assert "k1".equals( key );

    // delete the column directly
    rm = new RowMutation("Keyspace3", ByteBufferUtil.bytes("k1"));
    rm.delete(new QueryPath("Indexed1", null, ByteBufferUtil.bytes("birthdate")), 1);
    rm.apply();
    rows = cfs.search(clause, range, 100, filter);
    assert rows.isEmpty();

    // verify that it's not being indexed under the deletion column value either
    IColumn deletion = rm.getColumnFamilies().iterator().next().iterator().next();
    ByteBuffer deletionLong = ByteBufferUtil.bytes((long) ByteBufferUtil.toInt(deletion.value()));
    IndexExpression expr0 = new IndexExpression(ByteBufferUtil.bytes("birthdate"), IndexOperator.EQ, deletionLong);
    List<IndexExpression> clause0 = Arrays.asList(expr0);
    rows = cfs.search(clause0, range, 100, filter);
    assert rows.isEmpty();

    // resurrect w/ a newer timestamp
    rm = new RowMutation("Keyspace3", ByteBufferUtil.bytes("k1"));
    rm.add(new QueryPath("Indexed1", null, ByteBufferUtil.bytes("birthdate")), ByteBufferUtil.bytes(1L), 2);
    rm.apply();
    rows = cfs.search(clause, range, 100, filter);
    assert rows.size() == 1 : StringUtils.join(rows, ",");
    key = ByteBufferUtil.string(rows.get(0).key.key);
    assert "k1".equals( key );

    // verify that row delete w/ older timestamp (1 < 2) does nothing
    rm = new RowMutation("Keyspace3", ByteBufferUtil.bytes("k1"));
    rm.delete(new QueryPath("Indexed1"), 1);
    rm.apply();
    rows = cfs.search(clause, range, 100, filter);
    assert rows.size() == 1 : StringUtils.join(rows, ",");
    key = ByteBufferUtil.string(rows.get(0).key.key);
    assert "k1".equals( key );

    // similarly, column delete w/ older timestamp should do nothing
    rm = new RowMutation("Keyspace3", ByteBufferUtil.bytes("k1"));
    rm.delete(new QueryPath("Indexed1", null, ByteBufferUtil.bytes("birthdate")), 1);
    rm.apply();
    rows = cfs.search(clause, range, 100, filter);
    assert rows.size() == 1 : StringUtils.join(rows, ",");
    key = ByteBufferUtil.string(rows.get(0).key.key);
    assert "k1".equals( key );

    // delete the entire row (w/ newer timestamp this time)
    rm = new RowMutation("Keyspace3", ByteBufferUtil.bytes("k1"));
    rm.delete(new QueryPath("Indexed1"), 3);
    rm.apply();
    rows = cfs.search(clause, range, 100, filter);
    assert rows.isEmpty() : StringUtils.join(rows, ",");

    // make sure obsolete mutations (timestamp <= row tombstone) don't generate an index entry
    rm = new RowMutation("Keyspace3", ByteBufferUtil.bytes("k1"));
    rm.add(new QueryPath("Indexed1", null, ByteBufferUtil.bytes("birthdate")), ByteBufferUtil.bytes(1L), 3);
    rm.apply();
    rows = cfs.search(clause, range, 100, filter);
    assert rows.isEmpty() : StringUtils.join(rows, ",");

    // try insert followed by row delete in the same mutation
    rm = new RowMutation("Keyspace3", ByteBufferUtil.bytes("k1"));
    rm.add(new QueryPath("Indexed1", null, ByteBufferUtil.bytes("birthdate")), ByteBufferUtil.bytes(1L), 1);
    rm.delete(new QueryPath("Indexed1"), 2);
    rm.apply();
    rows = cfs.search(clause, range, 100, filter);
    assert rows.isEmpty() : StringUtils.join(rows, ",");

    // try row delete followed by insert in the same mutation
    rm = new RowMutation("Keyspace3", ByteBufferUtil.bytes("k1"));
    rm.delete(new QueryPath("Indexed1"), 3);
    rm.add(new QueryPath("Indexed1", null, ByteBufferUtil.bytes("birthdate")), ByteBufferUtil.bytes(1L), 4);
    rm.apply();
    rows = cfs.search(clause, range, 100, filter);
    assert rows.size() == 1 : StringUtils.join(rows, ",");
    key = ByteBufferUtil.string(rows.get(0).key.key);
    assert "k1".equals( key );
}
/**
 * Verifies that updating an indexed column re-points the index at the newest
 * value, and that an update carrying an OLDER timestamp is ignored by the index.
 * (Removed an unused {@code IPartitioner} local that was never read.)
 */
@Test
public void testIndexUpdate() throws IOException
{
    Table table = Table.open("Keyspace2");

    // create a row and update the birthdate value, test that the index query fetches the new version
    RowMutation rm;
    rm = new RowMutation("Keyspace2", ByteBufferUtil.bytes("k1"));
    rm.add(new QueryPath("Indexed1", null, ByteBufferUtil.bytes("birthdate")), ByteBufferUtil.bytes(1L), 1);
    rm.apply();
    rm = new RowMutation("Keyspace2", ByteBufferUtil.bytes("k1"));
    rm.add(new QueryPath("Indexed1", null, ByteBufferUtil.bytes("birthdate")), ByteBufferUtil.bytes(2L), 2);
    rm.apply();

    // the superseded value (1L) must no longer be indexed...
    IndexExpression expr = new IndexExpression(ByteBufferUtil.bytes("birthdate"), IndexOperator.EQ, ByteBufferUtil.bytes(1L));
    List<IndexExpression> clause = Arrays.asList(expr);
    IFilter filter = new IdentityQueryFilter();
    Range<RowPosition> range = Util.range("", "");
    List<Row> rows = table.getColumnFamilyStore("Indexed1").search(clause, range, 100, filter);
    assert rows.size() == 0;

    // ...while the current value (2L) must be
    expr = new IndexExpression(ByteBufferUtil.bytes("birthdate"), IndexOperator.EQ, ByteBufferUtil.bytes(2L));
    clause = Arrays.asList(expr);
    rows = table.getColumnFamilyStore("Indexed1").search(clause, range, 100, filter);
    String key = ByteBufferUtil.string(rows.get(0).key.key);
    assert "k1".equals( key );

    // update the birthdate value with an OLDER timestamp, and test that the index ignores this
    rm = new RowMutation("Keyspace2", ByteBufferUtil.bytes("k1"));
    rm.add(new QueryPath("Indexed1", null, ByteBufferUtil.bytes("birthdate")), ByteBufferUtil.bytes(3L), 0);
    rm.apply();
    rows = table.getColumnFamilyStore("Indexed1").search(clause, range, 100, filter);
    key = ByteBufferUtil.string(rows.get(0).key.key);
    assert "k1".equals( key );
}
// See CASSANDRA-2628
/**
 * Regression test: an index scan combining an EQ and a GT expression with a
 * row limit of 1 must return exactly one row. Inserts four rows of which
 * three match (kk2..kk4 have notbirthdate > 1).
 * (Removed an unused {@code IPartitioner} local; replaced the explicit
 * array with the {@code Arrays.asList} varargs form.)
 */
@Test
public void testIndexScanWithLimitOne() throws IOException
{
    RowMutation rm;
    rm = new RowMutation("Keyspace1", ByteBufferUtil.bytes("kk1"));
    rm.add(new QueryPath("Indexed1", null, ByteBufferUtil.bytes("notbirthdate")), ByteBufferUtil.bytes(1L), 0);
    rm.add(new QueryPath("Indexed1", null, ByteBufferUtil.bytes("birthdate")), ByteBufferUtil.bytes(1L), 0);
    rm.apply();
    rm = new RowMutation("Keyspace1", ByteBufferUtil.bytes("kk2"));
    rm.add(new QueryPath("Indexed1", null, ByteBufferUtil.bytes("notbirthdate")), ByteBufferUtil.bytes(2L), 0);
    rm.add(new QueryPath("Indexed1", null, ByteBufferUtil.bytes("birthdate")), ByteBufferUtil.bytes(1L), 0);
    rm.apply();
    rm = new RowMutation("Keyspace1", ByteBufferUtil.bytes("kk3"));
    rm.add(new QueryPath("Indexed1", null, ByteBufferUtil.bytes("notbirthdate")), ByteBufferUtil.bytes(2L), 0);
    rm.add(new QueryPath("Indexed1", null, ByteBufferUtil.bytes("birthdate")), ByteBufferUtil.bytes(1L), 0);
    rm.apply();
    rm = new RowMutation("Keyspace1", ByteBufferUtil.bytes("kk4"));
    rm.add(new QueryPath("Indexed1", null, ByteBufferUtil.bytes("notbirthdate")), ByteBufferUtil.bytes(2L), 0);
    rm.add(new QueryPath("Indexed1", null, ByteBufferUtil.bytes("birthdate")), ByteBufferUtil.bytes(1L), 0);
    rm.apply();

    // two-expression clause, limited to a single row
    IndexExpression expr1 = new IndexExpression(ByteBufferUtil.bytes("birthdate"), IndexOperator.EQ, ByteBufferUtil.bytes(1L));
    IndexExpression expr2 = new IndexExpression(ByteBufferUtil.bytes("notbirthdate"), IndexOperator.GT, ByteBufferUtil.bytes(1L));
    List<IndexExpression> clause = Arrays.asList(expr1, expr2);
    IFilter filter = new IdentityQueryFilter();
    Range<RowPosition> range = Util.range("", "");
    List<Row> rows = Table.open("Keyspace1").getColumnFamilyStore("Indexed1").search(clause, range, 1, filter);
    assert rows != null;
    assert rows.size() == 1 : StringUtils.join(rows, ",");
}
/**
 * Verifies index creation over pre-existing data: the freshly built index
 * must be flushed to sstables (CASSANDRA-2244) and must be queryable;
 * dropping it must clear its built flag and a rebuild must work again
 * (CASSANDRA-2320).
 */
@Test
public void testIndexCreate() throws IOException, ConfigurationException, InterruptedException, ExecutionException
{
    Table table = Table.open("Keyspace1");

    // write the to-be-indexed column before any index exists
    RowMutation mutation = new RowMutation("Keyspace1", ByteBufferUtil.bytes("k1"));
    mutation.add(new QueryPath("Indexed2", null, ByteBufferUtil.bytes("birthdate")), ByteBufferUtil.bytes(1L), 1);
    mutation.apply();

    ColumnFamilyStore cfs = table.getColumnFamilyStore("Indexed2");
    ColumnDefinition previous = cfs.metadata.getColumn_metadata().get(ByteBufferUtil.bytes("birthdate"));
    ColumnDefinition withIndex = new ColumnDefinition(previous.name, previous.getValidator(), IndexType.KEYS, null, "birthdate_index");

    // kick off the index build and wait for it to complete
    Future<?> buildTask = cfs.indexManager.addIndexedColumn(withIndex);
    buildTask.get();

    // we had a bug (CASSANDRA-2244) where index would get created but not flushed -- check for that
    assert cfs.indexManager.getIndexForColumn(withIndex.name).getIndexCfs().getSSTables().size() > 0;
    queryBirthdate(table);

    // validate that drop clears it out & rebuild works (CASSANDRA-2320)
    SecondaryIndex index = cfs.indexManager.getIndexForColumn(ByteBufferUtil.bytes("birthdate"));
    cfs.indexManager.removeIndexedColumn(ByteBufferUtil.bytes("birthdate"));
    assert !index.isIndexBuilt(ByteBufferUtil.bytes("birthdate"));

    // rebuild & re-query
    buildTask = cfs.indexManager.addIndexedColumn(withIndex);
    buildTask.get();
    queryBirthdate(table);
}
/**
 * Asserts that exactly one row ("k1") is returned by an index query for
 * birthdate == 1 on the Indexed2 column family.
 * (Removed an unused {@code IPartitioner} local that was never read.)
 */
private void queryBirthdate(Table table) throws CharacterCodingException
{
    IndexExpression expr = new IndexExpression(ByteBufferUtil.bytes("birthdate"), IndexOperator.EQ, ByteBufferUtil.bytes(1L));
    List<IndexExpression> clause = Arrays.asList(expr);
    IFilter filter = new IdentityQueryFilter();
    List<Row> rows = table.getColumnFamilyStore("Indexed2").search(clause, Util.range("", ""), 100, filter);
    assert rows.size() == 1 : StringUtils.join(rows, ",");
    assertEquals("k1", ByteBufferUtil.string(rows.get(0).key.key));
}
/**
 * Verifies that range-slice bounds are inclusive on both ends: a bounds of
 * [key1, key2] must return both inserted rows, in key order.
 * (Removed an unused {@code IPartitioner} local; added the previously
 * missing assertion on the second returned key.)
 */
@Test
public void testInclusiveBounds() throws IOException, ExecutionException, InterruptedException
{
    ColumnFamilyStore cfs = insertKey1Key2();

    List<Row> result = cfs.getRangeSlice(ByteBufferUtil.EMPTY_BYTE_BUFFER,
                                         Util.bounds("key1", "key2"),
                                         10,
                                         new NamesQueryFilter(ByteBufferUtil.bytes("asdf")),
                                         null);
    assertEquals(2, result.size());
    assert result.get(0).key.key.equals(ByteBufferUtil.bytes("key1"));
    assert result.get(1).key.key.equals(ByteBufferUtil.bytes("key2"));
}
/**
 * Verifies that a supercolumn-level tombstone keeps suppressing older columns
 * across flushes, and that only writes with a timestamp newer than the
 * tombstone become visible again.
 */
@Test
public void testDeleteSuperRowSticksAfterFlush() throws Throwable
{
    String tableName = "Keyspace1";
    String cfName= "Super1";
    ByteBuffer scfName = ByteBufferUtil.bytes("SuperDuper");
    Table table = Table.open(tableName);
    ColumnFamilyStore cfs = table.getColumnFamilyStore(cfName);
    DecoratedKey key = Util.dk("flush-resurrection");

    // create an isolated sstable.
    putColsSuper(cfs, key, scfName,
                 new Column(getBytes(1L), ByteBufferUtil.bytes("val1"), 1),
                 new Column(getBytes(2L), ByteBufferUtil.bytes("val2"), 1),
                 new Column(getBytes(3L), ByteBufferUtil.bytes("val3"), 1));
    cfs.forceBlockingFlush();

    // insert, don't flush.
    putColsSuper(cfs, key, scfName,
                 new Column(getBytes(4L), ByteBufferUtil.bytes("val4"), 1),
                 new Column(getBytes(5L), ByteBufferUtil.bytes("val5"), 1),
                 new Column(getBytes(6L), ByteBufferUtil.bytes("val6"), 1));

    // verify insert: all six subcolumns are visible before any deletion.
    final SlicePredicate sp = new SlicePredicate();
    sp.setSlice_range(new SliceRange());
    sp.getSlice_range().setCount(100);
    sp.getSlice_range().setStart(ArrayUtils.EMPTY_BYTE_ARRAY);
    sp.getSlice_range().setFinish(ArrayUtils.EMPTY_BYTE_ARRAY);
    assertRowAndColCount(1, 6, scfName, false, cfs.getRangeSlice(scfName, Util.range("f", "g"), 100, QueryFilter.getFilter(sp, cfs.getComparator()), null));

    // delete the supercolumn at timestamp 2, above every write so far.
    RowMutation rm = new RowMutation(table.name, key.key);
    rm.delete(new QueryPath(cfName, scfName), 2);
    rm.apply();

    // verify delete.
    assertRowAndColCount(1, 0, scfName, false, cfs.getRangeSlice(scfName, Util.range("f", "g"), 100, QueryFilter.getFilter(sp, cfs.getComparator()), null));

    // flush
    cfs.forceBlockingFlush();

    // re-verify delete: the tombstone must survive the flush.
    assertRowAndColCount(1, 0, scfName, false, cfs.getRangeSlice(scfName, Util.range("f", "g"), 100, QueryFilter.getFilter(sp, cfs.getComparator()), null));

    // late insert: timestamp 1 is older than the tombstone (2), so it must stay hidden.
    putColsSuper(cfs, key, scfName,
                 new Column(getBytes(4L), ByteBufferUtil.bytes("val4"), 1L),
                 new Column(getBytes(7L), ByteBufferUtil.bytes("val7"), 1L));

    // re-verify delete.
    assertRowAndColCount(1, 0, scfName, false, cfs.getRangeSlice(scfName, Util.range("f", "g"), 100, QueryFilter.getFilter(sp, cfs.getComparator()), null));

    // make sure new writes (timestamp 3 > tombstone) are recognized.
    putColsSuper(cfs, key, scfName,
                 new Column(getBytes(3L), ByteBufferUtil.bytes("val3"), 3),
                 new Column(getBytes(8L), ByteBufferUtil.bytes("val8"), 3),
                 new Column(getBytes(9L), ByteBufferUtil.bytes("val9"), 3));
    assertRowAndColCount(1, 3, scfName, false, cfs.getRangeSlice(scfName, Util.range("f", "g"), 100, QueryFilter.getFilter(sp, cfs.getComparator()), null));
}
/**
 * Asserts the number of rows returned and, per row, the number of (sub)columns.
 * When {@code sc} is non-null the subcolumns of that supercolumn are counted,
 * otherwise the row's top-level columns. When {@code isDeleted} is set, each
 * row's cf must additionally carry a deletion marker.
 */
private static void assertRowAndColCount(int rowCount, int colCount, ByteBuffer sc, boolean isDeleted, Collection<Row> rows) throws CharacterCodingException
{
    assert rowCount == rows.size() : "rowcount " + rows.size();
    for (Row row : rows)
    {
        ColumnFamily cf = row.cf;
        assert cf != null : "cf was null";
        if (sc == null)
            assert cf.getColumnCount() == colCount : "colcount " + cf.getColumnCount() + "|" + str(cf);
        else
            assert cf.getColumn(sc).getSubColumns().size() == colCount : cf.getColumn(sc).getSubColumns().size();
        if (isDeleted)
            assert cf.isMarkedForDelete() : "cf not marked for delete";
    }
}
/** Renders a cf's sorted columns as "(name,value,timestamp)," tuples for assertion messages. */
private static String str(ColumnFamily cf) throws CharacterCodingException
{
    StringBuilder out = new StringBuilder();
    for (IColumn column : cf.getSortedColumns())
    {
        out.append(String.format("(%s,%s,%d),",
                                 ByteBufferUtil.string(column.name()),
                                 ByteBufferUtil.string(column.value()),
                                 column.timestamp()));
    }
    return out.toString();
}
/** Applies a mutation adding the given subcolumns under one supercolumn of the given row. */
private static void putColsSuper(ColumnFamilyStore cfs, DecoratedKey key, ByteBuffer scfName, Column... cols) throws Throwable
{
    // build the supercolumn first, then wrap it in a cf and a mutation
    SuperColumn sc = new SuperColumn(scfName, cfs.metadata.subcolumnComparator);
    for (Column col : cols)
        sc.addColumn(col);
    ColumnFamily cf = ColumnFamily.create(cfs.table.name, cfs.getColumnFamilyName());
    cf.addColumn(sc);
    RowMutation mutation = new RowMutation(cfs.table.name, key.key);
    mutation.add(cf);
    mutation.apply();
}
/** Applies a mutation adding the given columns to the given row of a standard cf. */
private static void putColsStandard(ColumnFamilyStore cfs, DecoratedKey key, Column... cols) throws Throwable
{
    ColumnFamily cf = ColumnFamily.create(cfs.table.name, cfs.getColumnFamilyName());
    for (Column col : cols)
        cf.addColumn(col);
    RowMutation mutation = new RowMutation(cfs.table.name, key.key);
    mutation.add(cf);
    mutation.apply();
}
@Test
public void testDeleteStandardRowSticksAfterFlush() throws Throwable
{
    // test to make sure flushing after a delete doesn't resurrect deleted cols.
    String tableName = "Keyspace1";
    String cfName = "Standard1";
    Table table = Table.open(tableName);
    ColumnFamilyStore cfs = table.getColumnFamilyStore(cfName);
    DecoratedKey key = Util.dk("f-flush-resurrection");

    // slice predicate returning every column of the row (up to 100)
    SlicePredicate sp = new SlicePredicate();
    sp.setSlice_range(new SliceRange());
    sp.getSlice_range().setCount(100);
    sp.getSlice_range().setStart(ArrayUtils.EMPTY_BYTE_ARRAY);
    sp.getSlice_range().setFinish(ArrayUtils.EMPTY_BYTE_ARRAY);

    // insert
    putColsStandard(cfs, key, column("col1", "val1", 1), column("col2", "val2", 1));
    assertRowAndColCount(1, 2, null, false, cfs.getRangeSlice(null, Util.range("f", "g"), 100, QueryFilter.getFilter(sp, cfs.getComparator()), null));

    // flush.
    cfs.forceBlockingFlush();

    // insert, don't flush
    putColsStandard(cfs, key, column("col3", "val3", 1), column("col4", "val4", 1));
    assertRowAndColCount(1, 4, null, false, cfs.getRangeSlice(null, Util.range("f", "g"), 100, QueryFilter.getFilter(sp, cfs.getComparator()), null));

    // delete the whole row at timestamp 2 (from sstable and memtable)
    RowMutation rm = new RowMutation(table.name, key.key);
    rm.delete(new QueryPath(cfs.columnFamily, null, null), 2);
    rm.apply();

    // verify delete
    assertRowAndColCount(1, 0, null, true, cfs.getRangeSlice(null, Util.range("f", "g"), 100, QueryFilter.getFilter(sp, cfs.getComparator()), null));

    // flush
    cfs.forceBlockingFlush();

    // re-verify delete. // first breakage is right here because of CASSANDRA-1837.
    assertRowAndColCount(1, 0, null, true, cfs.getRangeSlice(null, Util.range("f", "g"), 100, QueryFilter.getFilter(sp, cfs.getComparator()), null));

    // simulate a 'late' insertion that gets put in after the deletion. should get inserted, but fail on read
    // because its timestamp (1) is older than the row tombstone (2).
    putColsStandard(cfs, key, column("col5", "val5", 1), column("col2", "val2", 1));

    // should still be nothing there because we deleted this row. 2nd breakage, but was undetected because of 1837.
    assertRowAndColCount(1, 0, null, true, cfs.getRangeSlice(null, Util.range("f", "g"), 100, QueryFilter.getFilter(sp, cfs.getComparator()), null));

    // make sure that new writes (timestamp 3 > tombstone) are recognized.
    putColsStandard(cfs, key, column("col6", "val6", 3), column("col7", "val7", 3));
    assertRowAndColCount(1, 2, null, true, cfs.getRangeSlice(null, Util.range("f", "g"), 100, QueryFilter.getFilter(sp, cfs.getComparator()), null));

    // and it remains so after flush. (this wasn't failing before, but it's good to check.)
    cfs.forceBlockingFlush();
    assertRowAndColCount(1, 2, null, true, cfs.getRangeSlice(null, Util.range("f", "g"), 100, QueryFilter.getFilter(sp, cfs.getComparator()), null));
}
// Inserts "key1" and "key2" into Keyspace2/Standard1 via two writeColumnFamily
// calls; testBackupAfterFlush expects two sstable generations afterwards, which
// suggests each call flushes — confirm against Util.writeColumnFamily.
private ColumnFamilyStore insertKey1Key2() throws IOException, ExecutionException, InterruptedException
{
    List<IMutation> rms = new LinkedList<IMutation>();
    RowMutation rm;
    rm = new RowMutation("Keyspace2", ByteBufferUtil.bytes("key1"));
    rm.add(new QueryPath("Standard1", null, ByteBufferUtil.bytes("Column1")), ByteBufferUtil.bytes("asdf"), 0);
    rms.add(rm);
    Util.writeColumnFamily(rms);

    // NOTE(review): rms still contains the key1 mutation here, so the second
    // write re-applies it alongside key2 — harmless if idempotent, but confirm intended.
    rm = new RowMutation("Keyspace2", ByteBufferUtil.bytes("key2"));
    rm.add(new QueryPath("Standard1", null, ByteBufferUtil.bytes("Column1")), ByteBufferUtil.bytes("asdf"), 0);
    rms.add(rm);
    return Util.writeColumnFamily(rms);
}
/**
 * Verifies that after flushing, every component of each flushed sstable
 * generation has been mirrored into the backups directory.
 */
@Test
public void testBackupAfterFlush() throws Throwable
{
    ColumnFamilyStore cfs = insertKey1Key2();

    for (int version = 1; version <= 2; ++version)
    {
        Descriptor live = new Descriptor(cfs.directories.getDirectoryForNewSSTables(1), "Keyspace2", "Standard1", version, false);
        Descriptor backup = new Descriptor(Directories.getBackupsDirectory(live), "Keyspace2", "Standard1", version, false);
        for (Component component : new Component[]{ Component.DATA, Component.PRIMARY_INDEX, Component.FILTER, Component.STATS })
            assertTrue("can not find backedup file:" + backup.filenameFor(component), new File(backup.filenameFor(component)).exists());
    }
}
/**
 * Verifies that a by-name slice for a supercolumn merges subcolumns from an
 * sstable and the memtable into a single result.
 */
@Test
public void testSuperSliceByNamesCommand() throws Throwable
{
    String tableName = "Keyspace1";
    String cfName = "Super4";
    ByteBuffer scName = ByteBufferUtil.bytes("HerpDerp");
    DecoratedKey key = Util.dk("multiget-slice-resurrection");
    Table table = Table.open(tableName);
    ColumnFamilyStore cfs = table.getColumnFamilyStore(cfName);

    // one subcolumn flushed to an sstable...
    putColsSuper(cfs, key, scName, new Column(ByteBufferUtil.bytes("c1"), ByteBufferUtil.bytes("a"), 1));
    cfs.forceBlockingFlush();

    // ...and a second one still in the memtable
    putColsSuper(cfs, key, scName, new Column(ByteBufferUtil.bytes("c2"), ByteBufferUtil.bytes("b"), 2));

    // fetching the supercolumn by name must see both subcolumns
    SliceByNamesReadCommand command = new SliceByNamesReadCommand(tableName, key.key, new QueryPath(cfName), Collections.singletonList(scName));
    SuperColumn merged = (SuperColumn) command.getRow(table).cf.getColumn(scName);
    assertColumns(merged, "c1", "c2");
}
// CASSANDRA-3467. the key here is that supercolumn and subcolumn comparators are different
@Test
public void testSliceByNamesCommandOnUUIDTypeSCF() throws Throwable
{
    String tableName = "Keyspace1";
    String cfName = "Super6";
    ByteBuffer scName = LexicalUUIDType.instance.fromString("a4ed3562-0e8e-4b41-bdfd-c45a2774682d");
    Table table = Table.open(tableName);
    ColumnFamilyStore cfs = table.getColumnFamilyStore(cfName);
    DecoratedKey key = Util.dk("slice-get-uuid-type");

    // one supercolumn with two subcolumns
    putColsSuper(cfs, key, scName,
                 new Column(ByteBufferUtil.bytes("a"), ByteBufferUtil.bytes("A"), 1),
                 new Column(ByteBufferUtil.bytes("b"), ByteBufferUtil.bytes("B"), 1));

    // reading the entire supercolumn must return both subcolumn values
    IColumn whole = cfs.getColumnFamily(QueryFilter.getIdentityFilter(key, new QueryPath(cfName, scName))).getColumn(scName);
    assertEquals(ByteBufferUtil.bytes("A"), whole.getSubColumn(ByteBufferUtil.bytes("a")).value());
    assertEquals(ByteBufferUtil.bytes("B"), whole.getSubColumn(ByteBufferUtil.bytes("b")).value());

    // a by-name slice naming both subcolumns must match the straight get
    ArrayList<ByteBuffer> names = new ArrayList<ByteBuffer>();
    names.add(ByteBufferUtil.bytes("a"));
    names.add(ByteBufferUtil.bytes("b"));
    SliceByNamesReadCommand command = new SliceByNamesReadCommand(tableName, key.key, new QueryPath(cfName, scName), names);
    IColumn sliced = command.getRow(table).cf.getColumn(scName);
    assertEquals(ByteBufferUtil.bytes("A"), sliced.getSubColumn(ByteBufferUtil.bytes("a")).value());
    assertEquals(ByteBufferUtil.bytes("B"), sliced.getSubColumn(ByteBufferUtil.bytes("b")).value());
}
/**
 * Verifies that a by-name read still returns the higher-timestamp value after
 * an sstable is reloaded without its stats (metadata) component.
 * (Fixed: the {@code File.delete()} result was silently ignored; a failed
 * delete would make the test pass vacuously. Note the "Metatada" typo in the
 * method name is kept for compatibility with any external test selection.)
 */
@Test
public void testSliceByNamesCommandOldMetatada() throws Throwable
{
    String tableName = "Keyspace1";
    String cfName= "Standard1";
    DecoratedKey key = Util.dk("slice-name-old-metadata");
    ByteBuffer cname = ByteBufferUtil.bytes("c1");
    Table table = Table.open(tableName);
    ColumnFamilyStore cfs = table.getColumnFamilyStore(cfName);
    cfs.clearUnsafe();

    // Create a column with a 'high' timestamp
    putColsStandard(cfs, key, new Column(cname, ByteBufferUtil.bytes("a"), 2));
    cfs.forceBlockingFlush();

    // Nuke the metadata and reload that sstable
    Collection<SSTableReader> ssTables = cfs.getSSTables();
    assertEquals(1, ssTables.size());
    cfs.clearUnsafe();
    assertEquals(0, cfs.getSSTables().size());

    // the test is meaningless unless the stats component is actually gone
    File statsFile = new File(ssTables.iterator().next().descriptor.filenameFor(SSTable.COMPONENT_STATS));
    assert statsFile.delete() : "could not delete " + statsFile;
    cfs.loadNewSSTables();

    // Add another column with a lower timestamp
    putColsStandard(cfs, key, new Column(cname, ByteBufferUtil.bytes("b"), 1));

    // Test fetching the column by name returns the first column
    SliceByNamesReadCommand cmd = new SliceByNamesReadCommand(tableName, key.key, new QueryPath(cfName), Collections.singletonList(cname));
    ColumnFamily cf = cmd.getRow(table).cf;
    Column column = (Column) cf.getColumn(cname);
    assert column.value().equals(ByteBufferUtil.bytes("a")) : "expecting a, got " + ByteBufferUtil.string(column.value());
}
/** Asserts that the live columns summed across all rows equals expectedCount. */
private static void assertTotalColCount(Collection<Row> rows, int expectedCount) throws CharacterCodingException
{
    int live = 0;
    for (Row row : rows)
        live += row.getLiveColumnCount();
    assert live == expectedCount : "Expected " + expectedCount + " live columns but got " + live + ": " + rows;
}
@Test
public void testRangeSliceColumnsLimit() throws Throwable
{
    // Exercises getRangeSlice's maxResults accounting; the trailing boolean
    // appears to make the limit count columns rather than rows — the expected
    // totals below only add up under that reading (confirm against getRangeSlice).
    String tableName = "Keyspace1";
    String cfName = "Standard1";
    Table table = Table.open(tableName);
    ColumnFamilyStore cfs = table.getColumnFamilyStore(cfName);
    cfs.clearUnsafe();

    // rows: a -> 5 cols, b -> 2 cols, c -> 4 cols; 11 live columns total
    Column[] cols = new Column[5];
    for (int i = 0; i < 5; i++)
        cols[i] = column("c" + i, "value", 1);
    putColsStandard(cfs, Util.dk("a"), cols[0], cols[1], cols[2], cols[3], cols[4]);
    putColsStandard(cfs, Util.dk("b"), cols[0], cols[1]);
    putColsStandard(cfs, Util.dk("c"), cols[0], cols[1], cols[2], cols[3]);
    cfs.forceBlockingFlush();

    SlicePredicate sp = new SlicePredicate();
    sp.setSlice_range(new SliceRange());
    sp.getSlice_range().setCount(1);
    sp.getSlice_range().setStart(ArrayUtils.EMPTY_BYTE_ARRAY);
    sp.getSlice_range().setFinish(ArrayUtils.EMPTY_BYTE_ARRAY);

    assertTotalColCount(cfs.getRangeSlice(null, Util.range("", ""), 3, QueryFilter.getFilter(sp, cfs.getComparator()), null, true), 3);
    assertTotalColCount(cfs.getRangeSlice(null, Util.range("", ""), 5, QueryFilter.getFilter(sp, cfs.getComparator()), null, true), 5);
    assertTotalColCount(cfs.getRangeSlice(null, Util.range("", ""), 8, QueryFilter.getFilter(sp, cfs.getComparator()), null, true), 8);
    assertTotalColCount(cfs.getRangeSlice(null, Util.range("", ""), 10, QueryFilter.getFilter(sp, cfs.getComparator()), null, true), 10);
    assertTotalColCount(cfs.getRangeSlice(null, Util.range("", ""), 100, QueryFilter.getFilter(sp, cfs.getComparator()), null, true), 11);

    // Check that when querying by name, we always include all names for a
    // given row even if it means returning more columns than requested (this is necessary for CQL)
    sp = new SlicePredicate();
    sp.setColumn_names(Arrays.asList(
        ByteBufferUtil.bytes("c0"),
        ByteBufferUtil.bytes("c1"),
        ByteBufferUtil.bytes("c2")
    ));

    assertTotalColCount(cfs.getRangeSlice(null, Util.range("", ""), 1, QueryFilter.getFilter(sp, cfs.getComparator()), null, true), 3);
    assertTotalColCount(cfs.getRangeSlice(null, Util.range("", ""), 4, QueryFilter.getFilter(sp, cfs.getComparator()), null, true), 5);
    assertTotalColCount(cfs.getRangeSlice(null, Util.range("", ""), 5, QueryFilter.getFilter(sp, cfs.getComparator()), null, true), 5);
    assertTotalColCount(cfs.getRangeSlice(null, Util.range("", ""), 6, QueryFilter.getFilter(sp, cfs.getComparator()), null, true), 8);
    assertTotalColCount(cfs.getRangeSlice(null, Util.range("", ""), 100, QueryFilter.getFilter(sp, cfs.getComparator()), null, true), 8);
}
}
|
|
package org.apache.solr.search.grouping;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.lucene.search.Collector;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.MultiCollector;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TimeLimitingCollector;
import org.apache.lucene.search.TotalHitCountCollector;
import org.apache.lucene.search.grouping.AbstractAllGroupHeadsCollector;
import org.apache.lucene.search.grouping.term.TermAllGroupHeadsCollector;
import org.apache.lucene.util.FixedBitSet;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.search.BitDocSet;
import org.apache.solr.search.DocSet;
import org.apache.solr.search.DocSetCollector;
import org.apache.solr.search.DocSetDelegateCollector;
import org.apache.solr.search.QueryUtils;
import org.apache.solr.search.SolrIndexSearcher;
import org.apache.solr.search.SolrIndexSearcher.ProcessedFilter;
import org.apache.solr.search.grouping.distributed.shardresultserializer.ShardResultTransformer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Responsible for executing a search with a number of {@link Command} instances.
* A typical search can have more then one {@link Command} instances.
*
* @lucene.experimental
*/
public class CommandHandler {
public static class Builder {
private SolrIndexSearcher.QueryCommand queryCommand;
private List<Command> commands = new ArrayList<Command>();
private SolrIndexSearcher searcher;
private boolean needDocSet = false;
private boolean truncateGroups = false;
private boolean includeHitCount = false;
public Builder setQueryCommand(SolrIndexSearcher.QueryCommand queryCommand) {
this.queryCommand = queryCommand;
this.needDocSet = (queryCommand.getFlags() & SolrIndexSearcher.GET_DOCSET) != 0;
return this;
}
public Builder addCommandField(Command commandField) {
commands.add(commandField);
return this;
}
public Builder setSearcher(SolrIndexSearcher searcher) {
this.searcher = searcher;
return this;
}
/**
* Sets whether to compute a {@link DocSet}.
* May override the value set by {@link #setQueryCommand(org.apache.solr.search.SolrIndexSearcher.QueryCommand)}.
*
* @param needDocSet Whether to compute a {@link DocSet}
* @return this
*/
public Builder setNeedDocSet(boolean needDocSet) {
this.needDocSet = needDocSet;
return this;
}
public Builder setTruncateGroups(boolean truncateGroups) {
this.truncateGroups = truncateGroups;
return this;
}
public Builder setIncludeHitCount(boolean includeHitCount) {
this.includeHitCount = includeHitCount;
return this;
}
public CommandHandler build() {
if (queryCommand == null || searcher == null) {
throw new IllegalStateException("All fields must be set");
}
return new CommandHandler(queryCommand, commands, searcher, needDocSet, truncateGroups, includeHitCount);
}
}
private final static Logger logger = LoggerFactory.getLogger(CommandHandler.class);
private final SolrIndexSearcher.QueryCommand queryCommand;
private final List<Command> commands;
private final SolrIndexSearcher searcher;
private final boolean needDocset;
private final boolean truncateGroups;
private final boolean includeHitCount;
private boolean partialResults = false;
private int totalHitCount;
private DocSet docSet;
private CommandHandler(SolrIndexSearcher.QueryCommand queryCommand,
List<Command> commands,
SolrIndexSearcher searcher,
boolean needDocset,
boolean truncateGroups,
boolean includeHitCount) {
this.queryCommand = queryCommand;
this.commands = commands;
this.searcher = searcher;
this.needDocset = needDocset;
this.truncateGroups = truncateGroups;
this.includeHitCount = includeHitCount;
}
@SuppressWarnings("unchecked")
public void execute() throws IOException {
final int nrOfCommands = commands.size();
List<Collector> collectors = new ArrayList<Collector>(nrOfCommands);
for (Command command : commands) {
collectors.addAll(command.create());
}
ProcessedFilter filter = searcher.getProcessedFilter
(queryCommand.getFilter(), queryCommand.getFilterList());
Query query = QueryUtils.makeQueryable(queryCommand.getQuery());
if (truncateGroups) {
docSet = computeGroupedDocSet(query, filter, collectors);
} else if (needDocset) {
docSet = computeDocSet(query, filter, collectors);
} else if (!collectors.isEmpty()) {
searchWithTimeLimiter(query, filter, MultiCollector.wrap(collectors.toArray(new Collector[nrOfCommands])));
} else {
searchWithTimeLimiter(query, filter, null);
}
}
private DocSet computeGroupedDocSet(Query query, ProcessedFilter filter, List<Collector> collectors) throws IOException {
Command firstCommand = commands.get(0);
AbstractAllGroupHeadsCollector termAllGroupHeadsCollector =
TermAllGroupHeadsCollector.create(firstCommand.getKey(), firstCommand.getSortWithinGroup());
if (collectors.isEmpty()) {
searchWithTimeLimiter(query, filter, termAllGroupHeadsCollector);
} else {
collectors.add(termAllGroupHeadsCollector);
searchWithTimeLimiter(query, filter, MultiCollector.wrap(collectors.toArray(new Collector[collectors.size()])));
}
return new BitDocSet(termAllGroupHeadsCollector.retrieveGroupHeads(searcher.maxDoc()));
}
private DocSet computeDocSet(Query query, ProcessedFilter filter, List<Collector> collectors) throws IOException {
int maxDoc = searcher.maxDoc();
DocSetCollector docSetCollector;
if (collectors.isEmpty()) {
docSetCollector = new DocSetCollector(maxDoc >> 6, maxDoc);
} else {
Collector wrappedCollectors = MultiCollector.wrap(collectors.toArray(new Collector[collectors.size()]));
docSetCollector = new DocSetDelegateCollector(maxDoc >> 6, maxDoc, wrappedCollectors);
}
searchWithTimeLimiter(query, filter, docSetCollector);
return docSetCollector.getDocSet();
}
@SuppressWarnings("unchecked")
public NamedList processResult(SolrIndexSearcher.QueryResult queryResult, ShardResultTransformer transformer) throws IOException {
if (docSet != null) {
queryResult.setDocSet(docSet);
}
queryResult.setPartialResults(partialResults);
return transformer.transform(commands);
}
/**
* Invokes search with the specified filter and collector.
* If a time limit has been specified then wrap the collector in the TimeLimitingCollector
*/
private void searchWithTimeLimiter(final Query query,
final ProcessedFilter filter,
Collector collector) throws IOException {
if (queryCommand.getTimeAllowed() > 0 ) {
collector = new TimeLimitingCollector(collector, TimeLimitingCollector.getGlobalCounter(), queryCommand.getTimeAllowed());
}
TotalHitCountCollector hitCountCollector = new TotalHitCountCollector();
if (includeHitCount) {
collector = MultiCollector.wrap(collector, hitCountCollector);
}
Filter luceneFilter = filter.filter;
if (filter.postFilter != null) {
filter.postFilter.setLastDelegate(collector);
collector = filter.postFilter;
}
try {
searcher.search(query, luceneFilter, collector);
} catch (TimeLimitingCollector.TimeExceededException x) {
partialResults = true;
logger.warn( "Query: " + query + "; " + x.getMessage() );
}
if (includeHitCount) {
totalHitCount = hitCountCollector.getTotalHits();
}
}
/** @return the total hit count captured by the last search, if hit counting was enabled */
public int getTotalHitCount() {
return totalHitCount;
}
}
|
|
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper.core;
import com.carrotsearch.hppc.ObjectOpenHashSet;
import com.carrotsearch.hppc.cursors.ObjectCursor;
import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
import com.google.common.base.Objects;
import com.google.common.collect.ImmutableList;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.Term;
import org.apache.lucene.queries.TermsQuery;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.FuzzyQuery;
import org.apache.lucene.search.MultiTermQuery;
import org.apache.lucene.search.PrefixQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.RegexpQuery;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TermRangeQuery;
import org.apache.lucene.index.Terms;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.ElasticsearchIllegalArgumentException;
import org.elasticsearch.ElasticsearchIllegalStateException;
import org.elasticsearch.Version;
import org.elasticsearch.action.fieldstats.FieldStats;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.lucene.BytesRefs;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.*;
import org.elasticsearch.index.mapper.ParseContext.Document;
import org.elasticsearch.index.mapper.internal.AllFieldMapper;
import org.elasticsearch.index.mapper.object.ObjectMapper;
import org.elasticsearch.index.mapper.object.RootObjectMapper;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.search.FieldDataTermsFilter;
import org.elasticsearch.index.similarity.SimilarityLookupService;
import org.elasticsearch.index.similarity.SimilarityProvider;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.List;
import java.util.Locale;
import java.util.TreeMap;
/**
*
*/
public abstract class AbstractFieldMapper<T> implements FieldMapper<T> {
/** Shared default settings for field mappers. */
public static class Defaults {
public static final FieldType FIELD_TYPE = new FieldType();
// Doc values were disabled by default on indices created before 2.x.
public static final boolean PRE_2X_DOC_VALUES = false;
static {
FIELD_TYPE.setTokenized(true);
FIELD_TYPE.setStored(false);
FIELD_TYPE.setStoreTermVectors(false);
FIELD_TYPE.setOmitNorms(false);
FIELD_TYPE.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS);
// Freeze so the shared default instance cannot be mutated by accident;
// subclasses must copy it before customizing.
FIELD_TYPE.freeze();
}
public static final float BOOST = 1.0f;
public static final ContentPath.Type PATH_TYPE = ContentPath.Type.FULL;
}
/**
 * Fluent builder base class for {@link AbstractFieldMapper} subclasses.
 * Each setter mutates the builder (and its {@link FieldType}) and returns
 * {@code builder} (the concrete subtype) for chaining.
 */
public abstract static class Builder<T extends Builder, Y extends AbstractFieldMapper> extends Mapper.Builder<T, Y> {
protected final FieldType fieldType;
// Index options captured at construction time, used to restore them when
// indexing is re-enabled after having been switched off.
private final IndexOptions defaultOptions;
// Tri-state: null means "not explicitly configured".
protected Boolean docValues;
protected float boost = Defaults.BOOST;
protected boolean omitNormsSet = false;
protected String indexName;
protected NamedAnalyzer indexAnalyzer;
protected NamedAnalyzer searchAnalyzer;
protected Boolean includeInAll;
protected boolean indexOptionsSet = false;
protected SimilarityProvider similarity;
protected Loading normsLoading;
@Nullable
protected Settings fieldDataSettings;
protected final MultiFields.Builder multiFieldsBuilder;
protected CopyTo copyTo;
protected Builder(String name, FieldType fieldType) {
super(name);
this.fieldType = fieldType;
this.defaultOptions = fieldType.indexOptions(); // we have to store it since the fieldType is mutable
multiFieldsBuilder = new MultiFields.Builder();
}
/** Enables or disables indexing of the field. */
public T index(boolean index) {
if (index) {
if (fieldType.indexOptions() == IndexOptions.NONE) {
/*
 * the logic here is to reset to the default options only if we are not indexed ie. options are null
 * if the fieldType has a non-null option we are all good it might have been set through a different
 * call.
 */
final IndexOptions options = getDefaultIndexOption();
assert options != IndexOptions.NONE : "default IndexOptions is NONE can't enable indexing";
fieldType.setIndexOptions(options);
}
} else {
fieldType.setIndexOptions(IndexOptions.NONE);
}
return builder;
}
protected IndexOptions getDefaultIndexOption() {
return defaultOptions;
}
public T store(boolean store) {
this.fieldType.setStored(store);
return builder;
}
public T docValues(boolean docValues) {
this.docValues = docValues;
return builder;
}
public T storeTermVectors(boolean termVectors) {
if (termVectors) {
this.fieldType.setStoreTermVectors(termVectors);
} // don't set it to false, it is default and might be flipped by a more specific option
return builder;
}
// The offsets/positions/payloads setters below also switch term vectors on
// when enabled, since those sub-options require term vectors to be stored.
public T storeTermVectorOffsets(boolean termVectorOffsets) {
if (termVectorOffsets) {
this.fieldType.setStoreTermVectors(termVectorOffsets);
}
this.fieldType.setStoreTermVectorOffsets(termVectorOffsets);
return builder;
}
public T storeTermVectorPositions(boolean termVectorPositions) {
if (termVectorPositions) {
this.fieldType.setStoreTermVectors(termVectorPositions);
}
this.fieldType.setStoreTermVectorPositions(termVectorPositions);
return builder;
}
public T storeTermVectorPayloads(boolean termVectorPayloads) {
if (termVectorPayloads) {
this.fieldType.setStoreTermVectors(termVectorPayloads);
}
this.fieldType.setStoreTermVectorPayloads(termVectorPayloads);
return builder;
}
public T tokenized(boolean tokenized) {
this.fieldType.setTokenized(tokenized);
return builder;
}
public T boost(float boost) {
this.boost = boost;
return builder;
}
public T omitNorms(boolean omitNorms) {
this.fieldType.setOmitNorms(omitNorms);
this.omitNormsSet = true;
return builder;
}
public T indexOptions(IndexOptions indexOptions) {
this.fieldType.setIndexOptions(indexOptions);
this.indexOptionsSet = true;
return builder;
}
public T indexName(String indexName) {
this.indexName = indexName;
return builder;
}
public T indexAnalyzer(NamedAnalyzer indexAnalyzer) {
this.indexAnalyzer = indexAnalyzer;
return builder;
}
public T searchAnalyzer(NamedAnalyzer searchAnalyzer) {
this.searchAnalyzer = searchAnalyzer;
return builder;
}
public T includeInAll(Boolean includeInAll) {
this.includeInAll = includeInAll;
return builder;
}
public T similarity(SimilarityProvider similarity) {
this.similarity = similarity;
return builder;
}
public T normsLoading(Loading normsLoading) {
this.normsLoading = normsLoading;
return builder;
}
public T fieldDataSettings(Settings settings) {
this.fieldDataSettings = settings;
return builder;
}
public T multiFieldPathType(ContentPath.Type pathType) {
multiFieldsBuilder.pathType(pathType);
return builder;
}
public T addMultiField(Mapper.Builder mapperBuilder) {
multiFieldsBuilder.add(mapperBuilder);
return builder;
}
public T copyTo(CopyTo copyTo) {
this.copyTo = copyTo;
return builder;
}
/** Builds the full set of resolved names for the field being constructed. */
protected Names buildNames(BuilderContext context) {
return new Names(name, buildIndexName(context), buildIndexNameClean(context), buildFullName(context), context.path().sourcePath());
}
// On 2.0+ indices the custom index_name is ignored and the full path is used.
protected String buildIndexName(BuilderContext context) {
if (context.indexCreatedVersion().onOrAfter(Version.V_2_0_0)) {
return buildFullName(context);
}
String actualIndexName = indexName == null ? name : indexName;
return context.path().pathAsText(actualIndexName);
}
protected String buildIndexNameClean(BuilderContext context) {
if (context.indexCreatedVersion().onOrAfter(Version.V_2_0_0)) {
return buildFullName(context);
}
return indexName == null ? name : indexName;
}
protected String buildFullName(BuilderContext context) {
return context.path().fullPathAsText(name);
}
}
// Resolved names (simple name, index name, full path) for this field.
protected final Names names;
protected float boost;
protected FieldType fieldType;
// Tri-state: null means "fall back to defaultDocValues()".
protected final Boolean docValues;
protected final NamedAnalyzer indexAnalyzer;
protected NamedAnalyzer searchAnalyzer;
protected final SimilarityProvider similarity;
protected Loading normsLoading;
// Settings explicitly provided by the user; null when defaults are in effect.
protected Settings customFieldDataSettings;
protected FieldDataType fieldDataType;
protected final MultiFields multiFields;
protected CopyTo copyTo;
// true when the owning index was created before version 2.0.0.
protected final boolean indexCreatedBefore2x;
/**
 * Convenience constructor with no multi-fields and no copy-to configuration;
 * delegates to the full constructor.
 */
protected AbstractFieldMapper(Names names, float boost, FieldType fieldType, Boolean docValues, NamedAnalyzer indexAnalyzer,
NamedAnalyzer searchAnalyzer, SimilarityProvider similarity,
Loading normsLoading, @Nullable Settings fieldDataSettings, Settings indexSettings) {
this(names, boost, fieldType, docValues, indexAnalyzer, searchAnalyzer, similarity,
normsLoading, fieldDataSettings, indexSettings, MultiFields.empty(), null);
}
/**
 * Full constructor. Freezes the given {@link FieldType}, resolves the analyzers,
 * field data type and doc-values flag, and stores the multi-field / copy-to setup.
 *
 * @param docValues explicit doc-values setting, or null to derive it from the
 *                  field data format / defaults
 * @param fieldDataSettings user-provided field data settings, or null for defaults
 */
protected AbstractFieldMapper(Names names, float boost, FieldType fieldType, Boolean docValues, NamedAnalyzer indexAnalyzer,
NamedAnalyzer searchAnalyzer, SimilarityProvider similarity,
Loading normsLoading, @Nullable Settings fieldDataSettings, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
assert indexSettings != null;
this.names = names;
this.boost = boost;
this.fieldType = fieldType;
this.fieldType.freeze();
this.indexCreatedBefore2x = Version.indexCreated(indexSettings).before(Version.V_2_0_0);
// An indexed but not-analyzed field with no explicit analyzer gets the
// keyword analyzer so its values are indexed verbatim.
boolean indexedNotAnalyzed = this.fieldType.tokenized() == false && this.fieldType.indexOptions() != IndexOptions.NONE;
if (indexAnalyzer == null && indexedNotAnalyzed) {
this.indexAnalyzer = this.searchAnalyzer = Lucene.KEYWORD_ANALYZER;
} else {
this.indexAnalyzer = indexAnalyzer;
this.searchAnalyzer = searchAnalyzer;
}
this.similarity = similarity;
this.normsLoading = normsLoading;
this.customFieldDataSettings = fieldDataSettings;
if (fieldDataSettings == null) {
this.fieldDataType = defaultFieldDataType();
} else {
// create a new field data type, with the default settings as well as the "new ones"
this.fieldDataType = new FieldDataType(defaultFieldDataType().getType(),
ImmutableSettings.builder().put(defaultFieldDataType().getSettings()).put(fieldDataSettings)
);
}
if (docValues != null) {
// explicitly set
this.docValues = docValues;
} else if (fieldDataType != null && FieldDataType.DOC_VALUES_FORMAT_VALUE.equals(fieldDataType.getFormat(indexSettings))) {
// convoluted way to enable doc values, should be removed in the future
this.docValues = true;
} else {
this.docValues = null; // use the default
}
this.multiFields = multiFields;
this.copyTo = copyTo;
}
/**
 * Default doc-values setting when none was configured explicitly:
 * always off for pre-2.x indices, otherwise on for fields that are
 * indexed but not analyzed.
 */
protected boolean defaultDocValues() {
    if (indexCreatedBefore2x) {
        return Defaults.PRE_2X_DOC_VALUES;
    }
    final boolean indexed = fieldType.indexOptions() != IndexOptions.NONE;
    final boolean analyzed = fieldType.tokenized();
    return indexed && !analyzed;
}
/** Whether this field stores doc values, honoring the explicit setting when present. */
@Override
public final boolean hasDocValues() {
    if (docValues != null) {
        return docValues;
    }
    return defaultDocValues();
}
/** @return the simple (leaf) name of this field */
@Override
public String name() {
return names.name();
}
@Override
public Names names() {
return this.names;
}
/** Subclasses supply their default Lucene field type. */
public abstract FieldType defaultFieldType();
/** Subclasses supply their default field data type. */
public abstract FieldDataType defaultFieldDataType();
@Override
public final FieldDataType fieldDataType() {
return fieldDataType;
}
@Override
public FieldType fieldType() {
return fieldType;
}
@Override
public float boost() {
return this.boost;
}
@Override
public Analyzer indexAnalyzer() {
return this.indexAnalyzer;
}
@Override
public Analyzer searchAnalyzer() {
return this.searchAnalyzer;
}
// No dedicated quote analyzer here: quoted phrases use the search analyzer.
@Override
public Analyzer searchQuoteAnalyzer() {
return this.searchAnalyzer;
}
@Override
public SimilarityProvider similarity() {
return similarity;
}
@Override
public CopyTo copyTo() {
return copyTo;
}
/**
 * Parses the current token into Lucene fields (via {@link #parseCreateField}),
 * applies boost, adds the fields to the document, and then processes
 * multi-fields and copy-to targets.
 *
 * @return always null; field mappers do not produce dynamic sub-mappers here
 * @throws MapperParsingException wrapping any failure while parsing the value
 */
@Override
public Mapper parse(ParseContext context) throws IOException {
final List<Field> fields = new ArrayList<>(2);
try {
parseCreateField(context, fields);
for (Field field : fields) {
// Subclasses that manage boost themselves opt out via customBoost().
if (!customBoost()) {
field.setBoost(boost);
}
if (context.listener().beforeFieldAdded(this, field, context)) {
context.doc().add(field);
}
}
} catch (Exception e) {
throw new MapperParsingException("failed to parse [" + names.fullName() + "]", e);
}
multiFields.parse(this, context);
if (copyTo != null) {
copyTo.parse(context);
}
return null;
}
/**
 * Parse the field value and populate <code>fields</code>.
 */
protected abstract void parseCreateField(ParseContext context, List<Field> fields) throws IOException;
/**
 * Derived classes can override it to specify that boost value is set by derived classes.
 * When this returns true, {@link #parse} does not apply the mapper-level boost.
 */
protected boolean customBoost() {
return false;
}
/** Notifies the listener of this mapper and all of its multi-field sub-mappers. */
@Override
public void traverse(FieldMapperListener fieldMapperListener) {
fieldMapperListener.fieldMapper(this);
multiFields.traverse(fieldMapperListener);
}
/** Field mappers contain no object mappers, so there is nothing to visit. */
@Override
public void traverse(ObjectMapperListener objectMapperListener) {
// nothing to do here...
}
/** Default: return the value unchanged for search-time rendering. */
@Override
public Object valueForSearch(Object value) {
return value;
}
/** Converts a value to the indexed byte representation used for term-level queries. */
@Override
public BytesRef indexedValueForSearch(Object value) {
return BytesRefs.toBytesRef(value);
}
// By default this mapper does not provide a special query for query_string terms.
@Override
public Query queryStringTermQuery(Term term) {
return null;
}
@Override
public boolean useTermQueryWithQueryString() {
return false;
}
/** Builds a term query on this field's index name for the given value. */
@Override
public Query termQuery(Object value, @Nullable QueryParseContext context) {
return new TermQuery(names().createIndexNameTerm(indexedValueForSearch(value)));
}
/** Builds the filter equivalent of {@link #termQuery}. */
@Override
public Filter termFilter(Object value, @Nullable QueryParseContext context) {
return Queries.wrap(new TermQuery(names().createIndexNameTerm(indexedValueForSearch(value))));
}
/**
 * Builds a filter matching any of the given values: a match-nothing filter for an
 * empty list, a plain term filter for a single value, and a {@link TermsQuery}
 * based filter otherwise.
 */
@Override
public Filter termsFilter(List values, @Nullable QueryParseContext context) {
    final int count = values.size();
    if (count == 0) {
        return Queries.newMatchNoDocsFilter();
    }
    if (count == 1) {
        // A single-term filter can return a DocIdSet directly backed by a
        // postings list instead of materializing everything into a bit set.
        return termFilter(values.get(0), context);
    }
    BytesRef[] terms = new BytesRef[count];
    for (int i = 0; i < count; i++) {
        terms[i] = indexedValueForSearch(values.get(i));
    }
    return Queries.wrap(new TermsQuery(names.indexName(), terms));
}
/**
 * A terms filter based on the field data cache
 */
@Override
public Filter fieldDataTermsFilter(List values, @Nullable QueryParseContext context) {
// create with initial size large enough to avoid rehashing
ObjectOpenHashSet<BytesRef> terms =
new ObjectOpenHashSet<>((int) (values.size() * (1 + ObjectOpenHashSet.DEFAULT_LOAD_FACTOR)));
for (int i = 0, len = values.size(); i < len; i++) {
terms.add(indexedValueForSearch(values.get(i)));
}
// NOTE(review): context is annotated @Nullable but dereferenced here — confirm
// callers always pass a non-null context for this filter.
return FieldDataTermsFilter.newBytes(context.getForField(this), terms);
}
/**
 * Builds a term range query over this field. Null bounds mean open-ended.
 */
@Override
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
return new TermRangeQuery(names.indexName(),
lowerTerm == null ? null : indexedValueForSearch(lowerTerm),
upperTerm == null ? null : indexedValueForSearch(upperTerm),
includeLower, includeUpper);
}
/** Filter equivalent of {@link #rangeQuery}. */
@Override
public Filter rangeFilter(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
return Queries.wrap(new TermRangeQuery(names.indexName(),
lowerTerm == null ? null : indexedValueForSearch(lowerTerm),
upperTerm == null ? null : indexedValueForSearch(upperTerm),
includeLower, includeUpper));
}
/** Builds a fuzzy query; the edit distance is derived from the fuzziness and the value. */
@Override
public Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) {
return new FuzzyQuery(names.createIndexNameTerm(indexedValueForSearch(value)), fuzziness.asDistance(value), prefixLength, maxExpansions, transpositions);
}
/** Builds a prefix query, optionally with a custom rewrite method. */
@Override
public Query prefixQuery(Object value, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryParseContext context) {
PrefixQuery query = new PrefixQuery(names().createIndexNameTerm(indexedValueForSearch(value)));
if (method != null) {
query.setRewriteMethod(method);
}
return query;
}
/** Filter equivalent of {@link #prefixQuery} (default rewrite method). */
@Override
public Filter prefixFilter(Object value, @Nullable QueryParseContext context) {
return Queries.wrap(new PrefixQuery(names().createIndexNameTerm(indexedValueForSearch(value))));
}
/** Builds a regexp query, optionally with a custom rewrite method. */
@Override
public Query regexpQuery(Object value, int flags, int maxDeterminizedStates, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryParseContext context) {
RegexpQuery query = new RegexpQuery(names().createIndexNameTerm(indexedValueForSearch(value)), flags, maxDeterminizedStates);
if (method != null) {
query.setRewriteMethod(method);
}
return query;
}
/** Filter equivalent of {@link #regexpQuery} (default rewrite method). */
@Override
public Filter regexpFilter(Object value, int flags, int maxDeterminizedStates, @Nullable QueryParseContext parseContext) {
return Queries.wrap(new RegexpQuery(names().createIndexNameTerm(indexedValueForSearch(value)), flags, maxDeterminizedStates));
}
// No null-value handling by default; subclasses override to support null_value.
@Override
public Filter nullValueFilter() {
return null;
}
/**
 * Merges another mapper into this one. First records conflicts for every
 * setting that cannot change on a live mapping (index/store/doc-values/term
 * vectors/analyzer/similarity/...); then, unless the merge is a simulation,
 * applies the settings that are allowed to change (norms, boost, copy_to,
 * search analyzer, field data settings).
 */
@Override
public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException {
if (!this.getClass().equals(mergeWith.getClass())) {
String mergedType = mergeWith.getClass().getSimpleName();
if (mergeWith instanceof AbstractFieldMapper) {
mergedType = ((AbstractFieldMapper) mergeWith).contentType();
}
mergeResult.addConflict("mapper [" + names.fullName() + "] of different type, current_type [" + contentType() + "], merged_type [" + mergedType + "]");
// different types, return
return;
}
AbstractFieldMapper fieldMergeWith = (AbstractFieldMapper) mergeWith;
boolean indexed = fieldType.indexOptions() != IndexOptions.NONE;
boolean mergeWithIndexed = fieldMergeWith.fieldType().indexOptions() != IndexOptions.NONE;
if (indexed != mergeWithIndexed || this.fieldType().tokenized() != fieldMergeWith.fieldType().tokenized()) {
mergeResult.addConflict("mapper [" + names.fullName() + "] has different index values");
}
if (this.fieldType().stored() != fieldMergeWith.fieldType().stored()) {
mergeResult.addConflict("mapper [" + names.fullName() + "] has different store values");
}
if (!this.hasDocValues() && fieldMergeWith.hasDocValues()) {
// don't add conflict if this mapper has doc values while the mapper to merge doesn't since doc values are implicitly set
// when the doc_values field data format is configured
mergeResult.addConflict("mapper [" + names.fullName() + "] has different " + TypeParsers.DOC_VALUES + " values");
}
// Norms can be disabled but never re-enabled on an existing field.
if (this.fieldType().omitNorms() && !fieldMergeWith.fieldType.omitNorms()) {
mergeResult.addConflict("mapper [" + names.fullName() + "] cannot enable norms (`norms.enabled`)");
}
if (this.fieldType().tokenized() != fieldMergeWith.fieldType().tokenized()) {
mergeResult.addConflict("mapper [" + names.fullName() + "] has different tokenize values");
}
if (this.fieldType().storeTermVectors() != fieldMergeWith.fieldType().storeTermVectors()) {
mergeResult.addConflict("mapper [" + names.fullName() + "] has different store_term_vector values");
}
if (this.fieldType().storeTermVectorOffsets() != fieldMergeWith.fieldType().storeTermVectorOffsets()) {
mergeResult.addConflict("mapper [" + names.fullName() + "] has different store_term_vector_offsets values");
}
if (this.fieldType().storeTermVectorPositions() != fieldMergeWith.fieldType().storeTermVectorPositions()) {
mergeResult.addConflict("mapper [" + names.fullName() + "] has different store_term_vector_positions values");
}
if (this.fieldType().storeTermVectorPayloads() != fieldMergeWith.fieldType().storeTermVectorPayloads()) {
mergeResult.addConflict("mapper [" + names.fullName() + "] has different store_term_vector_payloads values");
}
// null and "default"-named index analyzers both mean the default is used
if (this.indexAnalyzer == null || "default".equals(this.indexAnalyzer.name())) {
if (fieldMergeWith.indexAnalyzer != null && !"default".equals(fieldMergeWith.indexAnalyzer.name())) {
mergeResult.addConflict("mapper [" + names.fullName() + "] has different analyzer");
}
} else if (fieldMergeWith.indexAnalyzer == null || "default".equals(fieldMergeWith.indexAnalyzer.name())) {
mergeResult.addConflict("mapper [" + names.fullName() + "] has different analyzer");
} else if (!this.indexAnalyzer.name().equals(fieldMergeWith.indexAnalyzer.name())) {
mergeResult.addConflict("mapper [" + names.fullName() + "] has different analyzer");
}
if (!this.names().equals(fieldMergeWith.names())) {
mergeResult.addConflict("mapper [" + names.fullName() + "] has different index_name");
}
if (this.similarity == null) {
if (fieldMergeWith.similarity() != null) {
mergeResult.addConflict("mapper [" + names.fullName() + "] has different similarity");
}
} else if (fieldMergeWith.similarity() == null) {
mergeResult.addConflict("mapper [" + names.fullName() + "] has different similarity");
} else if (!this.similarity().equals(fieldMergeWith.similarity())) {
mergeResult.addConflict("mapper [" + names.fullName() + "] has different similarity");
}
multiFields.merge(mergeWith, mergeResult);
if (!mergeResult.simulate()) {
// apply changeable values
// fieldType is frozen, so create an unfrozen copy, mutate, and re-freeze.
this.fieldType = new FieldType(this.fieldType);
this.fieldType.setOmitNorms(fieldMergeWith.fieldType.omitNorms());
this.fieldType.freeze();
this.boost = fieldMergeWith.boost;
this.normsLoading = fieldMergeWith.normsLoading;
this.copyTo = fieldMergeWith.copyTo;
if (fieldMergeWith.searchAnalyzer != null) {
this.searchAnalyzer = fieldMergeWith.searchAnalyzer;
}
if (fieldMergeWith.customFieldDataSettings != null) {
if (!Objects.equal(fieldMergeWith.customFieldDataSettings, this.customFieldDataSettings)) {
this.customFieldDataSettings = fieldMergeWith.customFieldDataSettings;
this.fieldDataType = new FieldDataType(defaultFieldDataType().getType(),
ImmutableSettings.builder().put(defaultFieldDataType().getSettings()).put(this.customFieldDataSettings)
);
}
}
}
}
/**
 * Serializes this mapper as {@code { "<name>": { ...body... } }}, delegating
 * the body to {@link #doXContentBody}.
 */
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(names.name());
boolean includeDefaults = params.paramAsBoolean("include_defaults", false);
doXContentBody(builder, includeDefaults, params);
return builder.endObject();
}
/**
 * Writes the mapping body for this field. Each setting is emitted only when it
 * differs from the default, unless {@code includeDefaults} forces full output.
 */
protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException {
builder.field("type", contentType());
// index_name is a pre-2.x concept; only emitted for old indices.
if (indexCreatedBefore2x && (includeDefaults || !names.name().equals(names.indexNameClean()))) {
builder.field("index_name", names.indexNameClean());
}
if (includeDefaults || boost != 1.0f) {
builder.field("boost", boost);
}
FieldType defaultFieldType = defaultFieldType();
boolean indexed = fieldType.indexOptions() != IndexOptions.NONE;
boolean defaultIndexed = defaultFieldType.indexOptions() != IndexOptions.NONE;
if (includeDefaults || indexed != defaultIndexed ||
fieldType.tokenized() != defaultFieldType.tokenized()) {
builder.field("index", indexTokenizeOptionToString(indexed, fieldType.tokenized()));
}
if (includeDefaults || fieldType.stored() != defaultFieldType.stored()) {
builder.field("store", fieldType.stored());
}
doXContentDocValues(builder, includeDefaults);
if (includeDefaults || fieldType.storeTermVectors() != defaultFieldType.storeTermVectors()) {
builder.field("term_vector", termVectorOptionsToString(fieldType));
}
if (includeDefaults || fieldType.omitNorms() != defaultFieldType.omitNorms() || normsLoading != null) {
builder.startObject("norms");
if (includeDefaults || fieldType.omitNorms() != defaultFieldType.omitNorms()) {
builder.field("enabled", !fieldType.omitNorms());
}
if (normsLoading != null) {
builder.field(Loading.KEY, normsLoading);
}
builder.endObject();
}
if (indexed && (includeDefaults || fieldType.indexOptions() != defaultFieldType.indexOptions())) {
builder.field("index_options", indexOptionToString(fieldType.indexOptions()));
}
doXContentAnalyzers(builder, includeDefaults);
if (similarity() != null) {
builder.field("similarity", similarity().name());
} else if (includeDefaults) {
builder.field("similarity", SimilarityLookupService.DEFAULT_SIMILARITY);
}
// TreeMap keeps fielddata settings in a deterministic (sorted) order.
TreeMap<String, Object> orderedFielddataSettings = new TreeMap<>();
if (customFieldDataSettings != null) {
orderedFielddataSettings.putAll(customFieldDataSettings.getAsMap());
builder.field("fielddata", orderedFielddataSettings);
} else if (includeDefaults) {
orderedFielddataSettings.putAll(fieldDataType.getSettings().getAsMap());
builder.field("fielddata", orderedFielddataSettings);
}
multiFields.toXContent(builder, params);
if (copyTo != null) {
copyTo.toXContent(builder, params);
}
}
/**
 * Emits "analyzer" (and "search_analyzer" when it differs). Analyzers named
 * "default" or starting with "_" are treated as implicit and skipped unless
 * defaults are requested.
 */
protected void doXContentAnalyzers(XContentBuilder builder, boolean includeDefaults) throws IOException {
if (indexAnalyzer == null) {
if (includeDefaults) {
builder.field("analyzer", "default");
}
} else if (includeDefaults || indexAnalyzer.name().startsWith("_") == false && indexAnalyzer.name().equals("default") == false) {
builder.field("analyzer", indexAnalyzer.name());
// NOTE(review): assumes searchAnalyzer is non-null whenever indexAnalyzer is —
// confirm the constructor invariants guarantee this.
if (searchAnalyzer.name().equals(indexAnalyzer.name()) == false) {
builder.field("search_analyzer", searchAnalyzer.name());
}
}
}
/** Emits "doc_values" when it was explicitly configured (or defaults are requested). */
protected void doXContentDocValues(XContentBuilder builder, boolean includeDefaults) throws IOException {
if (includeDefaults || docValues != null) {
builder.field(TypeParsers.DOC_VALUES, hasDocValues());
}
}
/**
 * Maps a Lucene {@link IndexOptions} value to its mapping-JSON string form.
 *
 * @throws ElasticsearchIllegalArgumentException for unrecognized options
 */
protected static String indexOptionToString(IndexOptions indexOption) {
    switch (indexOption) {
        case DOCS:
            return TypeParsers.INDEX_OPTIONS_DOCS;
        case DOCS_AND_FREQS:
            return TypeParsers.INDEX_OPTIONS_FREQS;
        case DOCS_AND_FREQS_AND_POSITIONS:
            return TypeParsers.INDEX_OPTIONS_POSITIONS;
        case DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS:
            return TypeParsers.INDEX_OPTIONS_OFFSETS;
        default:
            throw new ElasticsearchIllegalArgumentException("Unknown IndexOptions [" + indexOption + "]");
    }
}
/**
 * Renders a {@link FieldType}'s term-vector configuration as the mapping string:
 * "no", "yes", "with_offsets", or "with[_positions][_offsets][_payloads]".
 */
public static String termVectorOptionsToString(FieldType fieldType) {
    if (fieldType.storeTermVectors() == false) {
        return "no";
    }
    final boolean offsets = fieldType.storeTermVectorOffsets();
    final boolean positions = fieldType.storeTermVectorPositions();
    if (!offsets && !positions) {
        return "yes";
    }
    if (offsets && !positions) {
        return "with_offsets";
    }
    // positions are stored; append each enabled sub-option in fixed order
    StringBuilder option = new StringBuilder("with");
    if (positions) {
        option.append("_positions");
    }
    if (offsets) {
        option.append("_offsets");
    }
    if (fieldType.storeTermVectorPayloads()) {
        option.append("_payloads");
    }
    return option.toString();
}
/**
 * Renders the index/tokenize pair as the mapping string:
 * "no" when not indexed, otherwise "analyzed" or "not_analyzed".
 */
protected static String indexTokenizeOptionToString(boolean indexed, boolean tokenized) {
    if (!indexed) {
        return "no";
    }
    return tokenized ? "analyzed" : "not_analyzed";
}
/** The mapping "type" name for this mapper (e.g. written by doXContentBody). */
protected abstract String contentType();
/** Releases resources held by multi-field sub-mappers. */
@Override
public void close() {
multiFields.close();
}
// Defaults for capability flags; numeric mappers override isNumeric().
@Override
public boolean isNumeric() {
return false;
}
@Override
public boolean isSortable() {
return true;
}
@Override
public boolean supportsNullValue() {
return true;
}
/** Returns the configured norms loading, falling back to the given default. */
@Override
public Loading normsLoading(Loading defaultLoading) {
return normsLoading == null ? defaultLoading : normsLoading;
}
/**
 * Holds the sub-mappers of a multi-field ("fields" in the mapping) together
 * with the path type used when building their names.
 */
public static class MultiFields {
/** An empty, shared multi-fields holder with the default path type. */
public static MultiFields empty() {
return new MultiFields(Defaults.PATH_TYPE, ImmutableOpenMap.<String, Mapper>of());
}
/** Collects sub-mapper builders and materializes them into a MultiFields. */
public static class Builder {
private final ImmutableOpenMap.Builder<String, Mapper.Builder> mapperBuilders = ImmutableOpenMap.builder();
private ContentPath.Type pathType = Defaults.PATH_TYPE;
public Builder pathType(ContentPath.Type pathType) {
this.pathType = pathType;
return this;
}
public Builder add(Mapper.Builder builder) {
mapperBuilders.put(builder.name(), builder);
return this;
}
/**
 * Builds the sub-mappers under the main field's path, restoring the
 * context path and path type afterwards.
 */
@SuppressWarnings("unchecked")
public MultiFields build(AbstractFieldMapper.Builder mainFieldBuilder, BuilderContext context) {
if (pathType == Defaults.PATH_TYPE && mapperBuilders.isEmpty()) {
return empty();
} else if (mapperBuilders.isEmpty()) {
return new MultiFields(pathType, ImmutableOpenMap.<String, Mapper>of());
} else {
ContentPath.Type origPathType = context.path().pathType();
context.path().pathType(pathType);
context.path().add(mainFieldBuilder.name());
ImmutableOpenMap.Builder mapperBuilders = this.mapperBuilders;
// NOTE(review): this replaces values in the same builder being iterated;
// the key set does not change, but confirm the underlying map tolerates
// in-place replacement during iteration.
for (ObjectObjectCursor<String, Mapper.Builder> cursor : this.mapperBuilders) {
String key = cursor.key;
Mapper.Builder value = cursor.value;
mapperBuilders.put(key, value.build(context));
}
context.path().remove();
context.path().pathType(origPathType);
ImmutableOpenMap.Builder<String, Mapper> mappers = mapperBuilders.cast();
return new MultiFields(pathType, mappers.build());
}
}
}
private final ContentPath.Type pathType;
// volatile: replaced wholesale by merge() and read by parse()/traverse().
private volatile ImmutableOpenMap<String, Mapper> mappers;
public MultiFields(ContentPath.Type pathType, ImmutableOpenMap<String, Mapper> mappers) {
this.pathType = pathType;
this.mappers = mappers;
// we disable the all in multi-field mappers
for (ObjectCursor<Mapper> cursor : mappers.values()) {
Mapper mapper = cursor.value;
if (mapper instanceof AllFieldMapper.IncludeInAll) {
((AllFieldMapper.IncludeInAll) mapper).unsetIncludeInAll();
}
}
}
/** Parses the current value into every sub-mapper, under the main field's path. */
public void parse(AbstractFieldMapper mainField, ParseContext context) throws IOException {
if (mappers.isEmpty()) {
return;
}
context = context.createMultiFieldContext();
ContentPath.Type origPathType = context.path().pathType();
context.path().pathType(pathType);
context.path().add(mainField.name());
for (ObjectCursor<Mapper> cursor : mappers.values()) {
cursor.value.parse(context);
}
context.path().remove();
context.path().pathType(origPathType);
}
// No need for locking, because locking is taken care of in ObjectMapper#merge and DocumentMapper#merge
/**
 * Merges the other mapper's sub-fields into this one: new sub-fields are added
 * (unless simulating), existing ones are merged recursively.
 */
public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException {
AbstractFieldMapper mergeWithMultiField = (AbstractFieldMapper) mergeWith;
List<FieldMapper<?>> newFieldMappers = null;
ImmutableOpenMap.Builder<String, Mapper> newMappersBuilder = null;
for (ObjectCursor<Mapper> cursor : mergeWithMultiField.multiFields.mappers.values()) {
Mapper mergeWithMapper = cursor.value;
Mapper mergeIntoMapper = mappers.get(mergeWithMapper.name());
if (mergeIntoMapper == null) {
// no mapping, simply add it if not simulating
if (!mergeResult.simulate()) {
// we disable the all in multi-field mappers
if (mergeWithMapper instanceof AllFieldMapper.IncludeInAll) {
((AllFieldMapper.IncludeInAll) mergeWithMapper).unsetIncludeInAll();
}
if (newMappersBuilder == null) {
newMappersBuilder = ImmutableOpenMap.builder(mappers);
}
newMappersBuilder.put(mergeWithMapper.name(), mergeWithMapper);
if (mergeWithMapper instanceof AbstractFieldMapper) {
if (newFieldMappers == null) {
newFieldMappers = new ArrayList<>(2);
}
newFieldMappers.add((FieldMapper) mergeWithMapper);
}
}
} else {
mergeIntoMapper.merge(mergeWithMapper, mergeResult);
}
}
// first add all field mappers
if (newFieldMappers != null) {
mergeResult.addFieldMappers(newFieldMappers);
}
// now publish mappers
if (newMappersBuilder != null) {
mappers = newMappersBuilder.build();
}
}
public void traverse(FieldMapperListener fieldMapperListener) {
for (ObjectCursor<Mapper> cursor : mappers.values()) {
cursor.value.traverse(fieldMapperListener);
}
}
public void close() {
for (ObjectCursor<Mapper> cursor : mappers.values()) {
cursor.value.close();
}
}
/** Serializes the sub-fields sorted by name for a stable mapping output. */
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
if (pathType != Defaults.PATH_TYPE) {
builder.field("path", pathType.name().toLowerCase(Locale.ROOT));
}
if (!mappers.isEmpty()) {
// sort the mappers so we get consistent serialization format
Mapper[] sortedMappers = mappers.values().toArray(Mapper.class);
Arrays.sort(sortedMappers, new Comparator<Mapper>() {
@Override
public int compare(Mapper o1, Mapper o2) {
return o1.name().compareTo(o2.name());
}
});
builder.startObject("fields");
for (Mapper mapper : sortedMappers) {
mapper.toXContent(builder, params);
}
builder.endObject();
}
return builder;
}
}
/**
 * Represents a list of fields with optional boost factor where the current field should be copied to.
 * Immutable; instances are built via {@link Builder}.
 */
public static class CopyTo {

    private final ImmutableList<String> copyToFields;

    private CopyTo(ImmutableList<String> copyToFields) {
        this.copyToFields = copyToFields;
    }

    /**
     * Creates instances of the fields that the current field should be copied to.
     * Skipped entirely when we are already parsing inside a copy_to pass, which
     * prevents recursive copying.
     */
    public void parse(ParseContext context) throws IOException {
        if (!context.isWithinCopyTo() && copyToFields.isEmpty() == false) {
            context = context.createCopyToContext();
            for (String field : copyToFields) {
                // In case of a hierarchy of nested documents, we need to figure out
                // which document the field should go to
                Document targetDoc = null;
                for (Document doc = context.doc(); doc != null; doc = doc.getParent()) {
                    if (field.startsWith(doc.getPrefix())) {
                        targetDoc = doc;
                        break;
                    }
                }
                // The root document has an empty prefix, so the walk always terminates.
                assert targetDoc != null;
                final ParseContext copyToContext;
                if (targetDoc == context.doc()) {
                    copyToContext = context;
                } else {
                    copyToContext = context.switchDoc(targetDoc);
                }
                parse(field, copyToContext);
            }
        }
    }

    /**
     * Serializes the copy_to configuration as a "copy_to" array; emits nothing
     * when no target fields are configured.
     */
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        if (!copyToFields.isEmpty()) {
            builder.startArray("copy_to");
            for (String field : copyToFields) {
                builder.value(field);
            }
            builder.endArray();
        }
        return builder;
    }

    /** Accumulates copy_to target field names and builds an immutable {@link CopyTo}. */
    public static class Builder {
        private final ImmutableList.Builder<String> copyToBuilders = ImmutableList.builder();

        /** Adds one target field; returns this builder for chaining. */
        public Builder add(String field) {
            copyToBuilders.add(field);
            return this;
        }

        public CopyTo build() {
            return new CopyTo(copyToBuilders.build());
        }
    }

    /** Returns the configured copy_to target field names (possibly empty). */
    public ImmutableList<String> copyToFields() {
        return copyToFields;
    }

    /**
     * Creates an copy of the current field with given field name and boost.
     * If the target field is already mapped, its mapper parses the value directly;
     * otherwise the value is parsed dynamically and the resulting mapping update
     * is propagated up the object hierarchy to the root.
     */
    public void parse(String field, ParseContext context) throws IOException {
        FieldMappers mappers = context.docMapper().mappers().indexName(field);
        if (mappers != null && !mappers.isEmpty()) {
            mappers.mapper().parse(context);
        } else {
            // The path of the dest field might be completely different from the current one so we need to reset it
            context = context.overridePath(new ContentPath(0));
            ObjectMapper mapper = context.root();
            String objectPath = "";
            String fieldPath = field;
            int posDot = field.lastIndexOf('.');
            if (posDot > 0) {
                // Split "a.b.c" into the owning object path "a.b" and leaf name "c".
                objectPath = field.substring(0, posDot);
                context.path().add(objectPath);
                mapper = context.docMapper().objectMappers().get(objectPath);
                fieldPath = field.substring(posDot + 1);
            }
            if (mapper == null) {
                //TODO: Create an object dynamically?
                throw new MapperParsingException("attempt to copy value to non-existing object [" + field + "]");
            }
            ObjectMapper update = mapper.parseDynamicValue(context, fieldPath, context.parser().currentToken());
            assert update != null; // we are parsing a dynamic value so we necessarily created a new mapping
            // propagate the update to the root: wrap the update in each ancestor,
            // walking one path segment up per iteration until objectPath is empty
            while (objectPath.length() > 0) {
                String parentPath = "";
                ObjectMapper parent = context.root();
                posDot = objectPath.lastIndexOf('.');
                if (posDot > 0) {
                    parentPath = objectPath.substring(0, posDot);
                    parent = context.docMapper().objectMappers().get(parentPath);
                }
                if (parent == null) {
                    throw new ElasticsearchIllegalStateException("[" + objectPath + "] has no parent for path [" + parentPath + "]");
                }
                update = parent.mappingUpdate(update);
                objectPath = parentPath;
            }
            context.addDynamicMappingsUpdate((RootObjectMapper) update);
        }
    }
}
/**
 * Returns if this field is only generated when indexing. For example, the field of type token_count.
 * This base implementation reports {@code false}; field types whose value is
 * derived at index time are expected to override it.
 */
@Override
public boolean isGenerated() {
    return false;
}
/**
 * Builds text-field statistics from the given Lucene terms for this field.
 *
 * @param terms  per-field term statistics read from the index
 * @param maxDoc total number of documents in the index segment/reader
 */
@Override
public FieldStats stats(Terms terms, int maxDoc) throws IOException {
    final int docCount = terms.getDocCount();
    final long sumDocFreq = terms.getSumDocFreq();
    final long sumTotalTermFreq = terms.getSumTotalTermFreq();
    return new FieldStats.Text(maxDoc, docCount, sumDocFreq, sumTotalTermFreq, terms.getMin(), terms.getMax());
}
}
|
|
/*
* Copyright (c) 2008-2021, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.durableexecutor;
import com.hazelcast.cluster.Member;
import com.hazelcast.config.Config;
import com.hazelcast.config.DurableExecutorConfig;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.core.HazelcastInstanceAware;
import com.hazelcast.cp.IAtomicLong;
import com.hazelcast.cp.ICountDownLatch;
import com.hazelcast.durableexecutor.impl.DistributedDurableExecutorService;
import com.hazelcast.durableexecutor.impl.DurableExecutorContainer;
import com.hazelcast.executor.ExecutorServiceTestSupport;
import com.hazelcast.internal.partition.InternalPartitionService;
import com.hazelcast.partition.PartitionAware;
import com.hazelcast.spi.impl.NodeEngineImpl;
import com.hazelcast.spi.properties.ClusterProperty;
import com.hazelcast.test.HazelcastParallelClassRunner;
import com.hazelcast.test.TestHazelcastInstanceFactory;
import com.hazelcast.test.annotation.ParallelJVMTest;
import com.hazelcast.test.annotation.QuickTest;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.UUID;
import java.util.concurrent.Callable;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Consumer;
import static com.hazelcast.durableexecutor.impl.DurableExecutorServiceHelper.getDurableExecutorContainer;
import static com.hazelcast.internal.metrics.MetricDescriptorConstants.DURABLE_EXECUTOR_PREFIX;
import static com.hazelcast.scheduledexecutor.impl.ScheduledExecutorServiceBasicTest.assertMetricsCollected;
import static com.hazelcast.scheduledexecutor.impl.ScheduledExecutorServiceBasicTest.collectMetrics;
import static com.hazelcast.test.Accessors.getNodeEngineImpl;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
/**
 * Functional tests for {@link DurableExecutorService}: unsupported bulk
 * operations, ring-buffer capacity, cross-node future callbacks, key-owner
 * routing, shutdown semantics, and statistics/metrics collection.
 *
 * Fix: {@code testExecuteOnKeyOwner} previously ignored the boolean returned
 * by {@code ICountDownLatch.await(...)}, so the test passed silently even if
 * the task never ran on the key owner; it now asserts the await succeeded.
 */
@RunWith(HazelcastParallelClassRunner.class)
@Category({QuickTest.class, ParallelJVMTest.class})
public class DurableExecutorServiceTest extends ExecutorServiceTestSupport {

    private static final int NODE_COUNT = 3;
    private static final int TASK_COUNT = 1000;

    // invokeAll/invokeAny are not supported by the durable executor and must throw.
    @Test(expected = UnsupportedOperationException.class)
    public void testInvokeAll() throws Exception {
        HazelcastInstance instance = createHazelcastInstance(smallInstanceConfig());
        DurableExecutorService service = instance.getDurableExecutorService(randomString());
        List<BasicTestCallable> callables = Collections.emptyList();
        service.invokeAll(callables);
    }

    @Test(expected = UnsupportedOperationException.class)
    public void testInvokeAll_WithTimeout() throws Exception {
        HazelcastInstance instance = createHazelcastInstance(smallInstanceConfig());
        DurableExecutorService service = instance.getDurableExecutorService(randomString());
        List<BasicTestCallable> callables = Collections.emptyList();
        service.invokeAll(callables, 1, TimeUnit.SECONDS);
    }

    @Test(expected = UnsupportedOperationException.class)
    public void testInvokeAny() throws Exception {
        HazelcastInstance instance = createHazelcastInstance(smallInstanceConfig());
        DurableExecutorService service = instance.getDurableExecutorService(randomString());
        List<BasicTestCallable> callables = Collections.emptyList();
        service.invokeAny(callables);
    }

    @Test(expected = UnsupportedOperationException.class)
    public void testInvokeAny_WithTimeout() throws Exception {
        HazelcastInstance instance = createHazelcastInstance(smallInstanceConfig());
        DurableExecutorService service = instance.getDurableExecutorService(randomString());
        List<BasicTestCallable> callables = Collections.emptyList();
        service.invokeAny(callables, 1, TimeUnit.SECONDS);
    }

    @Test
    public void testDestroyCleansAllContainers() throws Exception {
        String executorName = randomMapName();
        TestHazelcastInstanceFactory factory = createHazelcastInstanceFactory(NODE_COUNT);
        HazelcastInstance[] instances = factory.newInstances(smallInstanceConfig());
        DurableExecutorService service = instances[0].getDurableExecutorService(executorName);
        InternalPartitionService partitionService = getNodeEngineImpl(instances[0]).getPartitionService();
        List<Future<String>> futures = new ArrayList<>(TASK_COUNT);
        for (int i = 0; i < TASK_COUNT; i++) {
            futures.add(service.submit(new DummyCallable()));
        }
        for (Future<String> future : futures) {
            future.get();
        }
        // wait for all backup operations to complete
        assertTrueEventually(() -> {
            for (HazelcastInstance instance : instances) {
                NodeEngineImpl ne = getNodeEngineImpl(instance);
                DistributedDurableExecutorService internalService = ne.getService(DistributedDurableExecutorService.SERVICE_NAME);
                for (int partitionId = 0; partitionId < partitionService.getPartitionCount(); partitionId++) {
                    DurableExecutorContainer container = getDurableExecutorContainer(internalService, partitionId, executorName);
                    if (container != null) {
                        assertEquals(0, container.getRingBuffer().getTaskSize());
                    }
                }
            }
        });
        service.destroy();
        // after destroy, no member may still hold a container for this executor
        assertTrueEventually(() -> {
            for (HazelcastInstance instance : instances) {
                NodeEngineImpl ne = getNodeEngineImpl(instance);
                DistributedDurableExecutorService internalService = ne.getService(DistributedDurableExecutorService.SERVICE_NAME);
                boolean allEmpty = true;
                StringBuilder failMessage = new StringBuilder();
                for (int partitionId = 0; partitionId < partitionService.getPartitionCount(); partitionId++) {
                    DurableExecutorContainer container = getDurableExecutorContainer(internalService, partitionId, executorName);
                    if (container != null) {
                        failMessage.append(String.format("Partition %d owned by %s on %s\n",
                                partitionId, partitionService.getPartition(partitionId).getOwnerOrNull(), instance));
                        allEmpty = false;
                    }
                }
                assertTrue(String.format("Some partitions have non-null containers for executor %s:\n%s",
                        executorName,
                        failMessage.toString()), allEmpty);
            }
        }, 30);
    }

    @Test
    public void testAwaitTermination() throws Exception {
        HazelcastInstance instance = createHazelcastInstance(smallInstanceConfig());
        DurableExecutorService service = instance.getDurableExecutorService(randomString());
        assertFalse(service.awaitTermination(1, TimeUnit.SECONDS));
    }

    @Test
    public void testFullRingBuffer() throws Exception {
        String name = randomString();
        String key = randomString();
        Config config = smallInstanceConfig();
        // capacity 1: the second submission for the same partition must be rejected
        config.getDurableExecutorConfig(name).setCapacity(1);
        HazelcastInstance instance = createHazelcastInstance(config);
        DurableExecutorService service = instance.getDurableExecutorService(name);
        service.submitToKeyOwner(new SleepingTask(100), key);
        DurableExecutorServiceFuture<String> future = service.submitToKeyOwner(new BasicTestCallable(), key);
        try {
            future.get();
            fail();
        } catch (ExecutionException e) {
            assertTrue(e.getCause() instanceof RejectedExecutionException);
        }
    }

    @Test
    public void test_registerCallback_beforeFutureIsCompletedOnOtherNode() throws Exception {
        TestHazelcastInstanceFactory factory = createHazelcastInstanceFactory(2);
        HazelcastInstance instance1 = factory.newHazelcastInstance(smallInstanceConfig());
        HazelcastInstance instance2 = factory.newHazelcastInstance(smallInstanceConfig());
        assertTrue(instance1.getCPSubsystem().getCountDownLatch("latch").trySetCount(1));
        String name = randomString();
        DurableExecutorService executorService = instance2.getDurableExecutorService(name);
        ICountDownLatchAwaitCallable task = new ICountDownLatchAwaitCallable("latch");
        String key = generateKeyOwnedBy(instance1);
        DurableExecutorServiceFuture<Boolean> future = executorService.submitToKeyOwner(task, key);
        CountingDownExecutionCallback<Boolean> callback = new CountingDownExecutionCallback<>(1);
        future.whenCompleteAsync(callback);
        instance1.getCPSubsystem().getCountDownLatch("latch").countDown();
        assertTrue(future.get());
        assertOpenEventually(callback.getLatch());
    }

    @Test
    public void test_registerCallback_afterFutureIsCompletedOnOtherNode() throws Exception {
        TestHazelcastInstanceFactory factory = createHazelcastInstanceFactory(2);
        HazelcastInstance instance1 = factory.newHazelcastInstance(smallInstanceConfig());
        HazelcastInstance instance2 = factory.newHazelcastInstance(smallInstanceConfig());
        String name = randomString();
        DurableExecutorService executorService = instance2.getDurableExecutorService(name);
        BasicTestCallable task = new BasicTestCallable();
        String key = generateKeyOwnedBy(instance1);
        DurableExecutorServiceFuture<String> future = executorService.submitToKeyOwner(task, key);
        assertEquals(BasicTestCallable.RESULT, future.get());
        CountingDownExecutionCallback<String> callback = new CountingDownExecutionCallback<>(1);
        future.whenCompleteAsync(callback);
        assertOpenEventually(callback.getLatch(), 10);
    }

    @Test
    public void test_registerCallback_multipleTimes_futureIsCompletedOnOtherNode() throws Exception {
        TestHazelcastInstanceFactory factory = createHazelcastInstanceFactory(2);
        HazelcastInstance instance1 = factory.newHazelcastInstance(smallInstanceConfig());
        HazelcastInstance instance2 = factory.newHazelcastInstance(smallInstanceConfig());
        assertTrue(instance1.getCPSubsystem().getCountDownLatch("latch").trySetCount(1));
        String name = randomString();
        DurableExecutorService executorService = instance2.getDurableExecutorService(name);
        ICountDownLatchAwaitCallable task = new ICountDownLatchAwaitCallable("latch");
        String key = generateKeyOwnedBy(instance1);
        DurableExecutorServiceFuture<Boolean> future = executorService.submitToKeyOwner(task, key);
        // both registrations must fire, hence a latch with count 2
        CountDownLatch latch = new CountDownLatch(2);
        CountingDownExecutionCallback<Boolean> callback = new CountingDownExecutionCallback<>(latch);
        future.whenCompleteAsync(callback);
        future.whenCompleteAsync(callback);
        instance1.getCPSubsystem().getCountDownLatch("latch").countDown();
        assertTrue(future.get());
        assertOpenEventually(latch, 10);
    }

    @Test
    public void testSubmitFailingCallableException_withExecutionCallback() {
        TestHazelcastInstanceFactory factory = createHazelcastInstanceFactory(1);
        HazelcastInstance instance = factory.newHazelcastInstance(smallInstanceConfig());
        DurableExecutorService service = instance.getDurableExecutorService(randomString());
        CountingDownExecutionCallback<String> callback = new CountingDownExecutionCallback<>(1);
        service.submit(new FailingTestTask()).whenCompleteAsync(callback);
        assertOpenEventually(callback.getLatch());
        assertTrue(callback.getResult() instanceof Throwable);
    }

    /* ############ submit runnable ############ */

    @Test
    public void testManagedContextAndLocal() throws Exception {
        Config config = smallInstanceConfig();
        config.addDurableExecutorConfig(new DurableExecutorConfig("test").setPoolSize(1));
        final AtomicBoolean initialized = new AtomicBoolean();
        config.setManagedContext(obj -> {
            if (obj instanceof RunnableWithManagedContext) {
                initialized.set(true);
            }
            return obj;
        });
        HazelcastInstance instance = createHazelcastInstance(config);
        DurableExecutorService executor = instance.getDurableExecutorService("test");
        RunnableWithManagedContext task = new RunnableWithManagedContext();
        executor.submit(task).get();
        assertTrue("The task should have been initialized by the ManagedContext", initialized.get());
    }

    @Test
    public void testExecuteOnKeyOwner() throws Exception {
        TestHazelcastInstanceFactory factory = createHazelcastInstanceFactory(2);
        HazelcastInstance instance1 = factory.newHazelcastInstance(smallInstanceConfig());
        HazelcastInstance instance2 = factory.newHazelcastInstance(smallInstanceConfig());
        String key = generateKeyOwnedBy(instance2);
        String instanceName = instance2.getName();
        ICountDownLatch latch = instance2.getCPSubsystem().getCountDownLatch(instanceName);
        latch.trySetCount(1);
        DurableExecutorService durableExecutorService = instance1.getDurableExecutorService(randomString());
        durableExecutorService.executeOnKeyOwner(new InstanceAsserterRunnable(instanceName), key);
        // FIX: assert the await result; previously the boolean return value was
        // discarded, so a 30s timeout (task never ran on the owner) passed silently.
        assertTrue(latch.await(30, TimeUnit.SECONDS));
    }

    @Test
    public void hazelcastInstanceAwareAndLocal() throws Exception {
        Config config = smallInstanceConfig();
        config.addDurableExecutorConfig(new DurableExecutorConfig("test").setPoolSize(1));
        HazelcastInstance instance = createHazelcastInstance(config);
        DurableExecutorService executor = instance.getDurableExecutorService("test");
        HazelcastInstanceAwareRunnable task = new HazelcastInstanceAwareRunnable();
        // if setHazelcastInstance() not called we expect a RuntimeException
        executor.submit(task).get();
    }

    @Test
    public void testExecuteMultipleNode() throws Exception {
        TestHazelcastInstanceFactory factory = createHazelcastInstanceFactory(NODE_COUNT);
        HazelcastInstance[] instances = factory.newInstances(smallInstanceConfig());
        for (int i = 0; i < NODE_COUNT; i++) {
            DurableExecutorService service = instances[i].getDurableExecutorService("testExecuteMultipleNode");
            int rand = new Random().nextInt(100);
            Future<Integer> future = service.submit(new IncrementAtomicLongRunnable("count"), rand);
            assertEquals(Integer.valueOf(rand), future.get(10, TimeUnit.SECONDS));
        }
        IAtomicLong count = instances[0].getCPSubsystem().getAtomicLong("count");
        assertEquals(NODE_COUNT, count.get());
    }

    @Test
    public void testSubmitToKeyOwnerRunnable() {
        TestHazelcastInstanceFactory factory = createHazelcastInstanceFactory(NODE_COUNT);
        HazelcastInstance[] instances = factory.newInstances(smallInstanceConfig());
        final AtomicInteger nullResponseCount = new AtomicInteger(0);
        final CountDownLatch responseLatch = new CountDownLatch(NODE_COUNT);
        Consumer<Object> callback = response -> {
            if (response == null) {
                nullResponseCount.incrementAndGet();
            }
            responseLatch.countDown();
        };
        for (int i = 0; i < NODE_COUNT; i++) {
            HazelcastInstance instance = instances[i];
            DurableExecutorService service = instance.getDurableExecutorService("testSubmitToKeyOwnerRunnable");
            Member localMember = instance.getCluster().getLocalMember();
            UUID uuid = localMember.getUuid();
            // each runnable runs on its own member, so the counter must stay 0
            Runnable runnable = new IncrementAtomicLongIfMemberUUIDNotMatchRunnable(uuid, "testSubmitToKeyOwnerRunnable");
            int key = findNextKeyForMember(instance, localMember);
            service.submitToKeyOwner(runnable, key).thenAccept(callback);
        }
        assertOpenEventually(responseLatch);
        assertEquals(0, instances[0].getCPSubsystem().getAtomicLong("testSubmitToKeyOwnerRunnable").get());
        assertEquals(NODE_COUNT, nullResponseCount.get());
    }

    /**
     * Submit a null task has to raise a NullPointerException.
     */
    @Test(expected = NullPointerException.class)
    @SuppressWarnings("ConstantConditions")
    public void submitNullTask() {
        DurableExecutorService executor = createSingleNodeDurableExecutorService("submitNullTask");
        executor.submit((Callable<?>) null);
    }

    /**
     * Run a basic task.
     */
    @Test
    public void testBasicTask() throws Exception {
        Callable<String> task = new BasicTestCallable();
        DurableExecutorService executor = createSingleNodeDurableExecutorService("testBasicTask");
        Future<String> future = executor.submit(task);
        assertEquals(future.get(), BasicTestCallable.RESULT);
    }

    @Test
    public void testSubmitMultipleNode() throws Exception {
        TestHazelcastInstanceFactory factory = createHazelcastInstanceFactory(NODE_COUNT);
        HazelcastInstance[] instances = factory.newInstances(smallInstanceConfig());
        for (int i = 0; i < NODE_COUNT; i++) {
            DurableExecutorService service = instances[i].getDurableExecutorService("testSubmitMultipleNode");
            Future<Long> future = service.submit(new IncrementAtomicLongCallable("testSubmitMultipleNode"));
            assertEquals(i + 1, (long) future.get());
        }
    }

    /* ############ submit callable ############ */

    @Test
    public void testSubmitToKeyOwnerCallable() throws Exception {
        TestHazelcastInstanceFactory factory = createHazelcastInstanceFactory(NODE_COUNT);
        HazelcastInstance[] instances = factory.newInstances(smallInstanceConfig());
        List<Future<Boolean>> futures = new ArrayList<>();
        for (int i = 0; i < NODE_COUNT; i++) {
            HazelcastInstance instance = instances[i];
            DurableExecutorService service = instance.getDurableExecutorService("testSubmitToKeyOwnerCallable");
            Member localMember = instance.getCluster().getLocalMember();
            int key = findNextKeyForMember(instance, localMember);
            Future<Boolean> future = service.submitToKeyOwner(new MemberUUIDCheckCallable(localMember.getUuid()), key);
            futures.add(future);
        }
        for (Future<Boolean> future : futures) {
            assertTrue(future.get(10, TimeUnit.SECONDS));
        }
    }

    @Test
    public void testSubmitToKeyOwnerCallable_withCallback() {
        TestHazelcastInstanceFactory factory = createHazelcastInstanceFactory(NODE_COUNT);
        HazelcastInstance[] instances = factory.newInstances(smallInstanceConfig());
        BooleanSuccessResponseCountingCallback callback = new BooleanSuccessResponseCountingCallback(NODE_COUNT);
        for (int i = 0; i < NODE_COUNT; i++) {
            HazelcastInstance instance = instances[i];
            DurableExecutorService service = instance.getDurableExecutorService("testSubmitToKeyOwnerCallable");
            Member localMember = instance.getCluster().getLocalMember();
            int key = findNextKeyForMember(instance, localMember);
            service.submitToKeyOwner(new MemberUUIDCheckCallable(localMember.getUuid()), key).thenAccept(callback);
        }
        assertOpenEventually(callback.getResponseLatch());
        assertEquals(NODE_COUNT, callback.getSuccessResponseCount());
    }

    @Test
    public void testIsDoneMethod() throws Exception {
        Callable<String> task = new BasicTestCallable();
        DurableExecutorService executor = createSingleNodeDurableExecutorService("isDoneMethod");
        Future<String> future = executor.submit(task);
        assertResult(future, BasicTestCallable.RESULT);
    }

    /**
     * Repeatedly runs tasks and check for isDone() status after get().
     * Test for the issue 129.
     */
    @Test
    public void testIsDoneMethodAfterGet() throws Exception {
        DurableExecutorService executor = createSingleNodeDurableExecutorService("isDoneMethodAfterGet");
        for (int i = 0; i < TASK_COUNT; i++) {
            Callable<String> task1 = new BasicTestCallable();
            Callable<String> task2 = new BasicTestCallable();
            Future<String> future1 = executor.submit(task1);
            Future<String> future2 = executor.submit(task2);
            assertResult(future2, BasicTestCallable.RESULT);
            assertResult(future1, BasicTestCallable.RESULT);
        }
    }

    @Test
    public void testMultipleFutureGetInvocations() throws Exception {
        Callable<String> task = new BasicTestCallable();
        DurableExecutorService executor = createSingleNodeDurableExecutorService("isTwoGetFromFuture");
        Future<String> future = executor.submit(task);
        assertResult(future, BasicTestCallable.RESULT);
        assertResult(future, BasicTestCallable.RESULT);
        assertResult(future, BasicTestCallable.RESULT);
        assertResult(future, BasicTestCallable.RESULT);
    }

    /* ############ future ############ */

    /** Asserts the future yields the expected value and reports done afterwards. */
    private void assertResult(Future<?> future, Object expected) throws Exception {
        assertEquals(future.get(), expected);
        assertTrue(future.isDone());
    }

    @Test
    public void testIssue292() {
        CountingDownExecutionCallback<Member> callback = new CountingDownExecutionCallback<>(1);
        createSingleNodeDurableExecutorService("testIssue292").submit(new MemberCheck()).whenCompleteAsync(callback);
        assertOpenEventually(callback.getLatch());
        assertTrue(callback.getResult() instanceof Member);
    }

    /**
     * Execute a task that is executing something else inside (nested execution).
     */
    @Test
    public void testNestedExecution() {
        Callable<String> task = new NestedExecutorTask();
        DurableExecutorService executor = createSingleNodeDurableExecutorService("testNestedExecution");
        Future<?> future = executor.submit(task);
        assertCompletesEventually(future);
    }

    /**
     * Shutdown-related method behaviour when the cluster is running.
     */
    @Test
    public void testShutdownBehaviour() {
        TestHazelcastInstanceFactory factory = createHazelcastInstanceFactory(2);
        HazelcastInstance instance1 = factory.newHazelcastInstance(smallInstanceConfig());
        factory.newHazelcastInstance(smallInstanceConfig());
        DurableExecutorService executor = instance1.getDurableExecutorService("testShutdownBehaviour");
        // fresh instance, is not shutting down
        assertFalse(executor.isShutdown());
        assertFalse(executor.isTerminated());
        executor.shutdown();
        assertTrue(executor.isShutdown());
        assertTrue(executor.isTerminated());
        // shutdownNow() should return an empty list and be ignored
        List<Runnable> pending = executor.shutdownNow();
        assertTrue(pending.isEmpty());
        assertTrue(executor.isShutdown());
        assertTrue(executor.isTerminated());
        // awaitTermination() should return immediately false
        try {
            boolean terminated = executor.awaitTermination(60L, TimeUnit.SECONDS);
            assertFalse(terminated);
        } catch (InterruptedException ie) {
            fail("InterruptedException");
        }
        assertTrue(executor.isShutdown());
        assertTrue(executor.isTerminated());
    }

    /**
     * Shutting down the cluster should act as the ExecutorService shutdown.
     */
    @Test(expected = RejectedExecutionException.class)
    public void testClusterShutdown() {
        ExecutorService executor = createSingleNodeDurableExecutorService("testClusterShutdown");
        shutdownNodeFactory();
        sleepSeconds(2);
        assertNotNull(executor);
        assertTrue(executor.isShutdown());
        assertTrue(executor.isTerminated());
        // new tasks must be rejected
        Callable<String> task = new BasicTestCallable();
        executor.submit(task);
    }

    @Test
    public void testStatsIssue2039() {
        Config config = smallInstanceConfig();
        String name = "testStatsIssue2039";
        config.addDurableExecutorConfig(new DurableExecutorConfig(name).setPoolSize(1).setCapacity(1));
        HazelcastInstance instance = createHazelcastInstance(config);
        DurableExecutorService executorService = instance.getDurableExecutorService(name);
        executorService.execute(new SleepLatchRunnable());
        assertOpenEventually(SleepLatchRunnable.startLatch, 30);
        // capacity is 1 and it is occupied by the sleeping task, so this submission is rejected
        Future<?> rejected = executorService.submit(new EmptyRunnable());
        try {
            rejected.get(1, TimeUnit.MINUTES);
        } catch (Exception e) {
            boolean isRejected = e.getCause() instanceof RejectedExecutionException;
            if (!isRejected) {
                fail(e.toString());
            }
        } finally {
            SleepLatchRunnable.sleepLatch.countDown();
        }
        // FIXME as soon as executorService.getLocalExecutorStats() is implemented
        //LocalExecutorStats stats = executorService.getLocalExecutorStats();
        //assertEquals(2, stats.getStartedTaskCount());
        //assertEquals(0, stats.getPendingTaskCount());
    }

    @Test
    public void testLongRunningCallable() throws Exception {
        TestHazelcastInstanceFactory factory = createHazelcastInstanceFactory(2);
        Config config = smallInstanceConfig();
        long callTimeoutMillis = 3000;
        config.setProperty(ClusterProperty.OPERATION_CALL_TIMEOUT_MILLIS.getName(), String.valueOf(callTimeoutMillis));
        HazelcastInstance hz1 = factory.newHazelcastInstance(config);
        HazelcastInstance hz2 = factory.newHazelcastInstance(config);
        String key = generateKeyOwnedBy(hz2);
        DurableExecutorService executor = hz1.getDurableExecutorService("test");
        // task sleeps 3x the operation call timeout; the future must still complete
        Future<Boolean> future = executor.submitToKeyOwner(new SleepingTask(MILLISECONDS.toSeconds(callTimeoutMillis) * 3), key);
        Boolean result = future.get(1, TimeUnit.MINUTES);
        assertTrue(result);
    }

    @Test
    public void durable_executor_collects_statistics_when_stats_enabled()
            throws ExecutionException, InterruptedException {
        // run task
        String executorName = "durable_executor";
        Config config = smallInstanceConfig();
        config.getDurableExecutorConfig(executorName)
                .setStatisticsEnabled(true);
        HazelcastInstance instance = createHazelcastInstance(config);
        DurableExecutorService executor = instance.getDurableExecutorService(executorName);
        executor.submit(new OneSecondSleepingTask()).get();
        // collect metrics
        Map<String, List<Long>> metrics = collectMetrics(DURABLE_EXECUTOR_PREFIX, instance);
        // check results
        assertMetricsCollected(metrics, 1000, 0,
                1, 1, 0, 1, 0);
    }

    @Test
    public void durable_executor_does_not_collect_statistics_when_stats_disabled()
            throws ExecutionException, InterruptedException {
        // run task
        String executorName = "durable_executor";
        Config config = smallInstanceConfig();
        config.getDurableExecutorConfig(executorName)
                .setStatisticsEnabled(false);
        HazelcastInstance instance = createHazelcastInstance(config);
        DurableExecutorService executor = instance.getDurableExecutorService(executorName);
        executor.submit(new OneSecondSleepingTask()).get();
        // collect metrics
        Map<String, List<Long>> metrics = collectMetrics(DURABLE_EXECUTOR_PREFIX, instance);
        // check results
        assertTrue("No metrics collection expected but " + metrics, metrics.isEmpty());
    }

    /** Counts down the latch named after the instance, but only when run on that instance. */
    private static class InstanceAsserterRunnable implements Runnable, HazelcastInstanceAware, Serializable {
        transient HazelcastInstance instance;
        String instanceName;

        InstanceAsserterRunnable(String instanceName) {
            this.instanceName = instanceName;
        }

        @Override
        public void run() {
            if (instanceName.equals(instance.getName())) {
                instance.getCPSubsystem().getCountDownLatch(instanceName).countDown();
            }
        }

        @Override
        public void setHazelcastInstance(HazelcastInstance hazelcastInstance) {
            instance = hazelcastInstance;
        }
    }

    /** No-op runnable used to observe ManagedContext initialization. */
    private static class RunnableWithManagedContext implements Runnable, Serializable {
        @Override
        public void run() {
        }
    }

    /** Fails at run time unless setHazelcastInstance() was invoked before execution. */
    static class HazelcastInstanceAwareRunnable implements Runnable, HazelcastInstanceAware, Serializable {
        private transient boolean initializeCalled = false;

        @Override
        public void run() {
            if (!initializeCalled) {
                throw new RuntimeException("The setHazelcastInstance should have been called");
            }
        }

        @Override
        public void setHazelcastInstance(HazelcastInstance hazelcastInstance) {
            initializeCalled = true;
        }
    }

    /** Blocks on the named CP countdown latch; returns whether it opened within 100s. */
    static class ICountDownLatchAwaitCallable implements Callable<Boolean>, HazelcastInstanceAware, Serializable {
        private final String name;
        private HazelcastInstance instance;

        ICountDownLatchAwaitCallable(String name) {
            this.name = name;
        }

        @Override
        public Boolean call()
                throws Exception {
            return instance.getCPSubsystem().getCountDownLatch(name).await(100, TimeUnit.SECONDS);
        }

        @Override
        public void setHazelcastInstance(HazelcastInstance instance) {
            this.instance = instance;
        }
    }

    /** Signals start, then blocks until released; static latches are reset per construction. */
    static class SleepLatchRunnable implements Runnable, Serializable, PartitionAware<String> {
        static CountDownLatch startLatch;
        static CountDownLatch sleepLatch;

        SleepLatchRunnable() {
            startLatch = new CountDownLatch(1);
            sleepLatch = new CountDownLatch(1);
        }

        @Override
        public void run() {
            startLatch.countDown();
            assertOpenEventually(sleepLatch);
        }

        @Override
        public String getPartitionKey() {
            return "key";
        }
    }

    /** Does nothing; pinned to the same partition ("key") as SleepLatchRunnable. */
    static class EmptyRunnable implements Runnable, PartitionAware<String>, Serializable {
        @Override
        public void run() {
        }

        @Override
        public String getPartitionKey() {
            return "key";
        }
    }

    /** Sleeps one second; used to generate measurable executor statistics. */
    static class OneSecondSleepingTask implements Runnable, Serializable {
        OneSecondSleepingTask() {
        }

        @Override
        public void run() {
            sleepSeconds(1);
        }
    }
}
|
|
package gov.cdc.epiinfo.cloud;
import android.app.Activity;
import android.content.ContentValues;
import android.content.Context;
import android.content.SharedPreferences;
import android.preference.PreferenceManager;
import android.util.Base64;

import com.android.volley.Request;
import com.android.volley.RequestQueue;
import com.android.volley.VolleyError;
import com.android.volley.toolbox.JsonObjectRequest;
import com.android.volley.toolbox.RequestFuture;
import com.android.volley.toolbox.Volley;

import org.apache.http.HttpHeaders;
import org.apache.http.HttpResponse;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpPut;
import org.apache.http.impl.client.DefaultHttpClient;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;

import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import java.util.concurrent.TimeUnit;

import gov.cdc.epiinfo.EpiDbHelper;
/**
 * {@link ICloudClient} implementation that synchronizes Epi Info records with a
 * CouchDB server through its HTTP document API.
 * <p>
 * The server URL and Basic-Auth credentials are read from shared preferences.
 * CouchDB revision tokens ({@code _rev}) are mirrored into the local database so
 * that subsequent updates and deletes can reference the current revision.
 */
public class CouchDbClient implements ICloudClient {

    /** CouchDB database name; lower-cased because CouchDB requires lowercase db names. */
    private String tableName;

    /** Base server URL without a trailing slash. */
    private String url;

    /** Pre-computed {@code "Basic ..."} authorization header value. */
    private String authHeader;

    private Context context;

    /** Local storage helper; also keeps the mirrored {@code _rev} column. */
    private EpiDbHelper dbHelper;

    /** Single shared Volley queue; creating a new queue per request leaks threads. */
    private RequestQueue requestQueue;

    /**
     * Builds a client for one form/table and ensures the matching CouchDB database exists.
     *
     * @param tableName Name of the form/table to sync (lower-cased internally).
     * @param dbHelper  Local database helper.
     * @param context   Context used for preferences and the request queue.
     */
    public CouchDbClient(String tableName, EpiDbHelper dbHelper, Context context) {
        this.context = context;
        this.tableName = tableName.toLowerCase();
        this.dbHelper = dbHelper;

        // Local column that mirrors CouchDB's revision token for each record.
        dbHelper.AddTextColumn("_rev");

        SharedPreferences sharedPref = PreferenceManager.getDefaultSharedPreferences(context);

        // NOTE(review): the endpoint is stored under the legacy "sftp_url" key — confirm.
        url = sharedPref.getString("sftp_url", "");
        if (url.endsWith("/")) {
            url = url.substring(0, url.lastIndexOf("/"));
        }

        String userName = sharedPref.getString("cloud_user_name", "");
        String password = sharedPref.getString("cloud_pwd", "");
        String auth = userName + ":" + password;
        byte[] encodedAuth = Base64.encode(auth.getBytes(), Base64.DEFAULT);
        authHeader = "Basic " + new String(encodedAuth);

        createTable();
    }

    /** Daily tasks are not supported by the CouchDB backend; always returns -1. */
    @Override
    public int getDailyTasks(Activity ctx, String deviceId) {
        return -1;
    }

    /**
     * Downloads every document of this table via {@code _all_docs?include_docs=true}.
     *
     * @return Array of documents, or {@code null} if the request failed.
     */
    public JSONArray getData(boolean downloadImages, boolean downloadMedia, EpiDbHelper dbHelper) {
        try {
            JSONObject response = GetResponse(url + "/" + tableName + "/_all_docs?include_docs=true", null, Request.Method.GET);
            if (response != null) {
                JSONArray rows = response.getJSONArray("rows");
                if (rows != null) {
                    // Unwrap each row's "doc" payload into a flat result array.
                    JSONArray results = new JSONArray();
                    for (int x = 0; x < rows.length(); x++) {
                        JSONObject item = (JSONObject) ((JSONObject) rows.get(x)).get("doc");
                        results.put(item);
                    }
                    return results;
                }
            }
        } catch (Exception ex) {
            // Best-effort download: log instead of silently swallowing the failure.
            ex.printStackTrace();
        }
        return null;
    }

    /**
     * Creates the CouchDB database for this table ({@code PUT /{db}}); any 2xx
     * status counts as success (including "already exists" handled server-side).
     *
     * @return {@code true} when the server accepted the request.
     */
    private boolean createTable() {
        try {
            HttpClient client = new DefaultHttpClient();
            HttpPut httpPut = new HttpPut(url + "/" + tableName);
            httpPut.setHeader("Accept", "application/json");
            httpPut.setHeader("Content-type", "application/json");
            httpPut.setHeader(HttpHeaders.AUTHORIZATION, authHeader);
            HttpResponse response = client.execute(httpPut);
            return response.getStatusLine().getStatusCode() < 300;
        } catch (Exception ex) {
            // Creation is best-effort at construction time; log the cause.
            ex.printStackTrace();
            return false;
        }
    }

    /**
     * Inserts (or, when {@code _rev} is present, updates) one record as a CouchDB
     * document keyed by the record's guid.
     *
     * @param values Record values; {@code "id"} is remapped to CouchDB's {@code "_id"}.
     * @return {@code true} if the server acknowledged the write with a new revision.
     */
    public boolean insertRecord(ContentValues values) {
        // CouchDB uses "_id" as the document key; drop purely local bookkeeping columns.
        String guid = values.getAsString("id");
        values.put("_id", guid);
        values.remove("id");
        if (values.containsKey("_updateStamp")) {
            values.remove("_updateStamp");
        }
        if (values.containsKey("_syncStatus")) {
            values.remove("_syncStatus");
        }

        // Build the JSON document, preserving numeric/boolean types where possible.
        JSONObject jsonObject = new JSONObject();
        try {
            for (String key : values.keySet()) {
                Object value = values.get(key);
                if (value != null) {
                    if (value instanceof Integer) {
                        jsonObject.put(key, value);
                    } else if (value instanceof Double) {
                        // Skip infinite doubles, which JSON cannot represent.
                        if (((Double) value) < Double.POSITIVE_INFINITY) {
                            jsonObject.put(key, value);
                        }
                    } else if (value instanceof Long) {
                        jsonObject.put(key, value);
                    } else if (value instanceof Boolean) {
                        jsonObject.put(key, value);
                    } else {
                        jsonObject.put(key, value.toString());
                    }
                }
            }
        } catch (JSONException e) {
            e.printStackTrace();
        }

        try {
            JSONObject response = GetResponse(url + "/" + tableName + "/" + guid, jsonObject, Request.Method.PUT);
            if (response != null) {
                // Mirror the new revision locally so later updates/deletes can use it.
                String rev = response.getString("rev");
                if (rev != null && !rev.equals("")) {
                    dbHelper.updateRevision(guid, rev);
                    return true;
                }
            }
        } catch (Exception e) {
            System.out.println("CouchDB error " + e.toString());
        }
        return false;
    }

    /**
     * Deletes a record's document; CouchDB requires the current revision token.
     *
     * @param recordId Document id (record guid).
     * @return {@code true} if the server confirmed the deletion.
     */
    public boolean deleteRecord(String recordId) {
        String rev = dbHelper.getFieldValue("_rev", recordId);
        if (rev != null && !rev.equals("")) {
            try {
                JSONObject response = GetResponse(url + "/" + tableName + "/" + recordId + "?rev=" + rev, null, Request.Method.DELETE);
                if (response != null) {
                    return response.getBoolean("ok");
                }
            } catch (Exception ex) {
                // Best-effort delete: log instead of silently swallowing the failure.
                ex.printStackTrace();
            }
        }
        return false;
    }

    /**
     * Updates a record by re-PUTting it with its current revision token.
     *
     * @param recordId Document id (record guid).
     * @param values   New values; {@code "_rev"} is filled in from local storage if absent.
     * @return {@code true} if the write succeeded.
     */
    public boolean updateRecord(String recordId, ContentValues values) {
        if (!values.containsKey("_rev")) {
            String rev = dbHelper.getFieldValue("_rev", recordId);
            if (rev != null && !rev.equals("")) {
                values.put("_rev", rev);
            }
        }
        return insertRecord(values);
    }

    /**
     * Executes a synchronous JSON request against CouchDB with Basic auth.
     *
     * @param url    Full request URL.
     * @param json   Request body, or {@code null} for body-less methods.
     * @param method One of {@link Request.Method}.
     * @return Parsed JSON response, or {@code null} on failure or 10s timeout.
     */
    public JSONObject GetResponse(String url, JSONObject json, int method) {
        RequestFuture<JSONObject> future = RequestFuture.newFuture();
        JsonObjectRequest request = new JsonObjectRequest(method, url, json, future, future) {
            @Override
            public Map<String, String> getHeaders() {
                Map<String, String> params = new HashMap<>();
                params.put(HttpHeaders.AUTHORIZATION, authHeader);
                return params;
            }

            @Override
            protected VolleyError parseNetworkError(VolleyError volleyError) {
                if (volleyError != null && volleyError.networkResponse != null) {
                    // Surface the server's error payload instead of discarding it.
                    String error = new String(volleyError.networkResponse.data);
                    System.out.println("CouchDB error response: " + error);
                }
                return volleyError;
            }
        };

        // Reuse one queue for the lifetime of this client instead of leaking one per call.
        if (requestQueue == null) {
            requestQueue = Volley.newRequestQueue(context);
        }
        requestQueue.add(request);

        try {
            return future.get(10, TimeUnit.SECONDS);
        } catch (Exception e) {
            e.printStackTrace();
        }
        return null;
    }
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.configuration;
import java.io.Serializable;
import java.util.zip.Deflater;
import org.apache.ignite.IgniteSystemProperties;
import org.apache.ignite.internal.processors.cache.persistence.file.AsyncFileIOFactory;
import org.apache.ignite.internal.processors.cache.persistence.file.FileIOFactory;
import org.apache.ignite.internal.processors.cache.persistence.file.RandomAccessFileIOFactory;
import org.apache.ignite.internal.util.tostring.GridToStringInclude;
import org.apache.ignite.internal.util.typedef.internal.A;
import org.apache.ignite.internal.util.typedef.internal.S;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.mxbean.MetricsMxBean;
import org.jetbrains.annotations.Nullable;
import static org.apache.ignite.IgniteSystemProperties.IGNITE_DEFAULT_DATA_STORAGE_PAGE_SIZE;
import static org.apache.ignite.IgniteSystemProperties.IGNITE_USE_ASYNC_FILE_IO_FACTORY;
/**
* A durable memory configuration for an Apache Ignite node. The durable memory is a manageable off-heap based memory
* architecture that divides all expandable data regions into pages of fixed size
* (see {@link DataStorageConfiguration#getPageSize()}). An individual page can store one or many cache key-value entries
* that allows reusing the memory in the most efficient way and avoid memory fragmentation issues.
* <p>
* By default, the durable memory allocates a single expandable data region with default settings. All the caches that
* will be configured in an application will be mapped to this data region by default, thus, all the cache data will
* reside in that data region. Parameters of default data region can be changed by setting
* {@link DataStorageConfiguration#setDefaultDataRegionConfiguration(DataRegionConfiguration)}.
* Other data regions (except default) can be configured with
* {@link DataStorageConfiguration#setDataRegionConfigurations(DataRegionConfiguration...)}.
* <p>
* Data region can be used in memory-only mode, or in persistent mode, when memory is used as a caching layer for disk.
* Persistence for data region can be turned on with {@link DataRegionConfiguration#setPersistenceEnabled(boolean)}
* flag. To learn more about data regions refer to {@link DataRegionConfiguration} documentation.
* <p>Sample configuration below shows how to make 5 GB data regions the default one for Apache Ignite:</p>
* <pre>
* {@code
*
* <property name="dataStorageConfiguration">
* <bean class="org.apache.ignite.configuration.DataStorageConfiguration">
* <property name="systemCacheInitialSize" value="#{100L * 1024 * 1024}"/>
*
* <property name="defaultDataRegionConfiguration">
* <bean class="org.apache.ignite.configuration.DataRegionConfiguration">
* <property name="name" value="default_data_region"/>
* <property name="initialSize" value="#{5L * 1024 * 1024 * 1024}"/>
* </bean>
* </property>
* </bean>
* </property>
* }
* </pre>
*/
public class DataStorageConfiguration implements Serializable {
/** */
private static final long serialVersionUID = 0L;
/** Default data region start size (256 MB). */
public static final long DFLT_DATA_REGION_INITIAL_SIZE = 256L * 1024 * 1024;
/** Fraction of available memory to allocate for default DataRegion. */
private static final double DFLT_DATA_REGION_FRACTION = 0.2;
/** Default data region's size is 20% of physical memory available on current machine. */
public static final long DFLT_DATA_REGION_MAX_SIZE = Math.max(
(long)(DFLT_DATA_REGION_FRACTION * U.getTotalMemoryAvailable()),
DFLT_DATA_REGION_INITIAL_SIZE);
/** Default initial size of a memory chunk for the system cache (40 MB). */
private static final long DFLT_SYS_REG_INIT_SIZE = 40L * 1024 * 1024;
/** Default max size of a memory chunk for the system cache (100 MB). */
private static final long DFLT_SYS_REG_MAX_SIZE = 100L * 1024 * 1024;
/** Default memory page size. */
public static final int DFLT_PAGE_SIZE = 4 * 1024;
/** Max memory page size. */
public static final int MAX_PAGE_SIZE = 16 * 1024;
/** Min memory page size. */
public static final int MIN_PAGE_SIZE = 1024;
/** This name is assigned to default Dataregion if no user-defined default MemPlc is specified */
public static final String DFLT_DATA_REG_DEFAULT_NAME = "default";
/** */
public static final int DFLT_CHECKPOINT_FREQ = 180000;
/** Lock default wait time, 10 sec. */
public static final int DFLT_LOCK_WAIT_TIME = 10 * 1000;
/** */
public static final boolean DFLT_METRICS_ENABLED = false;
/** Default amount of sub intervals to calculate rate-based metric. */
public static final int DFLT_SUB_INTERVALS = 5;
/** Default length of interval over which rate-based metric is calculated. */
public static final int DFLT_RATE_TIME_INTERVAL_MILLIS = 60_000;
/** Default number of checkpoint threads. */
public static final int DFLT_CHECKPOINT_THREADS = 4;
/** Default checkpoint write order. */
public static final CheckpointWriteOrder DFLT_CHECKPOINT_WRITE_ORDER = CheckpointWriteOrder.SEQUENTIAL;
/** Default number of checkpoints to be kept in WAL after checkpoint is finished */
public static final int DFLT_WAL_HISTORY_SIZE = 20;
/** Default max size of WAL archive files, in bytes */
public static final long DFLT_WAL_ARCHIVE_MAX_SIZE = 1024 * 1024 * 1024;
/** */
public static final int DFLT_WAL_SEGMENTS = 10;
/** Default WAL file segment size, 64MBytes */
public static final int DFLT_WAL_SEGMENT_SIZE = 64 * 1024 * 1024;
/** Default wal mode. */
public static final WALMode DFLT_WAL_MODE = WALMode.LOG_ONLY;
/** Default thread local buffer size. */
public static final int DFLT_TLB_SIZE = 128 * 1024;
/** Default thread local buffer size. */
public static final int DFLT_WAL_BUFF_SIZE = DFLT_WAL_SEGMENT_SIZE / 4;
/** Default Wal flush frequency. */
public static final int DFLT_WAL_FLUSH_FREQ = 2000;
/** Default wal fsync delay. */
public static final int DFLT_WAL_FSYNC_DELAY = 1000;
/** Default wal record iterator buffer size. */
public static final int DFLT_WAL_RECORD_ITERATOR_BUFFER_SIZE = 64 * 1024 * 1024;
/** Default wal always write full pages. */
public static final boolean DFLT_WAL_ALWAYS_WRITE_FULL_PAGES = false;
/** Default wal directory. */
public static final String DFLT_WAL_PATH = "db/wal";
/** Default wal archive directory. */
public static final String DFLT_WAL_ARCHIVE_PATH = "db/wal/archive";
/** Default path (relative to working directory) of binary metadata folder */
public static final String DFLT_BINARY_METADATA_PATH = "db/binary_meta";
/** Default path (relative to working directory) of marshaller mappings folder */
public static final String DFLT_MARSHALLER_PATH = "db/marshaller";
/** Default write throttling enabled. */
public static final boolean DFLT_WRITE_THROTTLING_ENABLED = false;
/** Default wal compaction enabled. */
public static final boolean DFLT_WAL_COMPACTION_ENABLED = false;
/** Default wal compaction level. */
public static final int DFLT_WAL_COMPACTION_LEVEL = Deflater.BEST_SPEED;
/** Default compression algorithm for WAL page snapshot records. */
public static final DiskPageCompression DFLT_WAL_PAGE_COMPRESSION = DiskPageCompression.DISABLED;
/** @see IgniteSystemProperties#IGNITE_USE_ASYNC_FILE_IO_FACTORY */
public static final boolean DFLT_USE_ASYNC_FILE_IO_FACTORY = true;
/** Initial size of a memory chunk reserved for system cache. */
private long sysRegionInitSize = DFLT_SYS_REG_INIT_SIZE;
/** Maximum size of a memory chunk reserved for system cache. */
private long sysRegionMaxSize = DFLT_SYS_REG_MAX_SIZE;
/** Memory page size. */
private int pageSize = IgniteSystemProperties.getInteger(
IGNITE_DEFAULT_DATA_STORAGE_PAGE_SIZE, 0);
/** Concurrency level. */
private int concLvl;
/** Configuration of default data region. */
private DataRegionConfiguration dfltDataRegConf = new DataRegionConfiguration();
/** Data regions. */
@GridToStringInclude
private DataRegionConfiguration[] dataRegions;
/** Directory where index and partition files are stored. */
private String storagePath;
/** Checkpoint frequency. */
private long checkpointFreq = DFLT_CHECKPOINT_FREQ;
/** Lock wait time, in milliseconds. */
private long lockWaitTime = DFLT_LOCK_WAIT_TIME;
/** */
private int checkpointThreads = DFLT_CHECKPOINT_THREADS;
/** Checkpoint write order. */
private CheckpointWriteOrder checkpointWriteOrder = DFLT_CHECKPOINT_WRITE_ORDER;
/** Number of checkpoints to keep */
private int walHistSize = DFLT_WAL_HISTORY_SIZE;
/** Maximum size of wal archive folder, in bytes */
private long maxWalArchiveSize = DFLT_WAL_ARCHIVE_MAX_SIZE;
/** Number of work WAL segments. */
private int walSegments = DFLT_WAL_SEGMENTS;
/** Size of one WAL segment in bytes. 64 Mb is used by default. Maximum value is 2Gb */
private int walSegmentSize = DFLT_WAL_SEGMENT_SIZE;
/** Directory where WAL is stored (work directory) */
private String walPath = DFLT_WAL_PATH;
/** WAL archive path. */
private String walArchivePath = DFLT_WAL_ARCHIVE_PATH;
/** Metrics enabled flag. */
private boolean metricsEnabled = DFLT_METRICS_ENABLED;
/** Wal mode. */
private WALMode walMode = DFLT_WAL_MODE;
/** WAl thread local buffer size. */
private int walTlbSize = DFLT_TLB_SIZE;
/** WAl buffer size. */
private int walBuffSize;
/** Wal flush frequency in milliseconds. */
private long walFlushFreq = DFLT_WAL_FLUSH_FREQ;
/** Wal fsync delay. */
private long walFsyncDelay = DFLT_WAL_FSYNC_DELAY;
/** Wal record iterator buffer size. */
private int walRecordIterBuffSize = DFLT_WAL_RECORD_ITERATOR_BUFFER_SIZE;
/** Always write full pages. */
private boolean alwaysWriteFullPages = DFLT_WAL_ALWAYS_WRITE_FULL_PAGES;
/** Factory to provide I/O interface for data storage files */
private FileIOFactory fileIOFactory =
IgniteSystemProperties.getBoolean(IGNITE_USE_ASYNC_FILE_IO_FACTORY, DFLT_USE_ASYNC_FILE_IO_FACTORY) ?
new AsyncFileIOFactory() : new RandomAccessFileIOFactory();
/**
* Number of sub-intervals the whole {@link #setMetricsRateTimeInterval(long)} will be split into to calculate
* rate-based metrics.
* <p>
* Setting it to a bigger value will result in more precise calculation and smaller drops of
* rate-based metrics when next sub-interval has to be recycled but introduces bigger
* calculation overhead.
*/
private int metricsSubIntervalCnt = DFLT_SUB_INTERVALS;
/** Time interval (in milliseconds) for rate-based metrics. */
private long metricsRateTimeInterval = DFLT_RATE_TIME_INTERVAL_MILLIS;
/**
* Time interval (in milliseconds) for running auto archiving for incompletely WAL segment
*/
private long walAutoArchiveAfterInactivity = -1;
/**
* If true, threads that generate dirty pages too fast during ongoing checkpoint will be throttled.
*/
private boolean writeThrottlingEnabled = DFLT_WRITE_THROTTLING_ENABLED;
/**
* Flag to enable WAL compaction. If true, system filters and compresses WAL archive in background.
* Compressed WAL archive gets automatically decompressed on demand.
*/
private boolean walCompactionEnabled = DFLT_WAL_COMPACTION_ENABLED;
/**
* ZIP level to WAL compaction.
*
* @see java.util.zip.ZipOutputStream#setLevel(int)
* @see java.util.zip.Deflater#BEST_SPEED
* @see java.util.zip.Deflater#BEST_COMPRESSION
*/
private int walCompactionLevel = DFLT_WAL_COMPACTION_LEVEL;
/** Timeout for checkpoint read lock acquisition. */
private Long checkpointReadLockTimeout;
/** Compression algorithm for WAL page snapshot records. */
private DiskPageCompression walPageCompression = DFLT_WAL_PAGE_COMPRESSION;
/** Compression level for WAL page snapshot records. */
private Integer walPageCompressionLevel;
/** Default warm-up configuration. */
@Nullable private WarmUpConfiguration dfltWarmUpCfg;
/** Encryption configuration. */
private EncryptionConfiguration encCfg = new EncryptionConfiguration();
/**
 * Creates a valid durable memory configuration with all default values.
 * Every property can be tuned afterwards via the fluent setters.
 */
@SuppressWarnings("RedundantNoArgConstructor")
public DataStorageConfiguration() {
    // No-op: field initializers already establish the documented defaults.
}
/**
 * Gets the initial size, in bytes, of the data region reserved for the system cache.
 *
 * @return Initial system region size in bytes.
 */
public long getSystemRegionInitialSize() {
    return sysRegionInitSize;
}
/**
 * Sets initial size of a data region reserved for system cache.
 *
 * Default value is {@link #DFLT_SYS_REG_INIT_SIZE}.
 *
 * @param sysRegionInitSize Size in bytes; must be positive (rejected via {@code A.ensure} otherwise).
 * @return {@code this} for chaining.
 */
public DataStorageConfiguration setSystemRegionInitialSize(long sysRegionInitSize) {
    // Validate eagerly so misconfiguration fails at configuration time, not at startup.
    A.ensure(sysRegionInitSize > 0, "System region initial size can not be less zero.");
    this.sysRegionInitSize = sysRegionInitSize;
    return this;
}
/**
* Maximum data region size reserved for system cache.
*
* @return Size in bytes.
*/
public long getSystemRegionMaxSize() {
return sysRegionMaxSize;
}
/**
* Sets maximum data region size reserved for system cache. The total size should not be less than 10 MB
* due to internal data structures overhead.
*
* Default value is {@link #DFLT_SYS_REG_MAX_SIZE}.
*
* @param sysRegionMaxSize Maximum size in bytes for system cache data region.
* @return {@code this} for chaining.
*/
public DataStorageConfiguration setSystemRegionMaxSize(long sysRegionMaxSize) {
A.ensure(sysRegionMaxSize > 0, "System region max size can not be less zero.");
this.sysRegionMaxSize = sysRegionMaxSize;
return this;
}
/**
* The page memory consists of one or more expandable data regions defined by {@link DataRegionConfiguration}.
* Every data region is split on pages of fixed size that store actual cache entries.
*
* @return Page size in bytes.
*/
public int getPageSize() {
return pageSize;
}
/**
 * Changes the page size.
 *
 * @param pageSize Page size in bytes. Supported values are: {@code 1024}, {@code 2048}, {@code 4096}, {@code 8192}
 * and {@code 16384}. If value is not set (or zero), {@link #DFLT_PAGE_SIZE} ({@code 4096}) will be used.
 * @return {@code this} for chaining.
 * @see #MIN_PAGE_SIZE
 * @see #MAX_PAGE_SIZE
 */
public DataStorageConfiguration setPageSize(int pageSize) {
    // Zero means "use default"; any other value must be a power of 2 within [MIN_PAGE_SIZE, MAX_PAGE_SIZE].
    if (pageSize != 0) {
        A.ensure(pageSize >= MIN_PAGE_SIZE && pageSize <= MAX_PAGE_SIZE,
            "Page size must be between 1kB and 16kB.");
        A.ensure(U.isPow2(pageSize), "Page size must be a power of 2.");
    }
    this.pageSize = pageSize;
    return this;
}
/**
* Gets an array of all data regions configured. Apache Ignite will instantiate a dedicated data region per
* region. An Apache Ignite cache can be mapped to a specific region with
* {@link CacheConfiguration#setDataRegionName(String)} method.
*
* @return Array of configured data regions.
*/
public DataRegionConfiguration[] getDataRegionConfigurations() {
return dataRegions;
}
/**
* Sets data regions configurations.
*
* @param dataRegionConfigurations Data regions configurations.
*/
public DataStorageConfiguration setDataRegionConfigurations(DataRegionConfiguration... dataRegionConfigurations) {
this.dataRegions = dataRegionConfigurations;
return this;
}
/**
 * Returns the number of concurrent segments in Ignite internal page mapping tables.
 *
 * By default equals to the number of available CPUs.
 *
 * @return Mapping table concurrency level (always greater than 0).
 */
public int getConcurrencyLevel() {
    if (concLvl > 0)
        return concLvl;

    // A non-positive value means "not configured": fall back to the CPU count.
    return Runtime.getRuntime().availableProcessors();
}
/**
* Sets the number of concurrent segments in Ignite internal page mapping tables.
*
* If value is not positive, the number of available CPUs will be used.
*
* @param concLvl Mapping table concurrency level.
*/
public DataStorageConfiguration setConcurrencyLevel(int concLvl) {
this.concLvl = concLvl;
return this;
}
/**
* @return Configuration of default data region. All cache groups will reside in this data region by default.
* For assigning a custom data region to cache group, use {@link CacheConfiguration#setDataRegionName(String)}.
*/
public DataRegionConfiguration getDefaultDataRegionConfiguration() {
return dfltDataRegConf;
}
/**
* Overrides configuration of default data region which is created automatically.
*
* @param dfltDataRegConf Default data region configuration.
*/
public DataStorageConfiguration setDefaultDataRegionConfiguration(DataRegionConfiguration dfltDataRegConf) {
this.dfltDataRegConf = dfltDataRegConf;
return this;
}
/**
* Returns a path the root directory where the Persistent Store will persist data and indexes.
*/
public String getStoragePath() {
return storagePath;
}
/**
* Sets a path to the root directory where the Persistent Store will persist data and indexes.
* By default the Persistent Store's files are located under Ignite work directory.
*
* @param persistenceStorePath Persistence store path.
*/
public DataStorageConfiguration setStoragePath(String persistenceStorePath) {
this.storagePath = persistenceStorePath;
return this;
}
/**
* Gets checkpoint frequency.
*
* @return Checkpoint frequency in milliseconds.
*/
public long getCheckpointFrequency() {
return checkpointFreq <= 0 ? DFLT_CHECKPOINT_FREQ : checkpointFreq;
}
/**
* Sets the checkpoint frequency which is a minimal interval when the dirty pages will be written
* to the Persistent Store. If the rate is high, checkpoint will be triggered more frequently.
*
* If value is not positive, {@link #DFLT_CHECKPOINT_FREQ} will be used.
*
* @param checkpointFreq Checkpoint frequency in milliseconds.
* @return {@code this} for chaining.
*/
public DataStorageConfiguration setCheckpointFrequency(long checkpointFreq) {
this.checkpointFreq = checkpointFreq;
return this;
}
/**
* Gets a number of threads to use for the checkpoint purposes.
*
* @return Number of checkpoint threads.
*/
public int getCheckpointThreads() {
return checkpointThreads;
}
/**
* Sets a number of threads to use for the checkpoint purposes.
*
* @param checkpointThreads Number of checkpoint threads. Four threads are used by default.
* @return {@code this} for chaining.
*/
public DataStorageConfiguration setCheckpointThreads(int checkpointThreads) {
this.checkpointThreads = checkpointThreads;
return this;
}
/**
* Timeout in milliseconds to wait when acquiring persistence store lock file before failing the local node.
*
* @return Lock wait time in milliseconds.
*/
public long getLockWaitTime() {
return lockWaitTime;
}
/**
* Timeout in milliseconds to wait when acquiring persistence store lock file before failing the local node.
*
* @param lockWaitTime Lock wait time in milliseconds.
* @return {@code this} for chaining.
*/
public DataStorageConfiguration setLockWaitTime(long lockWaitTime) {
this.lockWaitTime = lockWaitTime;
return this;
}
/**
* Gets a total number of checkpoints to keep in the WAL history.
*
* @return Number of checkpoints to keep in WAL after a checkpoint is finished.
* @see DataStorageConfiguration#getMaxWalArchiveSize()
* @deprecated Instead of walHistorySize use maxWalArchiveSize for manage of archive size.
*/
@Deprecated
public int getWalHistorySize() {
return walHistSize <= 0 ? DFLT_WAL_HISTORY_SIZE : walHistSize;
}
/**
* Sets a total number of checkpoints to keep in the WAL history.
*
* @param walHistSize Number of checkpoints to keep after a checkpoint is finished.
* @return {@code this} for chaining.
* @see DataStorageConfiguration#setMaxWalArchiveSize(long)
* @deprecated Instead of walHistorySize use maxWalArchiveSize for manage of archive size.
*/
@Deprecated
public DataStorageConfiguration setWalHistorySize(int walHistSize) {
this.walHistSize = walHistSize;
return this;
}
/**
 * If WalHistorySize was set by user will use this parameter for compatibility.
 *
 * @return {@code true} if use WalHistorySize for compatibility.
 */
public boolean isWalHistorySizeParameterUsed() {
    // Treated as "user-set" only when it differs from both the default and Integer.MAX_VALUE.
    // NOTE(review): Integer.MAX_VALUE appears to act as an "unlimited" sentinel — confirm.
    return getWalHistorySize() != DFLT_WAL_HISTORY_SIZE && getWalHistorySize() != Integer.MAX_VALUE;
}
/**
 * Gets the maximum allowed size (in bytes) of the WAL archive directory.
 *
 * @return Max WAL archive size in bytes (always greater than 0).
 */
public long getMaxWalArchiveSize() {
    // A non-positive configured value means "use the default".
    return maxWalArchiveSize > 0 ? maxWalArchiveSize : DFLT_WAL_ARCHIVE_MAX_SIZE;
}
/**
* Sets a max allowed size(in bytes) of WAL archives.
*
* If value is not positive, {@link #DFLT_WAL_ARCHIVE_MAX_SIZE} will be used.
*
* @param walArchiveMaxSize max size(in bytes) of WAL archive directory.
* @return {@code this} for chaining.
*/
public DataStorageConfiguration setMaxWalArchiveSize(long walArchiveMaxSize) {
this.maxWalArchiveSize = walArchiveMaxSize;
return this;
}
/**
* Gets a number of WAL segments to work with.
*
* @return Number of work WAL segments.
*/
public int getWalSegments() {
return walSegments <= 0 ? DFLT_WAL_SEGMENTS : walSegments;
}
/**
* Sets a number of WAL segments to work with. For performance reasons,
* the whole WAL is split into files of fixed length called segments.
*
* @param walSegments Number of WAL segments. Value must be greater than 1.
* @return {@code this} for chaining.
*/
public DataStorageConfiguration setWalSegments(int walSegments) {
if (walSegments != 0)
A.ensure(walSegments > 1, "Number of WAL segments must be greater than 1.");
this.walSegments = walSegments;
return this;
}
/**
 * Gets the size (in bytes) of a single WAL segment file.
 *
 * @return WAL segment size in bytes.
 */
public int getWalSegmentSize() {
    // Zero means "not configured": use the default segment size.
    return walSegmentSize != 0 ? walSegmentSize : DFLT_WAL_SEGMENT_SIZE;
}
/**
 * Sets size(in bytes) of a WAL segment.
 * If value is not set (or zero), {@link #DFLT_WAL_SEGMENT_SIZE} will be used.
 *
 * @param walSegmentSize WAL segment size(in bytes). Value must be between 512Kb and 2Gb.
 * @return {@code This} for chaining.
 */
public DataStorageConfiguration setWalSegmentSize(int walSegmentSize) {
    // Only the lower bound is checked explicitly; the 2Gb upper bound is implied by the int type.
    if (walSegmentSize != 0)
        A.ensure(walSegmentSize >= 512 * 1024, "WAL segment size must be between 512Kb and 2Gb.");
    this.walSegmentSize = walSegmentSize;
    return this;
}
/**
* Gets a path to the directory where WAL is stored.
*
* @return WAL persistence path, absolute or relative to Ignite work directory.
*/
public String getWalPath() {
return walPath;
}
/**
* Sets a path to the directory where WAL is stored. If this path is relative, it will be resolved
* relatively to Ignite work directory.
*
* @param walStorePath WAL persistence path, absolute or relative to Ignite work directory.
* @return {@code this} for chaining.
*/
public DataStorageConfiguration setWalPath(String walStorePath) {
this.walPath = walStorePath;
return this;
}
/**
* Gets a path to the WAL archive directory.
*
* @return WAL archive directory.
*/
public String getWalArchivePath() {
return walArchivePath;
}
/**
* Sets a path for the WAL archive directory. Every WAL segment will be fully copied to this directory before
* it can be reused for WAL purposes.
*
* @param walArchivePath WAL archive directory.
* @return {@code this} for chaining.
*/
public DataStorageConfiguration setWalArchivePath(String walArchivePath) {
this.walArchivePath = walArchivePath;
return this;
}
/**
* Gets flag indicating whether persistence metrics collection is enabled.
* Default value is {@link #DFLT_METRICS_ENABLED}.
*
* @return Metrics enabled flag.
*/
public boolean isMetricsEnabled() {
return metricsEnabled;
}
/**
* Sets flag indicating whether persistence metrics collection is enabled.
*
* @param metricsEnabled Metrics enabled flag.
*/
public DataStorageConfiguration setMetricsEnabled(boolean metricsEnabled) {
this.metricsEnabled = metricsEnabled;
return this;
}
/**
* Gets flag indicating whether write throttling is enabled.
*/
public boolean isWriteThrottlingEnabled() {
return writeThrottlingEnabled;
}
/**
* Sets flag indicating whether write throttling is enabled.
*
* @param writeThrottlingEnabled Write throttling enabled flag.
*/
public DataStorageConfiguration setWriteThrottlingEnabled(boolean writeThrottlingEnabled) {
this.writeThrottlingEnabled = writeThrottlingEnabled;
return this;
}
/**
* Gets the length of the time interval for rate-based metrics. This interval defines a window over which
* hits will be tracked. Default value is {@link #DFLT_RATE_TIME_INTERVAL_MILLIS}.
*
* @return Time interval in milliseconds.
* @deprecated Use {@link MetricsMxBean#configureHitRateMetric(String, long)} instead.
*/
@Deprecated
public long getMetricsRateTimeInterval() {
return metricsRateTimeInterval;
}
/**
* Sets the length of the time interval for rate-based metrics. This interval defines a window over which
* hits will be tracked.
*
* @param metricsRateTimeInterval Time interval in milliseconds.
* @deprecated Use {@link MetricsMxBean#configureHitRateMetric(String, long)} instead.
*/
@Deprecated
public DataStorageConfiguration setMetricsRateTimeInterval(long metricsRateTimeInterval) {
this.metricsRateTimeInterval = metricsRateTimeInterval;
return this;
}
/**
* Gets the number of sub-intervals to split the {@link #getMetricsRateTimeInterval()} into to track the update history.
* Default value is {@link #DFLT_SUB_INTERVALS}.
*
* @return The number of sub-intervals for history tracking.
* @deprecated Use {@link MetricsMxBean#configureHitRateMetric(String, long)} instead.
*/
@Deprecated
public int getMetricsSubIntervalCount() {
return metricsSubIntervalCnt;
}
/**
* Sets the number of sub-intervals to split the {@link #getMetricsRateTimeInterval()} into to track the update history.
*
* @param metricsSubIntervalCnt The number of sub-intervals for history tracking.
* @deprecated Use {@link MetricsMxBean#configureHitRateMetric(String, long)} instead.
*/
@Deprecated
public DataStorageConfiguration setMetricsSubIntervalCount(int metricsSubIntervalCnt) {
this.metricsSubIntervalCnt = metricsSubIntervalCnt;
return this;
}
/**
 * Property that defines behavior of wal fsync.
 * Different type provides different guarantees for consistency. See {@link WALMode} for details.
 *
 * @return Configured WAL mode, or {@link #DFLT_WAL_MODE} when none was set.
 */
public WALMode getWalMode() {
    if (walMode != null)
        return walMode;

    return DFLT_WAL_MODE;
}
/**
 * Sets property that defines behavior of wal fsync.
 * Different type provides different guarantees for consistency. See {@link WALMode} for details.
 *
 * @param walMode Wal mode. {@link WALMode#DEFAULT} is silently remapped to {@link WALMode#FSYNC}.
 * @return {@code this} for chaining.
 */
public DataStorageConfiguration setWalMode(WALMode walMode) {
    // DEFAULT is treated as an alias for FSYNC.
    if (walMode == WALMode.DEFAULT)
        walMode = WALMode.FSYNC;
    this.walMode = walMode;
    return this;
}
/**
* Property for size of thread local buffer.
* Each thread which write to wal have thread local buffer for serialize recode before write in wal.
*
* @return Thread local buffer size (in bytes).
*/
public int getWalThreadLocalBufferSize() {
return walTlbSize <= 0 ? DFLT_TLB_SIZE : walTlbSize;
}
/**
* Sets size of thread local buffer.
* Each thread which write to wal have thread local buffer for serialize recode before write in wal.
*
* @param walTlbSize Thread local buffer size (in bytes).
*/
public DataStorageConfiguration setWalThreadLocalBufferSize(int walTlbSize) {
this.walTlbSize = walTlbSize;
return this;
}
/**
 * Property defines size(in bytes) of WAL buffer.
 * Each WAL record will be serialized to this buffer before write in WAL file.
 *
 * @return WAL buffer size(in bytes).
 */
public int getWalBufferSize() {
    if (walBuffSize > 0)
        return walBuffSize;

    // Not configured: derive from the (possibly defaulted) segment size.
    return getWalSegmentSize() / 4;
}
/**
 * Sets the size (in bytes) of the WAL buffer.
 * A non-positive value makes the effective size derive from {@link #getWalSegmentSize()}.
 *
 * @param walBuffSize WAL buffer size (in bytes).
 * @return {@code this} for chaining.
 */
public DataStorageConfiguration setWalBufferSize(int walBuffSize) {
    this.walBuffSize = walBuffSize;

    return this;
}
/**
 * How often the WAL is fsync-ed in {@code BACKGROUND} mode.
 * Ignored for all other WAL modes.
 *
 * @return WAL flush frequency, in milliseconds.
 */
public long getWalFlushFrequency() {
    return this.walFlushFreq;
}
/**
 * Sets how often the WAL is fsync-ed in {@code BACKGROUND} mode.
 * Ignored for all other WAL modes.
 *
 * @param walFlushFreq WAL flush frequency, in milliseconds.
 * @return {@code this} for chaining.
 */
public DataStorageConfiguration setWalFlushFrequency(long walFlushFreq) {
    this.walFlushFreq = walFlushFreq;

    return this;
}
/**
 * Property that allows to trade latency for throughput in {@link WALMode#FSYNC} mode.
 * It limits the minimum time interval between WAL fsyncs: the first thread initiating a
 * fsync waits this many nanoseconds while other threads simply wait for it (similar to
 * a CyclicBarrier). Total throughput should increase under load as the overall fsync
 * rate is capped.
 *
 * @return WAL fsync delay in nanoseconds; default when unset or non-positive.
 */
public long getWalFsyncDelayNanos() {
    if (walFsyncDelay > 0)
        return walFsyncDelay;

    return DFLT_WAL_FSYNC_DELAY;
}
/**
 * Sets property that allows to trade latency for throughput in {@link WALMode#FSYNC} mode.
 * It limits the minimum time interval between WAL fsyncs: the first thread initiating a
 * fsync waits this many nanoseconds while other threads simply wait for it (similar to
 * a CyclicBarrier).
 *
 * @param walFsyncDelayNanos WAL fsync delay, in nanoseconds.
 * @return {@code this} for chaining.
 */
public DataStorageConfiguration setWalFsyncDelayNanos(long walFsyncDelayNanos) {
    this.walFsyncDelay = walFsyncDelayNanos;

    return this;
}
/**
 * How many bytes the WAL iterator reads from disk per read while replaying the WAL.
 *
 * @return Record iterator buffer size in bytes; default when unset or non-positive.
 */
public int getWalRecordIteratorBufferSize() {
    if (walRecordIterBuffSize > 0)
        return walRecordIterBuffSize;

    return DFLT_WAL_RECORD_ITERATOR_BUFFER_SIZE;
}
/**
 * Sets how many bytes the WAL iterator reads from disk per read while replaying the WAL.
 *
 * @param walRecordIterBuffSize WAL record iterator buffer size in bytes.
 * @return {@code this} for chaining.
 */
public DataStorageConfiguration setWalRecordIteratorBufferSize(int walRecordIterBuffSize) {
    this.walRecordIterBuffSize = walRecordIterBuffSize;

    return this;
}
/**
 * Flag that enforces writing a full page to the WAL on every change (instead of a delta
 * record). Useful for debugging since every page version ends up in the WAL; note the
 * WAL grows several times larger in this mode.
 *
 * @return {@code true} if full pages are always written.
 */
public boolean isAlwaysWriteFullPages() {
    return this.alwaysWriteFullPages;
}
/**
 * Sets the flag that enforces writing a full page to the WAL on every change (instead of
 * a delta record). Useful for debugging; note the WAL grows several times larger in this
 * mode.
 *
 * @param alwaysWriteFullPages Always write full pages flag.
 * @return {@code this} for chaining.
 */
public DataStorageConfiguration setAlwaysWriteFullPages(boolean alwaysWriteFullPages) {
    this.alwaysWriteFullPages = alwaysWriteFullPages;

    return this;
}
/**
 * Factory providing the FileIO implementation used for data storage file
 * read/write operations.
 *
 * @return File I/O factory.
 */
public FileIOFactory getFileIOFactory() {
    return this.fileIOFactory;
}
/**
 * Sets the factory providing the FileIO implementation used for data storage file
 * read/write operations.
 *
 * @param fileIOFactory File I/O factory.
 * @return {@code this} for chaining.
 */
public DataStorageConfiguration setFileIOFactory(FileIOFactory fileIOFactory) {
    this.fileIOFactory = fileIOFactory;

    return this;
}
/**
 * Sets the inactivity period after which the current (possibly incomplete) WAL segment
 * is auto-archived.
 * <b>Note:</b> with {@link WALMode#FSYNC} this may generate file size overhead for WAL
 * segments when the grid is used rarely.
 *
 * @param walAutoArchiveAfterInactivity Time in millis to run auto archiving of a segment
 * (even if incomplete) after the last record was logged. Positive value enables the
 * feature; zero or negative disables it.
 * @return {@code this} for chaining.
 */
public DataStorageConfiguration setWalAutoArchiveAfterInactivity(long walAutoArchiveAfterInactivity) {
    this.walAutoArchiveAfterInactivity = walAutoArchiveAfterInactivity;

    return this;
}
/**
 * @return Time in millis after which an (even incomplete) WAL segment is auto-archived,
 * counted from the last logged record.
 */
public long getWalAutoArchiveAfterInactivity() {
    return this.walAutoArchiveAfterInactivity;
}
/**
 * Order in which pages are written to disk storage during a checkpoint.
 *
 * @return Checkpoint write order.
 */
public CheckpointWriteOrder getCheckpointWriteOrder() {
    return this.checkpointWriteOrder;
}
/**
 * Sets the order in which pages are written to disk storage during a checkpoint.
 *
 * @param checkpointWriteOrder Checkpoint write order.
 * @return {@code this} for chaining.
 */
public DataStorageConfiguration setCheckpointWriteOrder(CheckpointWriteOrder checkpointWriteOrder) {
    this.checkpointWriteOrder = checkpointWriteOrder;

    return this;
}
/**
 * @return Whether WAL compaction is enabled.
 */
public boolean isWalCompactionEnabled() {
    return this.walCompactionEnabled;
}
/**
 * Enables or disables WAL compaction.
 *
 * @param walCompactionEnabled WAL compaction enabled flag.
 * @return {@code this} for chaining.
 */
public DataStorageConfiguration setWalCompactionEnabled(boolean walCompactionEnabled) {
    this.walCompactionEnabled = walCompactionEnabled;

    return this;
}
/**
 * @return ZIP compression level used for WAL compaction.
 */
public int getWalCompactionLevel() {
    return this.walCompactionLevel;
}
/**
 * Sets the ZIP compression level used for WAL compaction.
 *
 * @param walCompactionLevel New ZIP level for WAL compaction.
 * @return {@code this} for chaining.
 */
public DataStorageConfiguration setWalCompactionLevel(int walCompactionLevel) {
    this.walCompactionLevel = walCompactionLevel;

    // Return this for fluent chaining, consistent with every other setter of this class
    // (source-compatible: existing callers ignoring the result still compile).
    return this;
}
/**
 * Returns the timeout for checkpoint read lock acquisition.
 *
 * @see #setCheckpointReadLockTimeout(long)
 * @return Timeout for checkpoint read lock acquisition, in milliseconds.
 */
public Long getCheckpointReadLockTimeout() {
    return this.checkpointReadLockTimeout;
}
/**
 * Sets the timeout for checkpoint read lock acquisition.
 * <p>
 * When a thread cannot acquire the checkpoint read lock within this time, the critical
 * failure handler is invoked.
 *
 * @param checkpointReadLockTimeout Timeout for checkpoint read lock acquisition, in milliseconds.
 * @return {@code this} for chaining.
 */
public DataStorageConfiguration setCheckpointReadLockTimeout(long checkpointReadLockTimeout) {
    this.checkpointReadLockTimeout = checkpointReadLockTimeout;

    return this;
}
/**
 * Gets the compression algorithm for WAL page snapshot records.
 *
 * @return Page compression algorithm; default when none was set.
 */
public DiskPageCompression getWalPageCompression() {
    if (walPageCompression != null)
        return walPageCompression;

    return DFLT_WAL_PAGE_COMPRESSION;
}
/**
 * Sets the compression algorithm for WAL page snapshot records.
 *
 * @param walPageCompression Page compression algorithm.
 * @return {@code this} for chaining.
 */
public DataStorageConfiguration setWalPageCompression(DiskPageCompression walPageCompression) {
    this.walPageCompression = walPageCompression;

    return this;
}
/**
 * Gets the {@link #getWalPageCompression algorithm}-specific WAL page compression level.
 *
 * @return WAL page snapshot compression level, or {@code null} for the default.
 */
public Integer getWalPageCompressionLevel() {
    return this.walPageCompressionLevel;
}
/**
 * Sets the {@link #setWalPageCompression algorithm}-specific page compression level.
 *
 * @param walPageCompressionLevel Disk page compression level or {@code null} for the default.
 * {@link DiskPageCompression#ZSTD Zstd}: from {@code -131072} to {@code 22} (default {@code 3}).
 * {@link DiskPageCompression#LZ4 LZ4}: from {@code 0} to {@code 17} (default {@code 0}).
 * @return {@code this} for chaining.
 */
public DataStorageConfiguration setWalPageCompressionLevel(Integer walPageCompressionLevel) {
    this.walPageCompressionLevel = walPageCompressionLevel;

    return this;
}
/**
 * Gets encryption configuration.
 *
 * @return Encryption configuration.
 */
public EncryptionConfiguration getEncryptionConfiguration() {
    return encCfg;
}
/**
 * Sets encryption configuration.
 *
 * @param encCfg Encryption configuration.
 * @return {@code this} for chaining.
 */
public DataStorageConfiguration setEncryptionConfiguration(EncryptionConfiguration encCfg) {
    this.encCfg = encCfg;

    return this;
}
/**
 * Sets the default warm-up configuration.
 *
 * @param dfltWarmUpCfg Default warm-up configuration. To assign a special warm-up
 * configuration to an individual data region, use
 * {@link DataRegionConfiguration#setWarmUpConfiguration}.
 * @return {@code this} for chaining.
 */
public DataStorageConfiguration setDefaultWarmUpConfiguration(@Nullable WarmUpConfiguration dfltWarmUpCfg) {
    this.dfltWarmUpCfg = dfltWarmUpCfg;

    return this;
}
/**
 * Gets the default warm-up configuration.
 *
 * @return Default warm-up configuration, possibly {@code null}.
 */
@Nullable public WarmUpConfiguration getDefaultWarmUpConfiguration() {
    return this.dfltWarmUpCfg;
}
/** {@inheritDoc} */
@Override public String toString() {
    // Reflection-based rendering of all configuration properties.
    return S.toString(DataStorageConfiguration.class, this);
}
}
|
|
/*
* Copyright 2021 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.config;
import com.thoughtworks.go.config.materials.MaterialConfigs;
import com.thoughtworks.go.config.materials.ScmMaterialConfig;
import com.thoughtworks.go.config.materials.mercurial.HgMaterialConfig;
import com.thoughtworks.go.config.parts.XmlPartialConfigProvider;
import com.thoughtworks.go.config.registry.ConfigElementImplementationRegistry;
import com.thoughtworks.go.config.remote.ConfigRepoConfig;
import com.thoughtworks.go.config.remote.PartialConfig;
import com.thoughtworks.go.config.remote.RepoConfigOrigin;
import com.thoughtworks.go.config.rules.Allow;
import com.thoughtworks.go.config.update.CreatePipelineConfigCommand;
import com.thoughtworks.go.config.update.FullConfigUpdateCommand;
import com.thoughtworks.go.config.validation.GoConfigValidity;
import com.thoughtworks.go.domain.config.Configuration;
import com.thoughtworks.go.domain.materials.MaterialConfig;
import com.thoughtworks.go.domain.materials.Modification;
import com.thoughtworks.go.helper.*;
import com.thoughtworks.go.listener.ConfigChangedListener;
import com.thoughtworks.go.plugin.access.artifact.ArtifactMetadataStore;
import com.thoughtworks.go.plugin.api.info.PluginDescriptor;
import com.thoughtworks.go.plugin.domain.artifact.ArtifactPluginInfo;
import com.thoughtworks.go.plugin.domain.artifact.Capabilities;
import com.thoughtworks.go.plugin.domain.common.Metadata;
import com.thoughtworks.go.plugin.domain.common.PluggableInstanceSettings;
import com.thoughtworks.go.plugin.domain.common.PluginConfiguration;
import com.thoughtworks.go.plugin.infra.plugininfo.GoPluginDescriptor;
import com.thoughtworks.go.security.CryptoException;
import com.thoughtworks.go.security.GoCipher;
import com.thoughtworks.go.security.ResetCipher;
import com.thoughtworks.go.server.domain.Username;
import com.thoughtworks.go.server.service.ArtifactStoreService;
import com.thoughtworks.go.server.service.ExternalArtifactsService;
import com.thoughtworks.go.server.service.GoConfigService;
import com.thoughtworks.go.server.service.result.DefaultLocalizedOperationResult;
import com.thoughtworks.go.server.service.result.HttpLocalizedOperationResult;
import com.thoughtworks.go.serverhealth.HealthStateScope;
import com.thoughtworks.go.serverhealth.HealthStateType;
import com.thoughtworks.go.serverhealth.ServerHealthService;
import com.thoughtworks.go.serverhealth.ServerHealthState;
import com.thoughtworks.go.service.ConfigRepository;
import com.thoughtworks.go.util.*;
import com.thoughtworks.go.util.command.CommandLine;
import com.thoughtworks.go.util.command.ConsoleResult;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.eclipse.jgit.api.errors.GitAPIException;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.rules.TemporaryFolder;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.io.ClassPathResource;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import java.util.UUID;
import java.util.function.Predicate;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import static com.thoughtworks.go.helper.ConfigFileFixture.DEFAULT_XML_WITH_2_AGENTS;
import static com.thoughtworks.go.helper.MaterialConfigsMother.git;
import static java.nio.charset.StandardCharsets.UTF_8;
import static java.util.Arrays.asList;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.fail;
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(locations = {
"classpath:/applicationContext-global.xml",
"classpath:/applicationContext-dataLocalAccess.xml",
"classpath:/testPropertyConfigurer.xml",
"classpath:/spring-all-servlet.xml",
})
public class CachedGoConfigIntegrationTest {
@Autowired
private GoConfigWatchList configWatchList;
@Autowired
private GoConfigRepoConfigDataSource repoConfigDataSource;
@Autowired
private CachedGoConfig cachedGoConfig;
private GoConfigFileHelper configHelper;
@Autowired
private ServerHealthService serverHealthService;
@Autowired
private GoConfigService goConfigService;
@Autowired
private GoConfigDao goConfigDao;
@Autowired
private CachedGoPartials cachedGoPartials;
@Autowired
private PartialConfigService partialConfigService;
@Autowired
private GoFileConfigDataSource goFileConfigDataSource;
@Autowired
private ConfigRepository configRepository;
@Autowired
private ExternalArtifactsService externalArtifactsService;
@Autowired
private ArtifactStoreService artifactStoreService;
@Autowired
private GoConfigMigration goConfigMigration;
@Autowired
private ConfigElementImplementationRegistry registry;
@Autowired
private ConfigCache configCache;
@Rule
public final TemporaryFolder temporaryFolder = new TemporaryFolder();
private Modification latestModification;
private ConfigRepoConfig configRepo;
private File externalConfigRepo;
@Rule
public ExpectedException thrown = ExpectedException.none();
@Rule
public final ResetCipher resetCipher = new ResetCipher();
private MagicalGoConfigXmlLoader magicalGoConfigXmlLoader;
@Before
public void setUp() throws Exception {
    // Start each test from a known main config file (two agents, no pipelines).
    configHelper = new GoConfigFileHelper(DEFAULT_XML_WITH_2_AGENTS);
    configHelper.usingCruiseConfigDao(goConfigDao).initializeConfigFile();
    configHelper.onSetUp();
    // External git repository acting as a config repo; it defines pipeline "pipe1".
    externalConfigRepo = temporaryFolder.newFolder();
    latestModification = setupExternalConfigRepo(externalConfigRepo);
    configHelper.addConfigRepo(createConfigRepoWithDefaultRules(git(externalConfigRepo.getAbsolutePath()), XmlPartialConfigProvider.providerName, "gocd-id"));
    // Force the watch list to pick up the freshly registered config repo.
    goConfigService.forceNotifyListeners();
    configRepo = configWatchList.getCurrentConfigRepos().get(0);
    // Drop any partials cached by earlier tests so each test parses from scratch.
    cachedGoPartials.clear();
    configHelper.addEnvironments("some_environment");
    magicalGoConfigXmlLoader = new MagicalGoConfigXmlLoader(configCache, registry);
}
@After
public void tearDown() throws Exception {
    cachedGoPartials.clear();

    // Neither the last-valid nor the last-known partials may leave a test with
    // validation errors attached (valid partials are checked first, as before).
    for (List<PartialConfig> partials : asList(cachedGoPartials.lastValidPartials(), cachedGoPartials.lastKnownPartials())) {
        for (PartialConfig partial : partials) {
            assertThat(ErrorCollector.getAllErrors(partial).isEmpty()).isTrue();
        }
    }

    configHelper.onTearDown();
}
@Test
public void shouldRecoverFromDeepConfigRepoReferencesBug1901When2Repos() throws Exception {
// pipeline references are like this: pipe1 -> downstream
File downstreamExternalConfigRepo = temporaryFolder.newFolder();
/*here is a pipeline 'downstream' with material dependency on 'pipe1' in other repository*/
Modification downstreamLatestModification = setupExternalConfigRepo(downstreamExternalConfigRepo, "external_git_config_repo_referencing_first");
configHelper.addConfigRepo(createConfigRepoWithDefaultRules(git(downstreamExternalConfigRepo.getAbsolutePath()), "gocd-xml", "id"));
goConfigService.forceNotifyListeners();//TODO what if this is not called?
ConfigRepoConfig downstreamConfigRepo = configWatchList.getCurrentConfigRepos().get(1);
assertThat(configWatchList.getCurrentConfigRepos().size()).isEqualTo(2);
// And unluckily downstream gets parsed first
repoConfigDataSource.onCheckoutComplete(downstreamConfigRepo.getRepo(), downstreamExternalConfigRepo, downstreamLatestModification);
// So parsing fails and proper message is shown:
List<ServerHealthState> messageForInvalidMerge = serverHealthService.filterByScope(HealthStateScope.forPartialConfigRepo(downstreamConfigRepo));
assertThat(messageForInvalidMerge.isEmpty()).isFalse();
assertThat(messageForInvalidMerge.get(0).getDescription()).contains("tries to fetch artifact from pipeline "pipe1"");
// and current config is still old
assertThat(goConfigService.hasPipelineNamed(new CaseInsensitiveString("downstream"))).isFalse();
assertThat(cachedGoPartials.lastKnownPartials().size()).isEqualTo(1);
assertThat(cachedGoPartials.lastValidPartials().size()).isEqualTo(0);
//here downstream partial is waiting to be merged
assertThat(cachedGoPartials.lastKnownPartials().get(0).getGroups().get(0).hasPipeline(new CaseInsensitiveString("downstream"))).isTrue();
// Finally upstream config repository is parsed
repoConfigDataSource.onCheckoutComplete(configRepo.getRepo(), externalConfigRepo, latestModification);
// now server should be healthy and contain all pipelines
assertThat(serverHealthService.filterByScope(HealthStateScope.forPartialConfigRepo(configRepo)).isEmpty()).isTrue();
assertThat(serverHealthService.filterByScope(HealthStateScope.forPartialConfigRepo(downstreamConfigRepo)).isEmpty()).isTrue();
assertThat(cachedGoConfig.currentConfig().hasPipelineNamed(new CaseInsensitiveString("pipe1"))).isTrue();
assertThat(cachedGoConfig.currentConfig().hasPipelineNamed(new CaseInsensitiveString("downstream"))).isTrue();
}
@Test
public void shouldRecoverFromDeepConfigRepoReferencesBug1901When3Repos() throws Exception {
// pipeline references are like this: pipe1 -> downstream -> downstream2
File secondDownstreamExternalConfigRepo = temporaryFolder.newFolder();
/*here is a pipeline 'downstream2' with material dependency on 'downstream' in other repository*/
Modification secondDownstreamLatestModification = setupExternalConfigRepo(secondDownstreamExternalConfigRepo, "external_git_config_repo_referencing_second");
configHelper.addConfigRepo(createConfigRepoWithDefaultRules(git(secondDownstreamExternalConfigRepo.getAbsolutePath()), "gocd-xml", "id1"));
File firstDownstreamExternalConfigRepo = temporaryFolder.newFolder();
/*here is a pipeline 'downstream' with material dependency on 'pipe1' in other repository*/
Modification firstDownstreamLatestModification = setupExternalConfigRepo(firstDownstreamExternalConfigRepo, "external_git_config_repo_referencing_first");
configHelper.addConfigRepo(createConfigRepoWithDefaultRules(git(firstDownstreamExternalConfigRepo.getAbsolutePath()), "gocd-xml", "id2"));
goConfigService.forceNotifyListeners();
ConfigRepoConfig firstDownstreamConfigRepo = configWatchList.getCurrentConfigRepos().get(1);
ConfigRepoConfig secondDownstreamConfigRepo = configWatchList.getCurrentConfigRepos().get(2);
assertThat(configWatchList.getCurrentConfigRepos().size()).isEqualTo(3);
// And unluckily downstream2 gets parsed first
repoConfigDataSource.onCheckoutComplete(secondDownstreamConfigRepo.getRepo(), secondDownstreamExternalConfigRepo, secondDownstreamLatestModification);
// So parsing fails and proper message is shown:
List<ServerHealthState> messageForInvalidMerge = serverHealthService.filterByScope(HealthStateScope.forPartialConfigRepo(secondDownstreamConfigRepo));
assertThat(messageForInvalidMerge.isEmpty()).isFalse();
assertThat(messageForInvalidMerge.get(0).getDescription()).contains("tries to fetch artifact from pipeline "downstream"");
// and current config is still old
assertThat(goConfigService.hasPipelineNamed(new CaseInsensitiveString("downstream2"))).isFalse();
assertThat(cachedGoPartials.lastKnownPartials().size()).isEqualTo(1);
assertThat(cachedGoPartials.lastValidPartials().size()).isEqualTo(0);
//here downstream2 partial is waiting to be merged
assertThat(cachedGoPartials.lastKnownPartials().get(0).getGroups().get(0).hasPipeline(new CaseInsensitiveString("downstream2"))).isTrue();
// Then middle upstream config repository is parsed
repoConfigDataSource.onCheckoutComplete(firstDownstreamConfigRepo.getRepo(), firstDownstreamExternalConfigRepo, firstDownstreamLatestModification);
// and errors are still shown
messageForInvalidMerge = serverHealthService.filterByScope(HealthStateScope.forPartialConfigRepo(firstDownstreamConfigRepo));
assertThat(messageForInvalidMerge.isEmpty()).isFalse();
assertThat(messageForInvalidMerge.get(0).getDescription()).contains("Pipeline 'pipe1' does not exist. It is used from pipeline 'downstream'");
// and current config is still old
assertThat(goConfigService.hasPipelineNamed(new CaseInsensitiveString("downstream"))).isFalse();
assertThat(goConfigService.hasPipelineNamed(new CaseInsensitiveString("downstream2"))).isFalse();
assertThat(cachedGoPartials.lastKnownPartials().size()).isEqualTo(2);
assertThat(cachedGoPartials.lastValidPartials().size()).isEqualTo(0);
// Finally upstream config repository is parsed
repoConfigDataSource.onCheckoutComplete(configRepo.getRepo(), externalConfigRepo, latestModification);
// now server should be healthy and contain all pipelines
assertThat(serverHealthService.filterByScope(HealthStateScope.forPartialConfigRepo(firstDownstreamConfigRepo)).isEmpty()).isTrue();
assertThat(serverHealthService.filterByScope(HealthStateScope.forPartialConfigRepo(secondDownstreamConfigRepo)).isEmpty()).isTrue();
assertThat(cachedGoConfig.currentConfig().hasPipelineNamed(new CaseInsensitiveString("pipe1"))).isTrue();
assertThat(cachedGoConfig.currentConfig().hasPipelineNamed(new CaseInsensitiveString("downstream"))).isTrue();
assertThat(cachedGoConfig.currentConfig().hasPipelineNamed(new CaseInsensitiveString("downstream2"))).isTrue();
}
@Test
public void shouldFailWhenTryingToAddPipelineDefinedRemotely() throws Exception {
    assertThat(configWatchList.getCurrentConfigRepos().size()).isEqualTo(1);
    repoConfigDataSource.onCheckoutComplete(configRepo.getRepo(), externalConfigRepo, latestModification);
    assertThat(cachedGoConfig.loadMergedForEditing().hasPipelineNamed(new CaseInsensitiveString("pipe1"))).isTrue();

    // Local pipeline whose name collides with the remotely defined "pipe1".
    PipelineConfig dupPipelineConfig = PipelineMother.twoBuildPlansWithResourcesAndSvnMaterialsAtUrl("pipe1", "ut",
        "www.spring.com");

    try {
        goConfigDao.addPipeline(dupPipelineConfig, PipelineConfigs.DEFAULT_GROUP);
        fail("Should have thrown");
    } catch (RuntimeException ex) {
        assertThat(ex.getMessage()).contains("You have defined multiple pipelines named 'pipe1'. Pipeline names must be unique. Source(s):");
    }
}
@Test
public void shouldNotifyListenersWhenConfigChanged() {
    ConfigChangeListenerStub listener = new ConfigChangeListenerStub();
    cachedGoConfig.registerListener(listener);
    // Registration itself triggers the first notification.
    assertThat(listener.invocationCount).isEqualTo(1);

    // A write - even one that changes nothing - must notify again.
    UpdateConfigCommand noOpCommand = new UpdateConfigCommand() {
        @Override
        public CruiseConfig update(CruiseConfig cruiseConfig) throws Exception {
            return cruiseConfig;
        }
    };
    cachedGoConfig.writeWithLock(noOpCommand);

    assertThat(listener.invocationCount).isEqualTo(2);
}
@Test
public void shouldReturnMergedConfig_WhenThereIsValidPartialConfig() throws Exception {
    // Single config repo registered in setUp().
    assertThat(configWatchList.getCurrentConfigRepos().size()).isEqualTo(1);
    repoConfigDataSource.onCheckoutComplete(configRepo.getRepo(), externalConfigRepo, latestModification);
    // Parsing succeeded, so no health warnings are attached to this repo.
    assertThat(serverHealthService.filterByScope(HealthStateScope.forPartialConfigRepo(configRepo)).isEmpty()).isTrue();
    assertThat(repoConfigDataSource.latestPartialConfigForMaterial(configRepo.getRepo()).getGroups().findGroup("first").findBy(new CaseInsensitiveString("pipe1"))).isNotNull();
    // The remotely defined pipeline is merged into the current config.
    assertThat(cachedGoConfig.currentConfig().hasPipelineNamed(new CaseInsensitiveString("pipe1"))).isTrue();
}
@Test
public void shouldFailWhenTryingToAddPipelineWithTheSameNameAsAnotherPipelineDefinedRemotely_EntitySave() throws Exception {
    assertThat(configWatchList.getCurrentConfigRepos().size()).isEqualTo(1);
    repoConfigDataSource.onCheckoutComplete(configRepo.getRepo(), externalConfigRepo, latestModification);
    assertThat(cachedGoConfig.currentConfig().hasPipelineNamed(new CaseInsensitiveString("pipe1"))).isTrue();
    // Local pipeline whose name collides with the remotely defined "pipe1".
    PipelineConfig dupPipelineConfig = PipelineMother.twoBuildPlansWithResourcesAndSvnMaterialsAtUrl("pipe1", "ut",
        "www.spring.com");
    try {
        goConfigDao.updateConfig(new CreatePipelineConfigCommand(goConfigService, dupPipelineConfig, Username.ANONYMOUS, new DefaultLocalizedOperationResult(), "default", externalArtifactsService), Username.ANONYMOUS);
        fail("Should have thrown");
    } catch (RuntimeException ex) {
        PipelineConfig pipe1 = goConfigService.pipelineConfigNamed(new CaseInsensitiveString("pipe1"));
        String errorMessage = dupPipelineConfig.errors().on(PipelineConfig.NAME);
        assertThat(errorMessage).contains("You have defined multiple pipelines named 'pipe1'. Pipeline names must be unique. Source(s):");
        // The message lists both origins in "[src1, src2]" form; extract the two sources.
        Matcher matcher = Pattern.compile("^.*\\[(.*),\\s(.*)\\].*$").matcher(errorMessage);
        assertThat(matcher.matches()).isTrue();
        assertThat(matcher.groupCount()).isEqualTo(2);
        List<String> expectedSources = asList(dupPipelineConfig.getOriginDisplayName(), pipe1.getOriginDisplayName());
        List<String> actualSources = new ArrayList<>();
        for (int i = 1; i <= matcher.groupCount(); i++) {
            actualSources.add(matcher.group(i));
        }
        // Order of the listed sources is not guaranteed, so compare as sets.
        assertThat(actualSources.size()).isEqualTo(expectedSources.size());
        assertThat(actualSources.containsAll(expectedSources)).isTrue();
    }
}
@Test
public void shouldFailWhenTryingToAddPipelineWithTheSameNameAsAnotherPipelineDefinedRemotely_FullConfigSave() throws Exception {
    assertThat(configWatchList.getCurrentConfigRepos().size()).isEqualTo(1);
    repoConfigDataSource.onCheckoutComplete(configRepo.getRepo(), externalConfigRepo, latestModification);
    assertThat(cachedGoConfig.currentConfig().hasPipelineNamed(new CaseInsensitiveString("pipe1"))).isTrue();
    // Local pipeline whose name collides with the remotely defined "pipe1".
    final PipelineConfig dupPipelineConfig = PipelineMother.twoBuildPlansWithResourcesAndSvnMaterialsAtUrl("pipe1", "ut",
        "www.spring.com");
    try {
        goConfigDao.updateConfig(new UpdateConfigCommand() {
            @Override
            public CruiseConfig update(CruiseConfig cruiseConfig) throws Exception {
                cruiseConfig.getGroups().first().add(dupPipelineConfig);
                return cruiseConfig;
            }
        });
        fail("Should have thrown");
    } catch (RuntimeException ex) {
        String errorMessage = ex.getMessage();
        assertThat(errorMessage).contains("You have defined multiple pipelines named 'pipe1'. Pipeline names must be unique. Source(s):");
        // The message lists both origins in "[src1, src2]" form; extract the two sources.
        Matcher matcher = Pattern.compile("^.*\\[(.*),\\s(.*)\\].*$", Pattern.DOTALL | Pattern.MULTILINE).matcher(errorMessage);
        assertThat(matcher.matches()).isTrue();
        assertThat(matcher.groupCount()).isEqualTo(2);
        PipelineConfig pipe1 = goConfigService.pipelineConfigNamed(new CaseInsensitiveString("pipe1"));
        List<String> expectedSources = asList(dupPipelineConfig.getOriginDisplayName(), pipe1.getOriginDisplayName());
        List<String> actualSources = new ArrayList<>();
        for (int i = 1; i <= matcher.groupCount(); i++) {
            actualSources.add(matcher.group(i));
        }
        // Order of the listed sources is not guaranteed, so compare as sets.
        assertThat(actualSources.size()).isEqualTo(expectedSources.size());
        assertThat(actualSources.containsAll(expectedSources)).isTrue();
    }
}
@Test
public void shouldReturnRemotePipelinesAmongAllPipelinesInMergedConfigForEdit() throws Exception {
    assertThat(configWatchList.getCurrentConfigRepos().size()).isEqualTo(1);
    repoConfigDataSource.onCheckoutComplete(configRepo.getRepo(), externalConfigRepo, latestModification);
    // The editable (merged) view must include remotely defined pipelines too.
    assertThat(cachedGoConfig.loadMergedForEditing().hasPipelineNamed(new CaseInsensitiveString("pipe1"))).isTrue();
}
/**
 * Collects all server-health log entries of the given health state type.
 *
 * @param type Health state type to filter by.
 * @return Matching log entries (possibly empty).
 */
private List<ServerHealthState> findMessageFor(final HealthStateType type) {
    // Lambda replaces the verbose anonymous Predicate; same filter semantics.
    return serverHealthService.logs().stream()
        .filter(element -> element.getType().equals(type))
        .collect(Collectors.toList());
}
@Test
public void shouldNotifyWithMergedConfig_WhenPartUpdated() throws Exception {
    ConfigChangeListenerStub listener = new ConfigChangeListenerStub();
    cachedGoConfig.registerListener(listener);
    // at registration
    assertThat(listener.invocationCount).isEqualTo(1);
    repoConfigDataSource.onCheckoutComplete(configRepo.getRepo(), externalConfigRepo, latestModification);
    // A successful partial parse merges the remote pipeline and notifies listeners.
    assertThat(cachedGoConfig.currentConfig().hasPipelineNamed(new CaseInsensitiveString("pipe1"))).as("currentConfigShouldBeMerged").isTrue();
    assertThat(listener.invocationCount).isEqualTo(2);
}
@Test
public void shouldNotNotifyListenersWhenMergeFails() throws IOException {
    // Push an invalid partial into the external config repo.
    checkinPartial("config_repo_with_invalid_partial");
    ConfigRepoConfig configRepo = configWatchList.getCurrentConfigRepos().get(0);
    ConfigChangeListenerStub listener = new ConfigChangeListenerStub();
    cachedGoConfig.registerListener(listener);
    // at registration
    assertThat(listener.invocationCount).isEqualTo(1);
    repoConfigDataSource.onCheckoutComplete(configRepo.getRepo(), externalConfigRepo, latestModification);
    // Merge failed: config stays main-XML-only and no second notification fires.
    assertThat(cachedGoConfig.currentConfig().hasPipelineNamed(new CaseInsensitiveString("pipeline_with_no_stage"))).as("currentConfigShouldBeMainXmlOnly").isFalse();
    assertThat(listener.invocationCount).isEqualTo(1);
}
@Test
public void shouldSetErrorHealthStateWhenMergeFails() throws IOException {
    // Push an invalid partial (pipeline without stages) into the external config repo.
    checkinPartial("config_repo_with_invalid_partial");
    ConfigRepoConfig configRepo = configWatchList.getCurrentConfigRepos().get(0);
    repoConfigDataSource.onCheckoutComplete(configRepo.getRepo(), externalConfigRepo, latestModification);
    List<ServerHealthState> messageForInvalidMerge = serverHealthService.filterByScope(HealthStateScope.forPartialConfigRepo(configRepo));
    assertThat(messageForInvalidMerge.isEmpty()).isFalse();
    // Use AssertJ's contains() (not String.contains wrapped in isTrue()) so a failure
    // reports the actual description, consistent with the sibling tests.
    assertThat(messageForInvalidMerge.get(0).getDescription()).contains("Pipeline 'pipeline_with_no_stage' does not have any stages configured");
}
@Test
public void shouldUnSetErrorHealthStateWhenMergePasses() throws IOException {
    ConfigRepoConfig configRepo = configWatchList.getCurrentConfigRepos().get(0);
    // Introduce a broken partial: parsing must record a health warning.
    checkinPartial("config_repo_with_invalid_partial/bad_partial.gocd.xml");
    repoConfigDataSource.onCheckoutComplete(configRepo.getRepo(), externalConfigRepo, latestModification);
    assertThat(serverHealthService.filterByScope(HealthStateScope.forPartialConfigRepo(configRepo)).isEmpty()).isFalse();
    //fix partial
    deletePartial("bad_partial.gocd.xml");
    repoConfigDataSource.onCheckoutComplete(configRepo.getRepo(), externalConfigRepo, latestModification);
    // A subsequent successful parse clears the earlier health warning.
    assertThat(serverHealthService.filterByScope(HealthStateScope.forPartialConfigRepo(configRepo)).isEmpty()).isTrue();
}
@Test
public void shouldUpdateCachedConfigOnSave() throws Exception {
    // "some_environment" was added in setUp().
    assertThat(cachedGoConfig.currentConfig().getEnvironments().size()).isEqualTo(1);
    configHelper.addEnvironments("new_env");
    // Saving through the helper must be reflected in the cache without a manual reload.
    assertThat(cachedGoConfig.currentConfig().getEnvironments().size()).isEqualTo(2);
}
@Test
public void shouldReloadCachedConfigWhenWriting() throws Exception {
    // First write: one variable visible through the cached (current) config.
    cachedGoConfig.writeWithLock(updateEnvironmentVariables("var1", "value1"));
    EnvironmentVariableConfig firstVar = cachedGoConfig.currentConfig().getEnvironments().get(0).getVariables().getVariable("var1");
    assertThat(cachedGoConfig.currentConfig().getEnvironments().get(0).getVariables().size()).isEqualTo(1);
    assertThat(firstVar).isNotNull();
    assertThat(firstVar.getValue()).isEqualTo("value1");
    // Second write: the cache must pick up the new variable as well.
    cachedGoConfig.writeWithLock(updateEnvironmentVariables("var2", "value2"));
    EnvironmentVariableConfig secondVar = cachedGoConfig.currentConfig().getEnvironments().get(0).getVariables().getVariable("var2");
    assertThat(cachedGoConfig.currentConfig().getEnvironments().get(0).getVariables().size()).isEqualTo(2);
    assertThat(secondVar).isNotNull();
    assertThat(secondVar.getValue()).isEqualTo("value2");
}
@Test
public void shouldReloadCachedConfigFromDisk() throws Exception {
    assertThat(cachedGoConfig.currentConfig().getEnvironments().size()).isEqualTo(1);
    // Overwrite the file on disk with a fixture that defines no environments.
    configHelper.writeXmlToConfigFile(ConfigFileFixture.TASKS_WITH_CONDITION);
    cachedGoConfig.forceReload();
    CruiseConfig reloaded = cachedGoConfig.currentConfig();
    assertThat(reloaded.getEnvironments().size()).isEqualTo(0);
}
@Test
// Two pipelines share template 'abc' with different #{command}/#{dir} params.
// The read (merged) view must have params interpolated; the edit view must keep the raw placeholders.
public void shouldInterpolateParamsInTemplate() throws Exception {
String content = "<cruise schemaVersion='" + GoConstants.CONFIG_SCHEMA_VERSION + "'>\n"
+ "<server>"
+ "<artifacts>\n"
+ "<artifactsDir>artifacts</artifactsDir>\n"
+ "</artifacts>\n"
+ "</server>"
+ "<pipelines>\n"
+ "<pipeline name='dev' template='abc'>\n"
+ "  <params>"
+ "    <param name='command'>ls</param>"
+ "    <param name='dir'>/tmp</param>"
+ "  </params>"
+ "  <materials>\n"
+ "    <svn url =\"svnurl\"/>"
+ "  </materials>\n"
+ "</pipeline>\n"
+ "<pipeline name='acceptance' template='abc'>\n"
+ "  <params>"
+ "    <param name='command'>twist</param>"
+ "    <param name='dir'>./acceptance</param>"
+ "  </params>"
+ "  <materials>\n"
+ "    <svn url =\"svnurl\"/>"
+ "  </materials>\n"
+ "</pipeline>\n"
+ "</pipelines>\n"
+ "<templates>\n"
+ "  <pipeline name='abc'>\n"
+ "    <stage name='stage1'>"
+ "      <jobs>"
+ "        <job name='job1'>"
+ "          <tasks>"
+ "            <exec command='/bin/#{command}' args='#{dir}'/>"
+ "          </tasks>"
+ "        </job>"
+ "      </jobs>"
+ "    </stage>"
+ "  </pipeline>\n"
+ "</templates>\n"
+ "</cruise>";
configHelper.writeXmlToConfigFile(content);
cachedGoConfig.forceReload();
// Merged view: params resolved per pipeline.
CruiseConfig cruiseConfig = cachedGoConfig.currentConfig();
ExecTask devExec = (ExecTask) cruiseConfig.pipelineConfigByName(new CaseInsensitiveString("dev")).getFirstStageConfig().jobConfigByConfigName(new CaseInsensitiveString("job1")).getTasks().first();
assertThat(devExec).isEqualTo(new ExecTask("/bin/ls", "/tmp", (String) null));
ExecTask acceptanceExec = (ExecTask) cruiseConfig.pipelineConfigByName(new CaseInsensitiveString("acceptance")).getFirstStageConfig().jobConfigByConfigName(new CaseInsensitiveString("job1")).getTasks().first();
assertThat(acceptanceExec).isEqualTo(new ExecTask("/bin/twist", "./acceptance", (String) null));
// Edit view: template keeps raw #{param} placeholders and pipelines have no inlined stages.
cruiseConfig = cachedGoConfig.loadForEditing();
devExec = (ExecTask) cruiseConfig.getTemplateByName(new CaseInsensitiveString("abc")).get(0).jobConfigByConfigName(new CaseInsensitiveString("job1")).getTasks().first();
assertThat(devExec).isEqualTo(new ExecTask("/bin/#{command}", "#{dir}", (String) null));
assertThat(cruiseConfig.pipelineConfigByName(new CaseInsensitiveString("dev")).size()).isEqualTo(0);
assertThat(cruiseConfig.pipelineConfigByName(new CaseInsensitiveString("acceptance")).size()).isEqualTo(0);
}
@Test
// '##' escapes '#': '/bin/#{command}##{b}' with command=ls#{a} must yield '/bin/ls#{a}#{b}'
// (the inner '#{a}' from the param value is NOT re-interpolated).
public void shouldHandleParamQuotingCorrectly() throws Exception {
String content = "<cruise schemaVersion='" + GoConstants.CONFIG_SCHEMA_VERSION + "'>\n"
+ "<server>"
+ "<artifacts>\n"
+ "<artifactsDir>artifacts</artifactsDir>\n"
+ "</artifacts>\n"
+ "</server>\n"
+ "<pipelines>\n"
+ "<pipeline name='dev'>\n"
+ "  <params>"
+ "    <param name='command'>ls#{a}</param>"
+ "    <param name='dir'>/tmp</param>"
+ "  </params>"
+ "  <materials>\n"
+ "    <svn url =\"svnurl\"/>"
+ "  </materials>\n"
+ "  <stage name='stage1'>"
+ "    <jobs>"
+ "      <job name='job1'>"
+ "        <tasks>"
+ "          <exec command='/bin/#{command}##{b}' args='#{dir}'/>"
+ "        </tasks>"
+ "      </job>"
+ "    </jobs>"
+ "  </stage>"
+ "</pipeline>\n"
+ "</pipelines>\n"
+ "</cruise>";
configHelper.writeXmlToConfigFile(content);
cachedGoConfig.forceReload();
CruiseConfig cruiseConfig = cachedGoConfig.currentConfig();
ExecTask devExec = (ExecTask) cruiseConfig.pipelineConfigByName(new CaseInsensitiveString("dev")).getFirstStageConfig().jobConfigByConfigName(new CaseInsensitiveString("job1")).getTasks().first();
assertThat(devExec).isEqualTo(new ExecTask("/bin/ls#{a}#{b}", "/tmp", (String) null));
}
@Test
// #{VERSION} in the label template is interpolated at load time; ${COUNT} is a runtime
// label token and must be left untouched.
public void shouldAllowParamsInLabelTemplates() throws Exception {
String content = "<cruise schemaVersion='" + GoConstants.CONFIG_SCHEMA_VERSION + "'>\n"
+ "<server>"
+ "<artifacts>\n"
+ "<artifactsDir>artifacts</artifactsDir>\n"
+ "</artifacts>\n"
+ "</server>\n"
+ "<pipelines>\n"
+ "<pipeline name='dev' labeltemplate='cruise-#{VERSION}-${COUNT}'>\n"
+ "  <params>"
+ "    <param name='VERSION'>1.2</param>"
+ "  </params>"
+ "  <materials>\n"
+ "    <svn url =\"svnurl\"/>"
+ "  </materials>\n"
+ "  <stage name='stage1'>"
+ "    <jobs>"
+ "      <job name='job1'>"
+ "        <tasks>"
+ "          <exec command='/bin/ls' args='some'/>"
+ "        </tasks>"
+ "      </job>"
+ "    </jobs>"
+ "  </stage>"
+ "</pipeline>\n"
+ "</pipelines>\n"
+ "</cruise>";
configHelper.writeXmlToConfigFile(content);
cachedGoConfig.forceReload();
CruiseConfig cruiseConfig = cachedGoConfig.currentConfig();
assertThat(cruiseConfig.pipelineConfigByName(new CaseInsensitiveString("dev")).getLabelTemplate()).isEqualTo("cruise-1.2-${COUNT}");
}
@Test
// '#{product}_WORKING_DIR' interpolates to 'GO_WORKING_DIR', colliding with the explicit
// variable of the same name — the config must be rejected as invalid.
public void shouldThrowErrorWhenEnvironmentVariablesAreDuplicate() throws Exception {
String content = "<cruise schemaVersion='" + GoConstants.CONFIG_SCHEMA_VERSION + "'>\n"
+ "<server>"
+ "<artifacts>\n"
+ "<artifactsDir>artifacts</artifactsDir>\n"
+ "</artifacts>\n"
+ "</server>\n"
+ "<pipelines>\n"
+ "<pipeline name='dev'>\n"
+ "  <params>"
+ "    <param name='product'>GO</param>"
+ "  </params>"
+ "  <environmentvariables>"
+ "    <variable name='#{product}_WORKING_DIR'><value>go_dir</value></variable>"
+ "    <variable name='GO_WORKING_DIR'><value>dir</value></variable>"
+ "  </environmentvariables>"
+ "  <materials>\n"
+ "    <svn url =\"svnurl\"/>"
+ "  </materials>\n"
+ "  <stage name='stage1'>"
+ "    <jobs>"
+ "      <job name='job1'>"
+ "        <tasks>"
+ "          <exec command='/bin/ls' args='some'/>"
+ "        </tasks>"
+ "      </job>"
+ "    </jobs>"
+ "  </stage>"
+ "</pipeline>\n"
+ "</pipelines>\n"
+ "</cruise>";
configHelper.writeXmlToConfigFile(content);
GoConfigValidity configValidity = cachedGoConfig.checkConfigFileValid();
assertThat(configValidity.isValid()).isEqualTo(false);
GoConfigValidity.InvalidGoConfig invalidGoConfig = (GoConfigValidity.InvalidGoConfig) configValidity;
assertThat(invalidGoConfig.errorMessage()).contains("Environment Variable name 'GO_WORKING_DIR' is not unique for pipeline 'dev'");
}
@Test
// A failed reload must leave the previously cached config object in place (same instance).
public void shouldReturnCachedConfigIfConfigFileIsInvalid() throws Exception {
    CruiseConfig before = cachedGoConfig.currentConfig();
    assertThat(before.getEnvironments().size()).isEqualTo(1);
    configHelper.writeXmlToConfigFile("invalid-xml");
    cachedGoConfig.forceReload();
    // isSameAs asserts reference identity with a descriptive failure message,
    // instead of asserting on an opaque '==' boolean.
    assertThat(cachedGoConfig.currentConfig()).isSameAs(before);
    assertThat(cachedGoConfig.checkConfigFileValid().isValid()).isFalse();
}
@Test
// A subsequent valid change must clear the "invalid config" state left by a bad reload.
public void shouldClearInvalidExceptionWhenConfigErrorsAreFixed() throws Exception {
    configHelper.writeXmlToConfigFile("invalid-xml");
    cachedGoConfig.forceReload();
    cachedGoConfig.currentConfig();
    // isFalse()/isTrue() are the idiomatic AssertJ boolean asserts (consistent with the rest of this file).
    assertThat(cachedGoConfig.checkConfigFileValid().isValid()).isFalse();
    configHelper.addEnvironments("uat");//some valid change
    CruiseConfig cruiseConfig = cachedGoConfig.currentConfig();
    assertThat(cruiseConfig.getEnvironments().size()).isEqualTo(2);
    assertThat(cachedGoConfig.checkConfigFileValid().isValid()).isTrue();
}
@Test
// An invalid config file must surface as a server-health error entry.
public void shouldSetServerHealthMessageWhenConfigFileIsInvalid() throws IOException {
    configHelper.writeXmlToConfigFile("invalid-xml");
    cachedGoConfig.forceReload();
    assertThat(cachedGoConfig.checkConfigFileValid().isValid()).isFalse();
    List<ServerHealthState> serverHealthStates = serverHealthService.logs();
    // Collection assertions report the actual contents on failure, unlike boolean checks.
    assertThat(serverHealthStates).isNotEmpty();
    assertThat(serverHealthStates).contains(ServerHealthState.error(GoConfigService.INVALID_CRUISE_CONFIG_XML, "Error on line 1: Content is not allowed in prolog.", HealthStateType.invalidConfig()));
}
@Test
// Seed an "invalid config" health error, then verify a successful reload clears it.
public void shouldClearServerHealthMessageWhenConfigFileIsValid() throws IOException {
    serverHealthService.update(ServerHealthState.error(GoConfigService.INVALID_CRUISE_CONFIG_XML, "Error on line 1: Content is not allowed in prolog.", HealthStateType.invalidConfig()));
    assertThat(findMessageFor(HealthStateType.invalidConfig()).isEmpty()).isFalse();
    configHelper.writeXmlToConfigFile(ConfigFileFixture.TASKS_WITH_CONDITION);
    cachedGoConfig.forceReload();
    // isTrue() is the idiomatic AssertJ boolean assert (was isEqualTo(true)).
    assertThat(cachedGoConfig.checkConfigFileValid().isValid()).isTrue();
    assertThat(findMessageFor(HealthStateType.invalidConfig()).isEmpty()).isTrue();
}
@Test
// Nulls the cached config via reflection to simulate the state before any successful load;
// with an unparseable file, currentConfig() must fall back to an empty BasicCruiseConfig.
public void shouldReturnDefaultCruiseConfigIfLoadingTheConfigFailsForTheFirstTime() throws Exception {
ReflectionUtil.setField(cachedGoConfig, "currentConfig", null);
configHelper.writeXmlToConfigFile("invalid-xml");
assertThat(cachedGoConfig.currentConfig()).isEqualTo(new BasicCruiseConfig());
}
@Test
public void shouldGetConfigForEditAndRead() throws Exception {
    CruiseConfig cruiseConfig = configHelper.load();
    addPipelineWithParams(cruiseConfig);
    configHelper.writeConfigFile(cruiseConfig);
    // The read (merged) view has #{params} resolved in material URLs...
    PipelineConfig mingleForRead = cachedGoConfig.currentConfig().pipelineConfigByName(new CaseInsensitiveString("mingle"));
    HgMaterialConfig hgForRead = (HgMaterialConfig) byFolder(mingleForRead.materialConfigs(), "folder");
    assertThat(hgForRead.getUrl()).isEqualTo("http://hg-server/repo-name");
    // ...while the edit view keeps the raw placeholders.
    PipelineConfig mingleForEdit = cachedGoConfig.loadForEditing().pipelineConfigByName(new CaseInsensitiveString("mingle"));
    HgMaterialConfig hgForEdit = (HgMaterialConfig) byFolder(mingleForEdit.materialConfigs(), "folder");
    assertThat(hgForEdit.getUrl()).isEqualTo("http://#{foo}/#{bar}");
}
@Test
// Writing a full config must refresh both views: currentConfig()/configHolder.config with
// params interpolated, loadForEditing()/configHolder.configForEdit with raw placeholders.
public void shouldLoadConfigForReadAndEditWhenNewXMLIsWritten() throws Exception {
String pipelineName = "mingle";
CruiseConfig configToBeWritten = magicalGoConfigXmlLoader.deserializeConfig(configXmlWithPipeline(pipelineName));
cachedGoConfig.writeFullConfigWithLock(new FullConfigUpdateCommand(configToBeWritten, cachedGoConfig.currentConfig().getMd5()));
PipelineConfig reloadedPipelineConfig = cachedGoConfig.currentConfig().pipelineConfigByName(new CaseInsensitiveString(pipelineName));
HgMaterialConfig hgMaterialConfig = (HgMaterialConfig) byFolder(reloadedPipelineConfig.materialConfigs(), "folder");
assertThat(hgMaterialConfig.getUrl()).isEqualTo("http://hg-server/repo-name");
reloadedPipelineConfig = cachedGoConfig.loadForEditing().pipelineConfigByName(new CaseInsensitiveString(pipelineName));
hgMaterialConfig = (HgMaterialConfig) byFolder(reloadedPipelineConfig.materialConfigs(), "folder");
assertThat(hgMaterialConfig.getUrl()).isEqualTo("http://#{foo}/#{bar}");
// The holder exposes the same pair of views.
GoConfigHolder configHolder = cachedGoConfig.loadConfigHolder();
reloadedPipelineConfig = configHolder.config.pipelineConfigByName(new CaseInsensitiveString(pipelineName));
hgMaterialConfig = (HgMaterialConfig) byFolder(reloadedPipelineConfig.materialConfigs(), "folder");
assertThat(hgMaterialConfig.getUrl()).isEqualTo("http://hg-server/repo-name");
reloadedPipelineConfig = configHolder.configForEdit.pipelineConfigByName(new CaseInsensitiveString(pipelineName));
hgMaterialConfig = (HgMaterialConfig) byFolder(reloadedPipelineConfig.materialConfigs(), "folder");
assertThat(hgMaterialConfig.getUrl()).isEqualTo("http://#{foo}/#{bar}");
}
@Test
// Updating through a command must refresh both views (read: params resolved, edit: raw).
public void shouldLoadConfigForReadAndEditWhenConfigIsUpdatedThoughACommand() throws Exception {
    // UpdateConfigCommand has a single abstract method (the original anonymous class only
    // overrode update), so a lambda replaces the anonymous class.
    cachedGoConfig.writeWithLock(cruiseConfig -> {
        addPipelineWithParams(cruiseConfig);
        return cruiseConfig;
    });
    PipelineConfig reloadedPipelineConfig = cachedGoConfig.currentConfig().pipelineConfigByName(new CaseInsensitiveString("mingle"));
    HgMaterialConfig hgMaterialConfig = (HgMaterialConfig) byFolder(reloadedPipelineConfig.materialConfigs(), "folder");
    assertThat(hgMaterialConfig.getUrl()).isEqualTo("http://hg-server/repo-name");
    reloadedPipelineConfig = cachedGoConfig.loadForEditing().pipelineConfigByName(new CaseInsensitiveString("mingle"));
    hgMaterialConfig = (HgMaterialConfig) byFolder(reloadedPipelineConfig.materialConfigs(), "folder");
    assertThat(hgMaterialConfig.getUrl()).isEqualTo("http://#{foo}/#{bar}");
}
// Builds a minimal full cruise-config XML containing one pipeline with the given name,
// whose hg material URL uses #{foo}/#{bar} params (used to distinguish read vs edit views).
private String configXmlWithPipeline(String pipelineName) {
return "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n" +
"<cruise xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:noNamespaceSchemaLocation=\"cruise-config.xsd\" schemaVersion=\"" + GoConstants.CONFIG_SCHEMA_VERSION + "\">\n" +
"  <server serverId=\"dd8d0f5a-7e8d-4948-a1c7-ddcedbac15d0\">\n" +
"    <artifacts>\n" +
"      <artifactsDir>artifacts</artifactsDir>\n" +
"    </artifacts>\n" +
"  </server>\n" +
"  <pipelines group=\"another\">\n" +
"    <pipeline name=\"" + pipelineName + "\">\n" +
"      <params>\n" +
"        <param name=\"foo\">hg-server</param>\n" +
"        <param name=\"bar\">repo-name</param>\n" +
"      </params>\n" +
"      <materials>\n" +
"        <svn url=\"http://some/svn/url\" dest=\"svnDir\" materialName=\"url\" />\n" +
"        <hg url=\"http://#{foo}/#{bar}\" dest=\"folder\" />\n" +
"      </materials>\n" +
"      <stage name=\"dev\">\n" +
"        <jobs>\n" +
"          <job name=\"ant\">\n" +
"            <tasks><ant /></tasks>\n" +
"          </job>\n" +
"        </jobs>\n" +
"      </stage>\n" +
"    </pipeline>\n" +
"  </pipelines>\n" +
"</cruise>\n" +
"\n";
}
@Test
// The command carries the md5 of the current config (no concurrent edit), so the save
// should be a plain UPDATED, not a MERGED result.
// NOTE: NoOverwriteUpdateConfigCommand has two abstract methods (update, unmodifiedMd5),
// so it cannot be a lambda.
public void shouldReturnUpdatedStatusWhenConfigIsUpdatedWithLatestCopy() {
final String md5 = cachedGoConfig.currentConfig().getMd5();
ConfigSaveState firstSaveState = cachedGoConfig.writeWithLock(new NoOverwriteUpdateConfigCommand() {
@Override
public CruiseConfig update(CruiseConfig cruiseConfig) throws Exception {
cruiseConfig.addPipeline("g1", PipelineConfigMother.createPipelineConfig("p1", "s1", "j1"));
return cruiseConfig;
}
@Override
public String unmodifiedMd5() {
return md5;
}
});
assertThat(firstSaveState).isEqualTo(ConfigSaveState.UPDATED);
}
@Test
// The second save reuses the now-stale md5 captured before the first save, so it must be
// merged with the intervening change and report MERGED instead of UPDATED.
public void shouldReturnMergedStatusWhenConfigIsMergedWithStaleCopy() {
final String md5 = cachedGoConfig.currentConfig().getMd5();
ConfigSaveState firstSaveState = cachedGoConfig.writeWithLock(new NoOverwriteUpdateConfigCommand() {
@Override
public CruiseConfig update(CruiseConfig cruiseConfig) throws Exception {
cruiseConfig.addPipeline("g1", PipelineConfigMother.createPipelineConfig("p1", "s1", "j1"));
return cruiseConfig;
}
@Override
public String unmodifiedMd5() {
return md5;
}
});
assertThat(firstSaveState).isEqualTo(ConfigSaveState.UPDATED);
// Same md5 as before the first save: this copy is stale by one revision.
ConfigSaveState secondSaveState = cachedGoConfig.writeWithLock(new NoOverwriteUpdateConfigCommand() {
@Override
public CruiseConfig update(CruiseConfig cruiseConfig) throws Exception {
cruiseConfig.server().setArtifactsDir("something");
return cruiseConfig;
}
@Override
public String unmodifiedMd5() {
return md5;
}
});
assertThat(secondSaveState).isEqualTo(ConfigSaveState.MERGED);
}
@Test
// A remote (config-repo) pipeline depends on stage 'stage' of an upstream pipeline in the
// main xml. A stale-md5 save that renames that stage would invalidate the merged partial,
// so the git-merge save must be rejected with the expected message.
public void shouldNotAllowAGitMergeOfConcurrentChangesIfTheChangeCausesMergedPartialsToBecomeInvalid() {
final String upstream = UUID.randomUUID().toString();
String remoteDownstream = "remote-downstream";
setupExternalConfigRepoWithDependencyMaterialOnPipelineInMainXml(upstream, remoteDownstream);
final String md5 = cachedGoConfig.currentConfig().getMd5();
// some random unrelated change to force a git merge workflow
cachedGoConfig.writeWithLock(new NoOverwriteUpdateConfigCommand() {
@Override
public CruiseConfig update(CruiseConfig cruiseConfig) throws Exception {
cruiseConfig.server().setJobTimeout("10");
return cruiseConfig;
}
@Override
public String unmodifiedMd5() {
return md5;
}
});
// Expectation must be registered before the failing save (JUnit ExpectedException rule).
thrown.expectMessage(String.format("Stage with name 'stage' does not exist on pipeline '%s', it is being referred to from pipeline 'remote-downstream' (%s at r1)", upstream, configRepo.getRepo().getDisplayName()));
cachedGoConfig.writeWithLock(new NoOverwriteUpdateConfigCommand() {
@Override
public CruiseConfig update(CruiseConfig cruiseConfig) throws Exception {
cruiseConfig.getPipelineConfigByName(new CaseInsensitiveString(upstream)).getFirstStageConfig().setName(new CaseInsensitiveString("new_name"));
return cruiseConfig;
}
@Override
public String unmodifiedMd5() {
return md5;
}
});
}
@Test
// The remote partial (r2) depends on stage 'new_name', which does not yet exist in the
// main xml. Once a stale-md5 (git-merge) save renames the upstream stage to 'new_name',
// the previously invalid partial must be promoted to the valid set.
public void shouldMarkAPartialAsValidIfItBecomesValidBecauseOfNewerChangesInMainXml_GitMergeWorkflow() {
final String upstream = UUID.randomUUID().toString();
String remoteDownstream = "remote-downstream";
setupExternalConfigRepoWithDependencyMaterialOnPipelineInMainXml(upstream, remoteDownstream);
// New revision (r2) of the partial now points at the not-yet-existing stage 'new_name'.
PartialConfig partialWithStageRenamed = ClonerFactory.instance().deepClone(cachedGoPartials.lastValidPartials().get(0));
PipelineConfig pipelineInRemoteConfigRepo = partialWithStageRenamed.getGroups().get(0).getPipelines().get(0);
pipelineInRemoteConfigRepo.materialConfigs().getDependencyMaterial().setStageName(new CaseInsensitiveString("new_name"));
partialWithStageRenamed.setOrigin(new RepoConfigOrigin(configRepo, "r2"));
partialConfigService.onSuccessPartialConfig(configRepo, partialWithStageRenamed);
final String md5 = cachedGoConfig.currentConfig().getMd5();
// some random unrelated change to force a git merge workflow
cachedGoConfig.writeWithLock(new NoOverwriteUpdateConfigCommand() {
@Override
public CruiseConfig update(CruiseConfig cruiseConfig) throws Exception {
cruiseConfig.server().setJobTimeout("10");
return cruiseConfig;
}
@Override
public String unmodifiedMd5() {
return md5;
}
});
ConfigSaveState saveState = cachedGoConfig.writeWithLock(new NoOverwriteUpdateConfigCommand() {
@Override
public CruiseConfig update(CruiseConfig cruiseConfig) throws Exception {
cruiseConfig.getPipelineConfigByName(new CaseInsensitiveString(upstream)).getFirstStageConfig().setName(new CaseInsensitiveString("new_name"));
return cruiseConfig;
}
@Override
public String unmodifiedMd5() {
return md5;
}
});
assertThat(saveState).isEqualTo(ConfigSaveState.MERGED);
assertThat(cachedGoPartials.lastValidPartials().get(0).getGroups().first().get(0).materialConfigs().getDependencyMaterial().getStageName()).isEqualTo(new CaseInsensitiveString("new_name"));
assertThat(goConfigService.getConfigForEditing().getPipelineConfigByName(new CaseInsensitiveString(upstream)).getFirstStageConfig().name()).isEqualTo(new CaseInsensitiveString("new_name"));
assertThat(goConfigService.getCurrentConfig().getPipelineConfigByName(new CaseInsensitiveString(upstream)).getFirstStageConfig().name()).isEqualTo(new CaseInsensitiveString("new_name"));
}
// Adds an upstream pipeline to the main xml and registers a remote (config-repo) pipeline,
// at revision "r1", whose dependency material points at that upstream pipeline.
private void setupExternalConfigRepoWithDependencyMaterialOnPipelineInMainXml(String upstream, String remoteDownstreamPipelineName) {
    PipelineConfig upstreamConfig = GoConfigMother.createPipelineConfigWithMaterialConfig(upstream, git("FOO"));
    goConfigService.addPipeline(upstreamConfig, "default");
    PartialConfig remotePartial = PartialConfigMother.pipelineWithDependencyMaterial(remoteDownstreamPipelineName, upstreamConfig, new RepoConfigOrigin(configRepo, "r1"));
    partialConfigService.onSuccessPartialConfig(configRepo, remotePartial);
}
@Test
public void shouldSaveConfigChangesWhenFullConfigIsBeingSavedFromConfigXmlTabAndAllKnownConfigRepoPartialsAreInvalid() throws Exception {
    cachedGoPartials.clear();
    // The only known partial is invalid, so there are no valid partials to merge.
    PartialConfig badPartial = PartialConfigMother.invalidPartial("invalid", new RepoConfigOrigin(configRepo, "revision1"));
    partialConfigService.onSuccessPartialConfig(configRepo, badPartial);
    CruiseConfig editedConfig = ClonerFactory.instance().deepClone(goConfigService.getConfigForEditing());
    editedConfig.server().setJobTimeout("10");
    String editedXml = goFileConfigDataSource.configAsXml(editedConfig, false);
    FileUtils.writeStringToFile(new File(goConfigDao.fileLocation()), editedXml, UTF_8);
    // Saving from the config-xml tab should still succeed despite the invalid partial.
    GoConfigValidity saveResult = goConfigService.fileSaver(false).saveXml(editedXml, goConfigDao.md5OfConfigFile());
    assertThat(saveResult.isValid()).isTrue();
    assertThat(cachedGoPartials.lastValidPartials().isEmpty()).isTrue();
    assertThat(cachedGoPartials.lastKnownPartials().contains(badPartial)).isTrue();
}
@Test
// An unrelated save must still succeed (UPDATED) even though the known partial contains
// an environment referring to an unknown pipeline.
public void shouldAllowFallbackMergeAndSaveWhenKnownPartialHasAnInvalidEnvironmentThatRefersToAnUnknownPipeline() throws Exception {
    cachedGoPartials.clear();
    PartialConfig partialConfigWithInvalidEnvironment = PartialConfigMother.withEnvironment("env", new RepoConfigOrigin(configRepo, "revision1"));
    partialConfigService.onSuccessPartialConfig(configRepo, partialConfigWithInvalidEnvironment);
    // UpdateConfigCommand has a single abstract method, so a lambda replaces the anonymous class.
    ConfigSaveState state = cachedGoConfig.writeWithLock(cruiseConfig -> {
        cruiseConfig.server().setJobTimeout("10");
        return cruiseConfig;
    });
    assertThat(state).isEqualTo(ConfigSaveState.UPDATED);
    assertThat(goConfigService.getCurrentConfig().server().getJobTimeout()).isEqualTo("10");
}
@Test
// Deleting a config repo must remove its remote pipelines, its cached partials (known and
// valid) and its scoped server-health messages, while leaving the other repo untouched.
public void shouldRemoveCorrespondingRemotePipelinesFromCachedGoConfigIfTheConfigRepoIsDeleted() {
    final ConfigRepoConfig repoConfig1 = createConfigRepoWithDefaultRules(MaterialConfigsMother.gitMaterialConfig("url1"), XmlPartialConfigProvider.providerName, "id1");
    final ConfigRepoConfig repoConfig2 = createConfigRepoWithDefaultRules(MaterialConfigsMother.gitMaterialConfig("url2"), XmlPartialConfigProvider.providerName, "id2");
    // UpdateConfigCommand has a single abstract method, so lambdas replace the anonymous classes.
    goConfigService.updateConfig(cruiseConfig -> {
        cruiseConfig.getConfigRepos().add(repoConfig1);
        cruiseConfig.getConfigRepos().add(repoConfig2);
        return cruiseConfig;
    });
    PartialConfig partialConfigInRepo1 = PartialConfigMother.withPipeline("pipeline_in_repo1", new RepoConfigOrigin(repoConfig1, "repo1_r1"));
    PartialConfig partialConfigInRepo2 = PartialConfigMother.withPipeline("pipeline_in_repo2", new RepoConfigOrigin(repoConfig2, "repo2_r1"));
    partialConfigService.onSuccessPartialConfig(repoConfig1, partialConfigInRepo1);
    partialConfigService.onSuccessPartialConfig(repoConfig2, partialConfigInRepo2);
    // introduce an invalid change in repo1 so that there is a server health message corresponding to it
    PartialConfig invalidPartialInRepo1Revision2 = PartialConfigMother.invalidPartial("pipeline_in_repo1", new RepoConfigOrigin(repoConfig1, "repo1_r2"));
    partialConfigService.onSuccessPartialConfig(repoConfig1, invalidPartialInRepo1Revision2);
    assertThat(serverHealthService.filterByScope(HealthStateScope.forPartialConfigRepo(repoConfig1)).size()).isEqualTo(1);
    assertThat(serverHealthService.filterByScope(HealthStateScope.forPartialConfigRepo(repoConfig1)).get(0).getMessage()).isEqualTo("Invalid Merged Configuration");
    assertThat(serverHealthService.filterByScope(HealthStateScope.forPartialConfigRepo(repoConfig1)).get(0).getDescription()).isEqualTo("Number of errors: 1+\n1. Invalid stage name ''. This must be alphanumeric and can contain underscores, hyphens and periods (however, it cannot start with a period). The maximum allowed length is 255 characters.;; \n- For Config Repo: url1 at repo1_r2");
    assertThat(serverHealthService.filterByScope(HealthStateScope.forPartialConfigRepo(repoConfig2)).isEmpty()).isTrue();
    int countBeforeDeletion = cachedGoConfig.currentConfig().getConfigRepos().size();
    // Delete repo1; everything derived from it should disappear.
    ConfigSaveState configSaveState = cachedGoConfig.writeWithLock(cruiseConfig -> {
        cruiseConfig.getConfigRepos().remove(repoConfig1);
        return cruiseConfig;
    });
    assertThat(configSaveState).isEqualTo(ConfigSaveState.UPDATED);
    assertThat(cachedGoConfig.currentConfig().getConfigRepos().size()).isEqualTo(countBeforeDeletion - 1);
    assertThat(cachedGoConfig.currentConfig().getConfigRepos().contains(repoConfig2)).isTrue();
    assertThat(cachedGoConfig.currentConfig().getAllPipelineNames().contains(new CaseInsensitiveString("pipeline_in_repo1"))).isFalse();
    assertThat(cachedGoConfig.currentConfig().getAllPipelineNames().contains(new CaseInsensitiveString("pipeline_in_repo2"))).isTrue();
    assertThat(cachedGoPartials.lastKnownPartials().size()).isEqualTo(1);
    assertThat(((RepoConfigOrigin) cachedGoPartials.lastKnownPartials().get(0).getOrigin()).getMaterial().getFingerprint().equals(repoConfig2.getRepo().getFingerprint())).isTrue();
    // Lambdas replace the anonymous java.util.function.Predicate implementations.
    assertThat(cachedGoPartials.lastKnownPartials().stream()
            .filter(item -> ((RepoConfigOrigin) item.getOrigin()).getMaterial().getFingerprint().equals(repoConfig1.getRepo().getFingerprint()))
            .findFirst().orElse(null)).isNull();
    assertThat(cachedGoPartials.lastValidPartials().size()).isEqualTo(1);
    assertThat(((RepoConfigOrigin) cachedGoPartials.lastValidPartials().get(0).getOrigin()).getMaterial().getFingerprint().equals(repoConfig2.getRepo().getFingerprint())).isTrue();
    assertThat(cachedGoPartials.lastValidPartials().stream()
            .filter(item -> ((RepoConfigOrigin) item.getOrigin()).getMaterial().getFingerprint().equals(repoConfig1.getRepo().getFingerprint()))
            .findFirst().orElse(null)).isNull();
    assertThat(serverHealthService.filterByScope(HealthStateScope.forPartialConfigRepo(repoConfig1)).isEmpty()).isTrue();
    assertThat(serverHealthService.filterByScope(HealthStateScope.forPartialConfigRepo(repoConfig2)).isEmpty()).isTrue();
}
@Test
public void shouldUpdateConfigWhenPartialsAreNotConfigured() throws GitAPIException, IOException {
    String shaBefore = configRepository.getCurrentRevCommit().getName();
    BasicCruiseConfig newConfig = GoConfigMother.configWithPipelines("pipeline1");
    ConfigSaveState saveState = cachedGoConfig.writeFullConfigWithLock(new FullConfigUpdateCommand(newConfig, goConfigService.configFileMd5()));
    String shaAfter = configRepository.getCurrentRevCommit().getName();
    String xmlOnDisk = FileUtils.readFileToString(new File(goConfigDao.fileLocation()), UTF_8);
    assertThat(saveState).isEqualTo(ConfigSaveState.UPDATED);
    assertThat(cachedGoConfig.loadForEditing()).isEqualTo(newConfig);
    // The save must create a new commit in the config git repository...
    assertThat(shaAfter).isNotEqualTo(shaBefore);
    // ...and cache, git revision, and file on disk must all agree.
    assertThat(cachedGoConfig.loadForEditing().getMd5()).isEqualTo(configRepository.getCurrentRevision().getMd5());
    assertThat(cachedGoConfig.currentConfig().getMd5()).isEqualTo(configRepository.getCurrentRevision().getMd5());
    assertThat(xmlOnDisk).isEqualTo(configRepository.getCurrentRevision().getContent());
}
@Test
public void writeFullConfigWithLockShouldUpdateReloadStrategyToEnsureReloadIsSkippedInAbsenceOfConfigFileChanges() throws GitAPIException, IOException {
    BasicCruiseConfig newConfig = GoConfigMother.configWithPipelines("pipeline1");
    ConfigSaveState saveState = cachedGoConfig.writeFullConfigWithLock(new FullConfigUpdateCommand(newConfig, goConfigService.configFileMd5()));
    String shaAfterSave = configRepository.getCurrentRevCommit().getName();
    assertThat(saveState).isEqualTo(ConfigSaveState.UPDATED);
    // A forced reload with no file changes must not produce another commit.
    cachedGoConfig.forceReload();
    String shaAfterReload = configRepository.getCurrentRevCommit().getName();
    assertThat(shaAfterReload).isEqualTo(shaAfterSave);
}
@Test
// A full-config save with a valid partial present must commit, keep cache/git/disk in
// sync, and leave the partial in both the known and valid sets.
public void shouldUpdateConfigWhenPartialsAreConfigured() throws GitAPIException, IOException {
String gitShaBeforeSave = configRepository.getCurrentRevCommit().getName();
PartialConfig validPartial = PartialConfigMother.withPipeline("remote_pipeline", new RepoConfigOrigin(configRepo, "revision1"));
partialConfigService.onSuccessPartialConfig(configRepo, validPartial);
assertThat(cachedGoPartials.lastValidPartials().contains(validPartial)).isTrue();
assertThat(cachedGoPartials.lastKnownPartials().contains(validPartial)).isTrue();
CruiseConfig config = ClonerFactory.instance().deepClone(cachedGoConfig.loadForEditing());
config.addEnvironment(UUID.randomUUID().toString());
ConfigSaveState state = cachedGoConfig.writeFullConfigWithLock(new FullConfigUpdateCommand(config, goConfigService.configFileMd5()));
String gitShaAfterSave = configRepository.getCurrentRevCommit().getName();
String configXmlFromConfigFolder = FileUtils.readFileToString(new File(goConfigDao.fileLocation()), UTF_8);
assertThat(state).isEqualTo(ConfigSaveState.UPDATED);
assertThat(cachedGoConfig.loadForEditing()).isEqualTo(config);
assertThat(gitShaAfterSave).isNotEqualTo(gitShaBeforeSave);
assertThat(cachedGoConfig.loadForEditing().getMd5()).isEqualTo(configRepository.getCurrentRevision().getMd5());
assertThat(cachedGoConfig.currentConfig().getMd5()).isEqualTo(configRepository.getCurrentRevision().getMd5());
assertThat(configXmlFromConfigFolder).isEqualTo(configRepository.getCurrentRevision().getContent());
assertThat(cachedGoPartials.lastValidPartials().contains(validPartial)).isTrue();
assertThat(cachedGoPartials.lastKnownPartials().contains(validPartial)).isTrue();
}
@Test
// With only an invalid known partial (no valid ones), a full-config save must still
// succeed; the invalid partial stays in the known set and the valid set stays empty.
public void shouldUpdateConfigWithNoValidPartialsAndInvalidKnownPartials() throws GitAPIException, IOException {
String gitShaBeforeSave = configRepository.getCurrentRevCommit().getName();
PartialConfig invalidPartial = PartialConfigMother.invalidPartial("invalid", new RepoConfigOrigin(configRepo, "revision1"));
partialConfigService.onSuccessPartialConfig(configRepo, invalidPartial);
assertThat(cachedGoPartials.lastValidPartials().isEmpty()).isTrue();
assertThat(cachedGoPartials.lastKnownPartials().contains(invalidPartial)).isTrue();
CruiseConfig config = ClonerFactory.instance().deepClone(cachedGoConfig.loadForEditing());
config.addEnvironment(UUID.randomUUID().toString());
ConfigSaveState state = cachedGoConfig.writeFullConfigWithLock(new FullConfigUpdateCommand(config, goConfigService.configFileMd5()));
String gitShaAfterSave = configRepository.getCurrentRevCommit().getName();
String configXmlFromConfigFolder = FileUtils.readFileToString(new File(goConfigDao.fileLocation()), UTF_8);
assertThat(state).isEqualTo(ConfigSaveState.UPDATED);
assertThat(cachedGoConfig.loadForEditing()).isEqualTo(config);
assertThat(gitShaAfterSave).isNotEqualTo(gitShaBeforeSave);
assertThat(cachedGoConfig.loadForEditing().getMd5()).isEqualTo(configRepository.getCurrentRevision().getMd5());
assertThat(cachedGoConfig.currentConfig().getMd5()).isEqualTo(configRepository.getCurrentRevision().getMd5());
assertThat(configXmlFromConfigFolder).isEqualTo(configRepository.getCurrentRevision().getContent());
assertThat(cachedGoPartials.lastValidPartials().isEmpty()).isTrue();
assertThat(cachedGoPartials.lastKnownPartials().contains(invalidPartial)).isTrue();
}
@Test
// With a valid partial (revision1) superseded by an invalid one (revision2), a full-config
// save must succeed; the valid set keeps revision1 and the known set keeps revision2.
public void shouldUpdateConfigWithValidPartialsAndInvalidKnownPartials() throws GitAPIException, IOException {
String gitShaBeforeSave = configRepository.getCurrentRevCommit().getName();
PartialConfig validPartial = PartialConfigMother.withPipeline("remote_pipeline", new RepoConfigOrigin(configRepo, "revision1"));
PartialConfig invalidPartial = PartialConfigMother.invalidPartial("invalid", new RepoConfigOrigin(configRepo, "revision2"));
partialConfigService.onSuccessPartialConfig(configRepo, validPartial);
partialConfigService.onSuccessPartialConfig(configRepo, invalidPartial);
assertThat(cachedGoPartials.lastValidPartials().contains(validPartial)).isTrue();
assertThat(cachedGoPartials.lastKnownPartials().contains(invalidPartial)).isTrue();
CruiseConfig config = ClonerFactory.instance().deepClone(cachedGoConfig.loadForEditing());
config.addEnvironment(UUID.randomUUID().toString());
ConfigSaveState state = cachedGoConfig.writeFullConfigWithLock(new FullConfigUpdateCommand(config, goConfigService.configFileMd5()));
String gitShaAfterSave = configRepository.getCurrentRevCommit().getName();
String configXmlFromConfigFolder = FileUtils.readFileToString(new File(goConfigDao.fileLocation()), UTF_8);
assertThat(state).isEqualTo(ConfigSaveState.UPDATED);
assertThat(cachedGoConfig.loadForEditing()).isEqualTo(config);
assertThat(gitShaAfterSave).isNotEqualTo(gitShaBeforeSave);
assertThat(cachedGoConfig.loadForEditing().getMd5()).isEqualTo(configRepository.getCurrentRevision().getMd5());
assertThat(cachedGoConfig.currentConfig().getMd5()).isEqualTo(configRepository.getCurrentRevision().getMd5());
assertThat(configXmlFromConfigFolder).isEqualTo(configRepository.getCurrentRevision().getContent());
assertThat(cachedGoPartials.lastValidPartials().contains(validPartial)).isTrue();
assertThat(cachedGoPartials.lastKnownPartials().contains(invalidPartial)).isTrue();
}
@Test
public void shouldErrorOutOnUpdateConfigWithValidPartials_WithMainConfigBreakingPartials() throws GitAPIException, IOException {
    // Remote partial "downstream" has a dependency material on pipeline "upstream" defined in the main XML.
    setupExternalConfigRepoWithDependencyMaterialOnPipelineInMainXml("upstream", "downstream");
    String gitShaBeforeSave = configRepository.getCurrentRevCommit().getName();
    CruiseConfig originalConfig = cachedGoConfig.loadForEditing();
    CruiseConfig editedConfig = ClonerFactory.instance().deepClone(originalConfig);
    // Removing the "default" group deletes "upstream", breaking the valid partial's dependency.
    editedConfig.getGroups().remove(editedConfig.findGroup("default"));
    try {
        cachedGoConfig.writeFullConfigWithLock(new FullConfigUpdateCommand(editedConfig, goConfigService.configFileMd5()));
        fail("Expected the test to fail");
    } catch (Exception e) {
        // A failed save must leave every store untouched: in-memory config, git repo and the XML on disk.
        String gitShaAfterSave = configRepository.getCurrentRevCommit().getName();
        String configXmlFromConfigFolder = FileUtils.readFileToString(new File(goConfigDao.fileLocation()), UTF_8);
        assertThat(cachedGoConfig.loadForEditing()).isEqualTo(originalConfig);
        assertThat(gitShaAfterSave).isEqualTo(gitShaBeforeSave);
        assertThat(cachedGoConfig.loadForEditing().getMd5()).isEqualTo(configRepository.getCurrentRevision().getMd5());
        assertThat(cachedGoConfig.currentConfig().getMd5()).isEqualTo(configRepository.getCurrentRevision().getMd5());
        assertThat(configXmlFromConfigFolder).isEqualTo(configRepository.getCurrentRevision().getContent());
        // The previously valid partial (revision "r1") must still be the last valid one.
        RepoConfigOrigin origin = (RepoConfigOrigin) cachedGoPartials.lastValidPartials().get(0).getOrigin();
        assertThat(origin.getRevision()).isEqualTo("r1");
    }
}
@Test
public void shouldMarkAPreviousInvalidPartialAsValid_IfMainXMLSatisfiesTheDependency() throws GitAPIException, IOException {
    String gitShaBeforeSave = configRepository.getCurrentRevCommit().getName();
    // Remote partial "downstream" depends on pipeline "upstream", which does not exist yet,
    // so on arrival it is only "known" and not "valid".
    PipelineConfig upstream = PipelineConfigMother.createPipelineConfig("upstream", "S", "J");
    PartialConfig partialConfig = PartialConfigMother.pipelineWithDependencyMaterial("downstream", upstream, new RepoConfigOrigin(configRepo, "r2"));
    partialConfigService.onSuccessPartialConfig(configRepo, partialConfig);
    assertThat(cachedGoPartials.lastKnownPartials().contains(partialConfig)).isTrue();
    assertThat(cachedGoPartials.lastValidPartials().isEmpty()).isTrue();
    CruiseConfig originalConfig = cachedGoConfig.loadForEditing();
    CruiseConfig editedConfig = ClonerFactory.instance().deepClone(originalConfig);
    // Adding "upstream" to the main XML satisfies the partial's dependency material.
    editedConfig.addPipeline("default", upstream);
    ConfigSaveState state = cachedGoConfig.writeFullConfigWithLock(new FullConfigUpdateCommand(editedConfig, goConfigService.configFileMd5()));
    String gitShaAfterSave = configRepository.getCurrentRevCommit().getName();
    String configXmlFromConfigFolder = FileUtils.readFileToString(new File(goConfigDao.fileLocation()), UTF_8);
    assertThat(state).isEqualTo(ConfigSaveState.UPDATED);
    assertThat(cachedGoConfig.loadForEditing()).isEqualTo(editedConfig);
    assertThat(gitShaAfterSave).isNotEqualTo(gitShaBeforeSave);
    assertThat(cachedGoConfig.loadForEditing().getMd5()).isEqualTo(configRepository.getCurrentRevision().getMd5());
    assertThat(cachedGoConfig.currentConfig().getMd5()).isEqualTo(configRepository.getCurrentRevision().getMd5());
    assertThat(configXmlFromConfigFolder).isEqualTo(configRepository.getCurrentRevision().getContent());
    // After the save the partial is promoted: it now appears in the valid cache at its "r2" origin.
    RepoConfigOrigin origin = (RepoConfigOrigin) cachedGoPartials.lastValidPartials().get(0).getOrigin();
    assertThat(origin.getRevision()).isEqualTo("r2");
    assertThat(cachedGoPartials.lastKnownPartials().contains(partialConfig)).isTrue();
    assertThat(cachedGoPartials.lastValidPartials().contains(partialConfig)).isTrue();
}
/**
 * Secure publish-artifact properties ("Image" is marked secure in
 * {@code setupMetadataForPlugin()}) must be encrypted when the config is saved:
 * the plain value is gone and only the encrypted value remains.
 */
@Test
public void shouldEncryptPluggablePublishArtifactPropertiesDuringSave() throws Exception {
    resetCipher.setupAESCipherFile();
    resetCipher.setupDESCipherFile();
    setupMetadataForPlugin();
    ArtifactStore artifactStore = new ArtifactStore("dockerhub", "cd.go.artifact.docker.registry");
    artifactStoreService.create(Username.ANONYMOUS, artifactStore, new HttpLocalizedOperationResult());
    // Load a config file that defines pluggable publish artifacts with parameters.
    File configFile = new File(new SystemEnvironment().getCruiseConfigFile());
    String config = goConfigMigration.upgradeIfNecessary(IOUtils.toString(getClass().getResourceAsStream("/data/pluggable_artifacts_with_params.xml"), UTF_8));
    FileUtils.writeStringToFile(configFile, config, UTF_8);
    cachedGoConfig.forceReload();
    // (Renamed local: fixed the "Aftifact" typo in the original variable name.)
    Configuration ancestorPluggablePublishArtifactConfigAfterEncryption = goConfigDao.loadConfigHolder()
            .configForEdit.pipelineConfigByName(new CaseInsensitiveString("ancestor"))
            .getExternalArtifactConfigs().get(0).getConfiguration();
    assertThat(ancestorPluggablePublishArtifactConfigAfterEncryption.getProperty("Image").getValue()).isEqualTo("IMAGE_SECRET");
    assertThat(ancestorPluggablePublishArtifactConfigAfterEncryption.getProperty("Image").getEncryptedValue()).isEqualTo(new GoCipher().encrypt("IMAGE_SECRET"));
    // The plain-text config value must have been cleared after encryption.
    assertThat(ancestorPluggablePublishArtifactConfigAfterEncryption.getProperty("Image").getConfigValue()).isNull();
}
@Test
public void shouldEncryptPluggableFetchArtifactPropertiesDuringSave() throws IOException, CryptoException {
    resetCipher.setupAESCipherFile();
    resetCipher.setupDESCipherFile();
    setupMetadataForPlugin();
    ArtifactStore artifactStore = new ArtifactStore("dockerhub", "cd.go.artifact.docker.registry");
    artifactStoreService.create(Username.ANONYMOUS, artifactStore, new HttpLocalizedOperationResult());
    // Load a config file that defines pluggable fetch-artifact tasks with parameters.
    File configFile = new File(new SystemEnvironment().getCruiseConfigFile());
    String config = goConfigMigration.upgradeIfNecessary(IOUtils.toString(getClass().getResourceAsStream("/data/pluggable_artifacts_with_params.xml"), UTF_8));
    FileUtils.writeStringToFile(configFile, config, UTF_8);
    cachedGoConfig.forceReload();
    // First task of the first job in the first stage of pipeline "child" is the fetch task.
    PipelineConfig child = goConfigDao.loadConfigHolder().configForEdit.pipelineConfigByName(new CaseInsensitiveString("child"));
    Configuration childFetchConfigAfterEncryption = ((FetchPluggableArtifactTask) child
            .get(0).getJobs().get(0).tasks().get(0)).getConfiguration();
    // "FetchProperty" is secure (see setupMetadataForPlugin), so it must be stored encrypted.
    assertThat(childFetchConfigAfterEncryption.getProperty("FetchProperty").getValue()).isEqualTo("SECRET");
    assertThat(childFetchConfigAfterEncryption.getProperty("FetchProperty").getEncryptedValue()).isEqualTo(new GoCipher().encrypt("SECRET"));
    assertThat(childFetchConfigAfterEncryption.getProperty("FetchProperty").getConfigValue()).isNull();
}
/**
 * Registers artifact-plugin metadata for "cd.go.artifact.docker.registry" so
 * the tests can exercise pluggable publish/fetch artifact encryption.
 * Properties built with {@code new Metadata(required, secure)}; secure ones
 * ("Image", "FetchProperty", "Password") are encrypted on save.
 */
private void setupMetadataForPlugin() {
    // Identity under which the metadata is registered.
    PluginDescriptor descriptor = GoPluginDescriptor.builder().id("cd.go.artifact.docker.registry").build();

    // Store-level settings: RegistryURL (required), Username, Password (secure).
    PluginConfiguration registryUrlSetting = new PluginConfiguration("RegistryURL", new Metadata(true, false));
    PluginConfiguration usernameSetting = new PluginConfiguration("Username", new Metadata(false, false));
    PluginConfiguration passwordSetting = new PluginConfiguration("Password", new Metadata(false, true));
    PluggableInstanceSettings storeSettings = new PluggableInstanceSettings(asList(registryUrlSetting, usernameSetting, passwordSetting));

    // Publish-artifact settings: Image is secure.
    PluginConfiguration buildFileSetting = new PluginConfiguration("BuildFile", new Metadata(false, false));
    PluginConfiguration imageSetting = new PluginConfiguration("Image", new Metadata(false, true));
    PluginConfiguration tagSetting = new PluginConfiguration("Tag", new Metadata(false, false));
    PluggableInstanceSettings publishSettings = new PluggableInstanceSettings(asList(buildFileSetting, imageSetting, tagSetting));

    // Fetch-artifact settings: FetchProperty is secure.
    PluginConfiguration fetchPropertySetting = new PluginConfiguration("FetchProperty", new Metadata(false, true));
    PluginConfiguration fetchTagSetting = new PluginConfiguration("Tag", new Metadata(false, false));
    PluggableInstanceSettings fetchSettings = new PluggableInstanceSettings(asList(fetchPropertySetting, fetchTagSetting));

    ArtifactPluginInfo pluginInfo = new ArtifactPluginInfo(descriptor, storeSettings, publishSettings, fetchSettings, null, new Capabilities());
    ArtifactMetadataStore.instance().setPluginInfo(pluginInfo);
}
/**
 * Adds a pipeline "mingle" (group "another") whose hg material URL is built
 * from the params #{foo} and #{bar}.
 */
private void addPipelineWithParams(CruiseConfig cruiseConfig) {
    PipelineConfig mingle = PipelineConfigMother.createPipelineConfig("mingle", "dev", "ant");
    mingle.addParam(new ParamConfig("foo", "hg-server"));
    mingle.addParam(new ParamConfig("bar", "repo-name"));
    mingle.addMaterialConfig(MaterialConfigsMother.hgMaterialConfig("http://#{foo}/#{bar}", "folder"));
    cruiseConfig.addPipeline("another", mingle);
}
/**
 * Returns the first SCM material in {@code materialConfigs} whose destination
 * folder equals {@code folder} (null-safe comparison), or null when none matches.
 */
public MaterialConfig byFolder(MaterialConfigs materialConfigs, String folder) {
    for (MaterialConfig candidate : materialConfigs) {
        // Only SCM materials carry a checkout folder worth comparing.
        if (!(candidate instanceof ScmMaterialConfig)) {
            continue;
        }
        if (Objects.equals(folder, candidate.getFolder())) {
            return candidate;
        }
    }
    return null;
}
/**
 * Returns a command that adds the given environment variable to the first
 * environment in the config.
 */
private UpdateConfigCommand updateEnvironmentVariables(final String name, final String value) {
    return new UpdateConfigCommand() {
        @Override
        public CruiseConfig update(CruiseConfig cruiseConfig) {
            // Mutates the first environment in place; assumes at least one environment exists — TODO confirm callers guarantee this.
            EnvironmentConfig environmentConfig = cruiseConfig.getEnvironments().get(0);
            environmentConfig.addEnvironmentVariable(name, value);
            return cruiseConfig;
        }
    };
}
/**
 * Deletes a partial-config file from the external config repo working copy and
 * commits the removal.
 */
private void deletePartial(String partial) {
    File partialFile = new File(externalConfigRepo, partial);
    FileUtils.deleteQuietly(partialFile);
    gitAddDotAndCommit(externalConfigRepo);
}
/**
 * Copies the given classpath resource into the external config repository and
 * commits it; delegates to {@link #checkInPartial(String, File)}.
 *
 * @param partial classpath location of the partial-config file or directory
 * @throws IOException if the resource cannot be resolved or copied
 */
private void checkinPartial(String partial) throws IOException {
    // Inlined the redundant local alias of the externalConfigRepo field.
    checkInPartial(partial, externalConfigRepo);
}
/**
 * Copies the classpath resource {@code partial} (a single file or a whole
 * directory) into the given repository working copy and commits everything.
 */
private void checkInPartial(String partial, File externalConfigRepo) throws IOException {
    File source = new ClassPathResource(partial).getFile();
    if (source.isDirectory()) {
        FileUtils.copyDirectory(source, externalConfigRepo);
    } else {
        FileUtils.copyFileToDirectory(source, externalConfigRepo);
    }
    gitAddDotAndCommit(externalConfigRepo);
}
// Test double that counts how many times the config-changed callback fires.
private class ConfigChangeListenerStub implements ConfigChangedListener {
    // Number of onConfigChange notifications received so far.
    private int invocationCount = 0;
    @Override
    public void onConfigChange(CruiseConfig newCruiseConfig) {
        invocationCount++;
    }
}
/**
 * Seeds {@code configRepo} from the default test resource
 * "external_git_config_repo" and returns the resulting initial commit.
 */
private Modification setupExternalConfigRepo(File configRepo) throws IOException {
    return setupExternalConfigRepo(configRepo, "external_git_config_repo");
}
/**
 * Copies the given classpath test resource into {@code configRepo}, turns the
 * directory into a git repository with one initial commit, and returns a
 * Modification whose revision is the short SHA of that commit.
 */
private Modification setupExternalConfigRepo(File configRepo, String configRepoTestResource) throws IOException {
    ClassPathResource resource = new ClassPathResource(configRepoTestResource);
    FileUtils.copyDirectory(resource.getFile(), configRepo);
    // Initialize the copied directory as a git repo; gpg signing is disabled so
    // commits succeed on machines without signing keys.
    CommandLine.createCommandLine("git").withEncoding("utf-8").withArg("init").withArg(configRepo.getAbsolutePath()).runOrBomb(null);
    CommandLine.createCommandLine("git").withEncoding("utf-8").withArgs("config", "commit.gpgSign", "false").withWorkingDir(configRepo.getAbsoluteFile()).runOrBomb(null);
    gitAddDotAndCommit(configRepo);
    // Read back the abbreviated SHA of the commit that was just created.
    ConsoleResult consoleResult = CommandLine.createCommandLine("git").withEncoding("utf-8").withArg("log").withArg("-1").withArg("--pretty=format:%h").withWorkingDir(configRepo).runOrBomb(null);
    Modification modification = new Modification();
    modification.setRevision(consoleResult.outputAsString());
    return modification;
}
/**
 * Stages every change in {@code configRepo} and commits it with a fixed test
 * author identity ("user" / go_test@go_test.me) and the message "initial commit".
 */
private void gitAddDotAndCommit(File configRepo) {
    // Configure a deterministic author identity (order relative to "add" is irrelevant;
    // only the commit consumes it).
    CommandLine.createCommandLine("git").withEncoding("utf-8").withArg("config").withArg("user.email").withArg("go_test@go_test.me").withWorkingDir(configRepo).runOrBomb(null);
    CommandLine.createCommandLine("git").withEncoding("utf-8").withArg("config").withArg("user.name").withArg("user").withWorkingDir(configRepo).runOrBomb(null);
    // Stage everything, including deletions, then commit.
    CommandLine.createCommandLine("git").withEncoding("utf-8").withArg("add").withArg("-A").withArg(".").withWorkingDir(configRepo).runOrBomb(null);
    CommandLine.createCommandLine("git").withEncoding("utf-8").withArg("commit").withArg("-m").withArg("initial commit").withWorkingDir(configRepo).runOrBomb(null);
}
/**
 * Builds a ConfigRepoConfig and grants it the permissive default rule
 * (allow "refer" on every entity type and name) that most tests here need.
 */
private ConfigRepoConfig createConfigRepoWithDefaultRules(MaterialConfig materialConfig, String pluginId, String id) {
    ConfigRepoConfig repoConfig = ConfigRepoConfig.createConfigRepoConfig(materialConfig, pluginId, id);
    repoConfig.getRules().add(new Allow("refer", "*", "*"));
    return repoConfig;
}
}
|
|
/*
* Copyright 2016 Naver Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.navercorp.pinpoint.common.server.bo.codec.stat;
import com.navercorp.pinpoint.common.buffer.Buffer;
import com.navercorp.pinpoint.common.buffer.FixedBuffer;
import com.navercorp.pinpoint.common.server.bo.serializer.stat.AgentStatDecodingContext;
import com.navercorp.pinpoint.common.server.bo.serializer.stat.AgentStatUtils;
import com.navercorp.pinpoint.common.server.bo.stat.AgentStatDataPoint;
import com.navercorp.pinpoint.common.server.bo.stat.AgentStatType;
import org.junit.Assert;
import org.junit.Test;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Random;
/**
* @author HyunGil Jeong
*/
/**
 * Round-trip test for {@link AgentStatEncoder}/{@link AgentStatDecoder}: a
 * random batch of stats must decode back to exactly the values that were
 * encoded (qualifier carries the timestamp delta, value carries the payload).
 *
 * @author HyunGil Jeong
 */
public class AgentStatEncoderTest {
    private static final String AGENT_ID = "testAgentId";
    private static final long AGENT_START_TIMESTAMP = System.currentTimeMillis();
    // Interval between consecutive data points, in milliseconds.
    private static final long COLLECT_INTERVAL = 5000L;
    private static final Random RANDOM = new Random();
    // Collaborators are now final: they are initialized once and never reassigned.
    private final AgentStatCodec<TestAgentStat> codec = new TestAgentStatCodec();
    private final AgentStatEncoder<TestAgentStat> encoder = new AgentStatEncoder<TestAgentStat>(codec);
    private final AgentStatDecoder<TestAgentStat> decoder = new AgentStatDecoder<TestAgentStat>(Arrays.asList(codec));

    @Test
    public void stats_should_be_encoded_and_decoded_into_same_value() {
        long initialTimestamp = System.currentTimeMillis();
        // 1..20 stats so the empty case never hides an encoding bug.
        int numStats = RANDOM.nextInt(20) + 1;
        List<TestAgentStat> expectedAgentStats = this.createTestAgentStats(initialTimestamp, numStats);
        long baseTimestamp = AgentStatUtils.getBaseTimestamp(initialTimestamp);
        long timestampDelta = initialTimestamp - baseTimestamp;
        // Encode qualifier (timestamp delta) and value (the stats payload) separately,
        // mirroring how they are stored as column qualifier/value.
        ByteBuffer qualifierBuffer = encoder.encodeQualifier(timestampDelta);
        ByteBuffer valueBuffer = encoder.encodeValue(expectedAgentStats);
        Buffer encodedQualifierBuffer = new FixedBuffer(qualifierBuffer.array());
        Buffer encodedValueBuffer = new FixedBuffer(valueBuffer.array());
        AgentStatDecodingContext context = new AgentStatDecodingContext();
        context.setAgentId(AGENT_ID);
        context.setBaseTimestamp(baseTimestamp);
        List<TestAgentStat> decodedAgentStats = decode(encodedQualifierBuffer, encodedValueBuffer, context);
        verify(expectedAgentStats, decodedAgentStats);
    }

    /**
     * Creates {@code numStats} stats spaced COLLECT_INTERVAL apart starting at
     * {@code initialTimestamp}, each with a random value.
     */
    private List<TestAgentStat> createTestAgentStats(long initialTimestamp, int numStats) {
        List<TestAgentStat> agentStats = new ArrayList<TestAgentStat>(numStats);
        for (int i = 0; i < numStats; ++i) {
            long timestamp = initialTimestamp + (COLLECT_INTERVAL * i);
            TestAgentStat agentStat = new TestAgentStat();
            agentStat.setAgentId(AGENT_ID);
            agentStat.setStartTimestamp(AGENT_START_TIMESTAMP);
            agentStat.setTimestamp(timestamp);
            agentStat.setValue(RANDOM.nextLong());
            agentStats.add(agentStat);
        }
        return agentStats;
    }

    // Protected so subclasses can customise the comparison (kept as-is for compatibility).
    protected void verify(List<TestAgentStat> expectedAgentStats, List<TestAgentStat> actualAgentStats) {
        Assert.assertEquals(expectedAgentStats, actualAgentStats);
    }

    /** Decodes qualifier then value, the same order the encoder produced them. */
    private List<TestAgentStat> decode(Buffer encodedQualifierBuffer, Buffer encodedValueBuffer, AgentStatDecodingContext decodingContext) {
        long timestampDelta = decoder.decodeQualifier(encodedQualifierBuffer);
        decodingContext.setTimestampDelta(timestampDelta);
        return decoder.decodeValue(encodedValueBuffer, decodingContext);
    }

    /**
     * Trivial codec used only by this test: serialises stats as a count followed
     * by (startTimestamp, timestamp, value) long triples.
     */
    private static class TestAgentStatCodec implements AgentStatCodec<TestAgentStat> {
        @Override
        public byte getVersion() {
            return 0;
        }

        @Override
        public void encodeValues(Buffer valueBuffer, List<TestAgentStat> agentStats) {
            valueBuffer.putInt(agentStats.size());
            for (TestAgentStat agentStat : agentStats) {
                valueBuffer.putLong(agentStat.getStartTimestamp());
                valueBuffer.putLong(agentStat.getTimestamp());
                valueBuffer.putLong(agentStat.getValue());
            }
        }

        @Override
        public List<TestAgentStat> decodeValues(Buffer valueBuffer, AgentStatDecodingContext decodingContext) {
            int size = valueBuffer.readInt();
            List<TestAgentStat> agentStats = new ArrayList<TestAgentStat>(size);
            for (int i = 0; i < size; ++i) {
                TestAgentStat agentStat = new TestAgentStat();
                // agentId is not serialised; it comes from the decoding context.
                agentStat.setAgentId(decodingContext.getAgentId());
                agentStat.setStartTimestamp(valueBuffer.readLong());
                agentStat.setTimestamp(valueBuffer.readLong());
                agentStat.setValue(valueBuffer.readLong());
                agentStats.add(agentStat);
            }
            return agentStats;
        }
    }

    /** Minimal AgentStatDataPoint with a single long payload, for codec tests. */
    private static class TestAgentStat implements AgentStatDataPoint {
        private String agentId;
        private long startTimestamp;
        private long timestamp;
        private long value;

        @Override
        public String getAgentId() {
            return this.agentId;
        }

        @Override
        public void setAgentId(String agentId) {
            this.agentId = agentId;
        }

        @Override
        public long getStartTimestamp() {
            return startTimestamp;
        }

        @Override
        public void setStartTimestamp(long startTimestamp) {
            this.startTimestamp = startTimestamp;
        }

        @Override
        public long getTimestamp() {
            return this.timestamp;
        }

        @Override
        public void setTimestamp(long timestamp) {
            this.timestamp = timestamp;
        }

        public long getValue() {
            return this.value;
        }

        public void setValue(long value) {
            this.value = value;
        }

        @Override
        public AgentStatType getAgentStatType() {
            return AgentStatType.UNKNOWN;
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) return true;
            if (o == null || getClass() != o.getClass()) return false;
            TestAgentStat that = (TestAgentStat) o;
            if (startTimestamp != that.startTimestamp) return false;
            if (timestamp != that.timestamp) return false;
            if (value != that.value) return false;
            return agentId != null ? agentId.equals(that.agentId) : that.agentId == null;
        }

        @Override
        public int hashCode() {
            int result = agentId != null ? agentId.hashCode() : 0;
            result = 31 * result + (int) (startTimestamp ^ (startTimestamp >>> 32));
            result = 31 * result + (int) (timestamp ^ (timestamp >>> 32));
            result = 31 * result + (int) (value ^ (value >>> 32));
            return result;
        }

        @Override
        public String toString() {
            return "TestAgentStat{" +
                    "agentId='" + agentId + '\'' +
                    ", startTimestamp=" + startTimestamp +
                    ", timestamp=" + timestamp +
                    ", value=" + value +
                    '}';
        }
    }
}
|
|
/**
* The MIT License (MIT)
* <p/>
* Copyright (c) 2015 Bertrand Martel
* <p/>
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
* <p/>
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
* <p/>
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package fr.bouyguestelecom.tv.bboxiot.tvapp;
import android.app.Activity;
import android.app.Dialog;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.ServiceConnection;
import android.graphics.Color;
import android.graphics.Point;
import android.os.Build;
import android.os.Bundle;
import android.os.IBinder;
import android.os.RemoteException;
import android.util.Log;
import android.view.Display;
import android.view.View;
import android.widget.AdapterView;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.ListView;
import android.widget.SeekBar;
import android.widget.TableRow;
import android.widget.TextView;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import fr.bouyguestelecom.tv.bboxiot.IBboxIotService;
import fr.bouyguestelecom.tv.bboxiot.datamodel.SmartProperty;
import fr.bouyguestelecom.tv.bboxiot.datamodel.enums.Functions;
import fr.bouyguestelecom.tv.bboxiot.datamodel.enums.Properties;
import fr.bouyguestelecom.tv.bboxiot.events.EventBuilder;
import fr.bouyguestelecom.tv.bboxiot.events.IGenericEvent;
import fr.bouyguestelecom.tv.bboxiot.events.IotEvent;
import fr.bouyguestelecom.tv.bboxiot.events.enums.EventSubscription;
import fr.bouyguestelecom.tv.bboxiot.events.enums.ScanningAction;
import fr.bouyguestelecom.tv.bboxiot.events.impl.BluetoothStateEvent;
import fr.bouyguestelecom.tv.bboxiot.events.impl.ConnectionEvent;
import fr.bouyguestelecom.tv.bboxiot.events.impl.ScanItemEvent;
import fr.bouyguestelecom.tv.bboxiot.events.impl.ScanStatusChangeEvent;
import fr.bouyguestelecom.tv.bboxiot.events.inter.IPropertyIncomingEvent;
import fr.bouyguestelecom.tv.bboxiot.events.inter.IPropertyResponseEvent;
import fr.bouyguestelecom.tv.bboxiot.protocol.bluetooth.BluetoothSmartDevice;
import fr.bouyguestelecom.tv.bboxiot.protocol.bluetooth.IBluetoothEventListener;
import fr.bouyguestelecom.tv.bboxiot.protocol.bluetooth.connection.BtAssociatedDevice;
import fr.bouyguestelecom.tv.bboxiot.protocol.bluetooth.connection.ConnectionStatus;
/**
* Dotti device management main activity
*
* @author Bertrand Martel
*/
public class BboxIoTActivity extends Activity {
/**
* debug tag
*/
private String TAG = this.getClass().getName();
private IBboxIotService bboxIotService = null;
private Button btStateOnBtn = null;
private Button btStateOffBtn = null;
private Button btScanContinuous = null;
private Button btScanPermanent = null;
private Button btScanPeriodic = null;
private Button btScanStop = null;
private Button btDisassociateAll = null;
private Button btClearScanList = null;
private ListView scanningListview = null;
private ListView associationListview = null;
private ListView connectionEventListView = null;
private AssociationEventAdapter connectionEventListAdapter = null;
private ScanItemArrayAdapter scanningListAdapter = null;
private ConnectionItemArrayAdapter associationListAdapter = null;
private Map<String, BluetoothSmartDevice> scanningList = new HashMap<>();
private Map<String, BtAssociatedDevice> associationList = new HashMap<>();
private List<SmartProperty> propertyList = new ArrayList<>();
private PropertyAdapter propertyAdapter = null;
private Dialog currentDialog = null;
private String currentDeviceUid = "";
private final static int EVENT_MAX_SIZE = 7;
/**
* command task scheduler
*/
private ScheduledExecutorService commandScheduler = null;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.bboxiot_activity);
commandScheduler = Executors.newSingleThreadScheduledExecutor();
Intent intent = new Intent();
intent.setComponent(ComponentName.unflattenFromString("fr.bouyguestelecom.tv.bboxiot.main/.IotService"));
btStateOnBtn = (Button) findViewById(R.id.bluetooth_state_on);
btStateOffBtn = (Button) findViewById(R.id.bluetooth_state_off);
btScanContinuous = (Button) findViewById(R.id.bluetooth_continuous_scanning_start);
btScanStop = (Button) findViewById(R.id.bluetooth_scan_stop);
btClearScanList = (Button) findViewById(R.id.bluetooth_clear_scan_list);
btScanPermanent = (Button) findViewById(R.id.bluetooth_permanent_scanning_start);
btScanPeriodic = (Button) findViewById(R.id.bluetooth_periodic_scanning_start);
btDisassociateAll = (Button) findViewById(R.id.bluetooth_disassociate_all);
btDisassociateAll.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if (bboxIotService != null) {
try {
boolean status = bboxIotService.getBluetoothManager().disassociateAll();
if (status) {
Log.i(TAG, "Disassociate all request success");
refreshAssociationList();
} else {
Log.i(TAG, "Disassociate all request failure");
}
} catch (RemoteException e) {
e.printStackTrace();
}
}
}
});
btClearScanList.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if (bboxIotService != null) {
try {
bboxIotService.getBluetoothManager().clearScanningList();
runOnUiThread(new Runnable() {
@Override
public void run() {
scanningListAdapter.clear();
scanningListAdapter.notifyDataSetChanged();
}
}
);
} catch (RemoteException e) {
e.printStackTrace();
}
}
}
});
btScanPermanent.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
try {
if (bboxIotService != null) {
String request = EventBuilder.buildPermanentScan().toJsonString();
boolean status = bboxIotService.getBluetoothManager().setScanStatus(request);
if (status) {
Log.i(TAG, "Permanent scan request successfully engaged");
} else {
Log.i(TAG, "Permanent scan request failed");
}
}
} catch (RemoteException e) {
e.printStackTrace();
}
}
});
btScanPeriodic.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
try {
if (bboxIotService != null) {
String request = EventBuilder.buildPeriodicScan(10, 50).toJsonString();
boolean status = bboxIotService.getBluetoothManager().setScanStatus(request);
if (status) {
Log.i(TAG, "Periodic scan request successfully engaged");
} else {
Log.i(TAG, "Periodic scan request failed");
}
}
} catch (RemoteException e) {
e.printStackTrace();
}
}
});
btScanContinuous.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
try {
if (bboxIotService != null) {
String request = EventBuilder.buildContinuousScan(10).toJsonString();
boolean status = bboxIotService.getBluetoothManager().setScanStatus(request);
if (status) {
Log.i(TAG, "Continuous scan request successfully engaged");
} else {
Log.i(TAG, "Continuous scan request failed");
}
}
} catch (RemoteException e) {
e.printStackTrace();
}
}
});
btScanStop.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
try {
if (bboxIotService != null) {
String request = EventBuilder.buildStopScan().toJsonString();
boolean status = bboxIotService.getBluetoothManager().setScanStatus(request);
if (status) {
Log.i(TAG, "Stop scan request successfully engaged");
} else {
Log.i(TAG, "Stop scan request failed");
}
}
} catch (RemoteException e) {
e.printStackTrace();
}
}
});
btStateOnBtn.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
try {
Log.i(TAG, "bluetooth state : " + bboxIotService.getBluetoothManager().getBluetoothState());
} catch (RemoteException e) {
e.printStackTrace();
}
try {
if (bboxIotService != null && !bboxIotService.getBluetoothManager().getBluetoothState()) {
boolean status = bboxIotService.getBluetoothManager().setBluetoothState(true);
if (status) {
Log.i(TAG, "Bluetooth set ON request has been sent successfully");
} else {
Log.i(TAG, "Bluetooth set ON request has failed");
}
} else {
Log.i(TAG, "Error service is not defined or bluetooth state is already ON");
}
} catch (RemoteException e) {
e.printStackTrace();
}
}
});
btStateOffBtn.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
try {
try {
Log.i(TAG, "bluetooth state : " + bboxIotService.getBluetoothManager().getBluetoothState());
} catch (RemoteException e) {
e.printStackTrace();
}
if (bboxIotService != null && bboxIotService.getBluetoothManager().getBluetoothState()) {
boolean status = bboxIotService.getBluetoothManager().setBluetoothState(false);
if (status) {
Log.i(TAG, "Bluetooth set OFF request has been sent successfully");
} else {
Log.i(TAG, "Bluetooth set OFF request has failed");
}
} else {
Log.i(TAG, "Error service is not defined or bluetooth state is already OFF");
}
} catch (RemoteException e) {
e.printStackTrace();
}
}
});
btStateOnBtn.setEnabled(false);
btStateOffBtn.setEnabled(false);
initializeScanningList();
initializeAssociationList();
initializeConnectionEventList();
ServiceConnection mServiceConnection = new ServiceConnection() {
@Override
public void onServiceConnected(ComponentName componentName, IBinder service) {
Log.i(TAG, "Service IoT has connected");
//get api wrapper
bboxIotService = IBboxIotService.Stub.asInterface(service);
try {
refreshScanningList();
refreshAssociationList();
} catch (RemoteException e) {
e.printStackTrace();
}
try {
try {
Log.i(TAG, "bluetooth state : " + bboxIotService.getBluetoothManager().getBluetoothState());
} catch (RemoteException e) {
e.printStackTrace();
}
if (bboxIotService.getBluetoothManager().getBluetoothState()) {
btStateOnBtn.setEnabled(false);
btStateOffBtn.setEnabled(true);
} else {
btStateOnBtn.setEnabled(true);
btStateOffBtn.setEnabled(false);
}
if (bboxIotService.getBluetoothManager().isScanning()) {
runOnUiThread(new Runnable() {
@Override
public void run() {
btScanContinuous.setEnabled(false);
btScanStop.setEnabled(true);
}
});
} else {
runOnUiThread(new Runnable() {
@Override
public void run() {
btScanContinuous.setEnabled(true);
btScanStop.setEnabled(false);
}
});
}
Set<EventSubscription> registrationSet = new HashSet<>();
registrationSet.add(EventSubscription.BLUETOOTH_STATE);
registrationSet.add(EventSubscription.SCANNING);
registrationSet.add(EventSubscription.CONNECTION);
registrationSet.add(EventSubscription.PROPERTIES);
bboxIotService.getBluetoothManager().subscribe(EventBuilder.buildSubscription(registrationSet).toJsonString(), new IBluetoothEventListener.Stub() {
public void onEventReceived(final String type, final String topic, final String event) {
commandScheduler.execute(new Runnable() {
@Override
public void run() {
Log.i(TAG, "event received => type : " + type + " | topic : " + topic + " | event content : " + event);
IGenericEvent genericEvent = IotEvent.parse(event);
if (genericEvent != null) {
if (genericEvent instanceof BluetoothStateEvent) {
BluetoothStateEvent btEvent = (BluetoothStateEvent) genericEvent;
if (btEvent.getBluetoothState()) {
runOnUiThread(new Runnable() {
@Override
public void run() {
btStateOnBtn.setEnabled(false);
btStateOffBtn.setEnabled(true);
}
});
} else {
runOnUiThread(new Runnable() {
@Override
public void run() {
btStateOnBtn.setEnabled(true);
btStateOffBtn.setEnabled(false);
}
});
}
} else if (genericEvent instanceof ScanStatusChangeEvent) {
ScanStatusChangeEvent btEvent = (ScanStatusChangeEvent) genericEvent;
if (btEvent.getAction() == ScanningAction.SCANNING_ACTION_START) {
runOnUiThread(new Runnable() {
@Override
public void run() {
btScanContinuous.setEnabled(false);
btScanPermanent.setEnabled(false);
btScanPeriodic.setEnabled(false);
btScanStop.setEnabled(true);
}
});
} else if (btEvent.getAction() == ScanningAction.SCANNING_ACTION_STOP) {
runOnUiThread(new Runnable() {
@Override
public void run() {
btScanContinuous.setEnabled(true);
btScanPermanent.setEnabled(true);
btScanPeriodic.setEnabled(true);
btScanStop.setEnabled(false);
}
});
}
} else if (genericEvent instanceof ScanItemEvent) {
final ScanItemEvent btEvent = (ScanItemEvent) genericEvent;
runOnUiThread(new Runnable() {
@Override
public void run() {
if (scanningListAdapter != null) {
scanningListAdapter.add(btEvent.getItem());
scanningListAdapter.notifyDataSetChanged();
}
}
});
} else if (genericEvent instanceof IPropertyResponseEvent) {
Log.i(TAG, "received property response event");
final IPropertyResponseEvent btEvent = (IPropertyResponseEvent) genericEvent;
runOnUiThread(new Runnable() {
@Override
public void run() {
if (connectionEventListAdapter.getCount() > EVENT_MAX_SIZE) {
for (int i = connectionEventListAdapter.getCount() - 1; i >= EVENT_MAX_SIZE; i--) {
connectionEventListAdapter.getDeviceList().remove(i);
}
}
connectionEventListAdapter.insert(new AssociationEventObj(btEvent.getDeviceUid(), btEvent.getProperty().getFunction() + " " + btEvent.getActionType().toString() + " " + btEvent.getStatus().toString() + " " + btEvent.getProperty().getValue()), 0);
connectionEventListAdapter.notifyDataSetChanged();
}
});
associationList.get(btEvent.getDeviceUid()).getDeviceFunctions().get(btEvent.getProperty().getFunction()).put(btEvent.getProperty().getProperty(), btEvent.getProperty());
for (int i = 0; i < propertyList.size(); i++) {
if (propertyList.get(i).getDeviceUid().equals(btEvent.getDeviceUid()) &&
propertyList.get(i).getFunction() == btEvent.getProperty().getFunction() &&
propertyList.get(i).getProperty() == btEvent.getProperty().getProperty()) {
propertyList.set(i, btEvent.getProperty());
runOnUiThread(new Runnable() {
@Override
public void run() {
if (propertyAdapter != null) {
propertyAdapter.notifyDataSetChanged();
}
}
});
}
}
} else if (genericEvent instanceof IPropertyIncomingEvent) {
Log.i(TAG, "received property event");
final IPropertyIncomingEvent btEvent = (IPropertyIncomingEvent) genericEvent;
runOnUiThread(new Runnable() {
@Override
public void run() {
if (connectionEventListAdapter.getCount() > EVENT_MAX_SIZE) {
for (int i = connectionEventListAdapter.getCount() - 1; i >= EVENT_MAX_SIZE; i--) {
connectionEventListAdapter.getDeviceList().remove(i);
}
}
connectionEventListAdapter.insert(new AssociationEventObj(btEvent.getDeviceUid(), btEvent.getProperty().getFunction().toString() + " " + btEvent.getProperty().getProperty().toString() + " " + btEvent.getProperty().getValue().toString()), 0);
connectionEventListAdapter.notifyDataSetChanged();
}
});
associationList.get(btEvent.getDeviceUid()).getDeviceFunctions().get(btEvent.getProperty().getFunction()).put(btEvent.getProperty().getProperty(), btEvent.getProperty());
for (int i = 0; i < propertyList.size(); i++) {
if (propertyList.get(i).getDeviceUid().equals(btEvent.getDeviceUid()) &&
propertyList.get(i).getFunction() == btEvent.getProperty().getFunction() &&
propertyList.get(i).getProperty() == btEvent.getProperty().getProperty()) {
propertyList.set(i, btEvent.getProperty());
runOnUiThread(new Runnable() {
@Override
public void run() {
if (propertyAdapter != null) {
propertyAdapter.notifyDataSetChanged();
}
}
});
}
}
} else if (genericEvent instanceof ConnectionEvent) {
final ConnectionEvent btEvent = (ConnectionEvent) genericEvent;
Log.i(TAG, "received association event : " + btEvent.getState().toString());
if (connectionEventListAdapter != null && btEvent.getConnection() != null) {
runOnUiThread(new Runnable() {
@Override
public void run() {
if (connectionEventListAdapter.getCount() > EVENT_MAX_SIZE) {
for (int i = connectionEventListAdapter.getCount() - 1; i >= EVENT_MAX_SIZE; i--) {
connectionEventListAdapter.getDeviceList().remove(i);
}
}
connectionEventListAdapter.insert(new AssociationEventObj(btEvent.getConnection().getDeviceUuid(), btEvent.getState().toString()), 0);
connectionEventListAdapter.notifyDataSetChanged();
}
});
}
try {
switch (btEvent.getState()) {
case ASSOCIATION_COMPLETE: {
if (btEvent.getConnection().getDeviceUuid().equals(currentDeviceUid) && currentDialog != null) {
currentDialog.dismiss();
currentDialog = null;
}
refreshAssociationList();
//refresh scanning list because device entry is no longer present
refreshScanningList();
break;
}
case CONNECTED: {
refreshAssociationList();
if (currentDeviceUid.equals(btEvent.getConnection().getDeviceUuid()) && currentDialog != null) {
runOnUiThread(new Runnable() {
@Override
public void run() {
ImageView view = (ImageView) currentDialog.findViewById(R.id.device_connected_value);
if (view != null)
view.setImageResource(R.drawable.green_circle);
TextView buttonConnect = (TextView) currentDialog.findViewById(R.id.button_connect);
if (buttonConnect != null)
buttonConnect.setEnabled(false);
TextView buttonDisconnect = (TextView) currentDialog.findViewById(R.id.button_disconnect);
if (buttonDisconnect != null)
buttonDisconnect.setEnabled(true);
}
});
}
break;
}
case DISCONNECTED: {
refreshAssociationList();
if (currentDeviceUid.equals(btEvent.getConnection().getDeviceUuid()) && currentDialog != null) {
runOnUiThread(new Runnable() {
@Override
public void run() {
ImageView view = (ImageView) currentDialog.findViewById(R.id.device_connected_value);
if (view != null)
view.setImageResource(R.drawable.red_circle);
TextView buttonConnect = (TextView) currentDialog.findViewById(R.id.button_connect);
if (buttonConnect != null)
buttonConnect.setEnabled(true);
TextView buttonDisconnect = (TextView) currentDialog.findViewById(R.id.button_disconnect);
if (buttonDisconnect != null)
buttonDisconnect.setEnabled(false);
}
});
}
break;
}
case CONNECTION_ERROR: {
break;
}
case DEVICE_NOT_FOUND: {
refreshAssociationList();
break;
}
}
} catch (RemoteException e) {
e.printStackTrace();
}
}
}
}
});
}
});
} catch (
RemoteException e
)
{
e.printStackTrace();
}
}
@Override
public void onServiceDisconnected(ComponentName componentName) {
Log.i(TAG, "Service IoT has disconnected");
}
};
boolean isBound = getApplicationContext().bindService(intent,
mServiceConnection,
Context.BIND_AUTO_CREATE);
}
private void refreshAssociationList() throws RemoteException {
associationList = IotEvent.parseAssociationList(bboxIotService.getBluetoothManager().getAssociationList()).getList();
Iterator it = associationList.entrySet().iterator();
runOnUiThread(new Runnable() {
@Override
public void run() {
if (associationListAdapter != null) {
associationListAdapter.clear();
Iterator it = associationList.entrySet().iterator();
while (it.hasNext()) {
Map.Entry<String, BtAssociatedDevice> pair = (Map.Entry) it.next();
associationListAdapter.add(pair.getValue());
}
associationListAdapter.notifyDataSetChanged();
}
}
});
}
private void refreshScanningList() throws RemoteException {
scanningList = IotEvent.parseScanningList(bboxIotService.getBluetoothManager().getScanningList()).getList();
runOnUiThread(new Runnable() {
@Override
public void run() {
if (scanningListAdapter != null) {
scanningListAdapter.clear();
Iterator it = scanningList.entrySet().iterator();
while (it.hasNext()) {
Map.Entry<String, BluetoothSmartDevice> pair = (Map.Entry) it.next();
scanningListAdapter.add(pair.getValue());
}
scanningListAdapter.notifyDataSetChanged();
}
}
});
}
public static int getScreenWidth(Activity context) {
Display display = context.getWindowManager().getDefaultDisplay();
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB_MR2) {
Point size = new Point();
display.getSize(size);
return size.x;
}
return display.getWidth();
}
public static int getScreenHeight(Activity context) {
Display display = context.getWindowManager().getDefaultDisplay();
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB_MR2) {
Point size = new Point();
display.getSize(size);
return size.y;
}
return display.getHeight();
}
    /**
     * Populates the association ListView with the devices currently associated
     * to the box and installs an item-click listener that opens a per-device
     * detail dialog: status fields, property list, connect / disconnect /
     * disassociate actions, plus function-specific controls (on-off switch,
     * RGB color buttons, intensity and measurement-frequency seek bars).
     */
    private void initializeAssociationList() {
        associationListview = (ListView) findViewById(R.id.connection_list_view);
        associationListAdapter = new ConnectionItemArrayAdapter(this,
                android.R.layout.simple_list_item_1, new ArrayList<BtAssociatedDevice>());
        associationListview.setAdapter(associationListAdapter);
        associationListview.setOnItemClickListener(new AdapterView.OnItemClickListener() {
            @Override
            public void onItemClick(AdapterView<?> parent, final View view,
                                    int position, long id) {
                final BtAssociatedDevice item = (BtAssociatedDevice) parent.getItemAtPosition(position);
                Log.i(TAG, "item selected : " + item.toJson().toString());
                // Remember the open dialog and its device so asynchronous
                // connection events (see the subscription callback) can update it.
                final Dialog dialog = new Dialog(BboxIoTActivity.this);
                currentDialog = dialog;
                currentDeviceUid = item.getDeviceUuid();
                dialog.setContentView(R.layout.connection_item);
                dialog.getWindow().setLayout(getScreenWidth(BboxIoTActivity.this) / 2
                        , LinearLayout.LayoutParams.MATCH_PARENT);
                dialog.setTitle("Device " + item.getDeviceUuid());
                Button buttonBack = (Button) dialog.findViewById(R.id.button_back);
                Button buttonConnect = (Button) dialog.findViewById(R.id.button_connect);
                Button buttonDisconnect = (Button) dialog.findViewById(R.id.button_disconnect);
                Button buttonDisassociate = (Button) dialog.findViewById(R.id.button_disassociate);
                TextView supportedDevice = (TextView) dialog.findViewById(R.id.supported_device_name_name_value);
                ListView propertiesList = (ListView) dialog.findViewById(R.id.properties_list_view);
                TableRow switchStateRow = (TableRow) dialog.findViewById(R.id.properties_on_off_row);
                TableRow colorRow = (TableRow) dialog.findViewById(R.id.properties_color_row);
                SeekBar intensityBar = (SeekBar) dialog.findViewById(R.id.intensity_seekbar);
                SeekBar freqMeasurementSeekbar = (SeekBar) dialog.findViewById(R.id.frequency_measurement_seekbar);
                // Flatten the device's function -> property map into propertyList
                // and reveal only the dialog controls matching supported functions.
                propertyList = new ArrayList<SmartProperty>();
                Iterator it = item.getDeviceFunctions().entrySet().iterator();
                while (it.hasNext()) {
                    Map.Entry<Functions, HashMap<Properties, SmartProperty>> pair = (Map.Entry) it.next();
                    Iterator it2 = pair.getValue().entrySet().iterator();
                    while (it2.hasNext()) {
                        Map.Entry<Properties, SmartProperty> pair2 = (Map.Entry) it2.next();
                        if (pair.getKey() == Functions.SWITCH && pair2.getValue().getProperty() == Properties.ONOFF) {
                            switchStateRow.setVisibility(View.VISIBLE);
                        }
                        if (pair.getKey() == Functions.RGB_LED && pair2.getValue().getProperty() == Properties.COLOR) {
                            colorRow.setVisibility(View.VISIBLE);
                        }
                        if (pair.getKey() == Functions.RGB_LED && pair2.getValue().getProperty() == Properties.INTENSITY) {
                            intensityBar.setVisibility(View.VISIBLE);
                        }
                        if (pair.getKey() == Functions.SMART_METER && pair2.getValue().getProperty() == Properties.FREQUENCY_MEASUREMENT) {
                            freqMeasurementSeekbar.setVisibility(View.VISIBLE);
                        }
                        propertyList.add(pair2.getValue());
                    }
                }
                // ON/OFF buttons push a boolean value to the SWITCH function.
                if (switchStateRow.getVisibility() == View.VISIBLE) {
                    Button onButton = (Button) dialog.findViewById(R.id.switch_state_on);
                    Button offButton = (Button) dialog.findViewById(R.id.switch_state_off);
                    onButton.setOnClickListener(new View.OnClickListener() {
                        @Override
                        public void onClick(View v) {
                            if (bboxIotService != null) {
                                try {
                                    String pushRequest = EventBuilder.buildPushRequest(item.getDeviceFunctions().get(Functions.SWITCH).get(Properties.ONOFF), true);
                                    boolean status = bboxIotService.getBluetoothManager().pushValue(pushRequest);
                                    if (!status)
                                        Log.e(TAG, "push request has failed");
                                    else
                                        Log.i(TAG, "push request sent");
                                } catch (RemoteException e) {
                                    e.printStackTrace();
                                }
                            }
                        }
                    });
                    offButton.setOnClickListener(new View.OnClickListener() {
                        @Override
                        public void onClick(View v) {
                            if (bboxIotService != null) {
                                try {
                                    String pushRequest = EventBuilder.buildPushRequest(item.getDeviceFunctions().get(Functions.SWITCH).get(Properties.ONOFF), false);
                                    boolean status = bboxIotService.getBluetoothManager().pushValue(pushRequest);
                                    if (!status)
                                        Log.e(TAG, "push request has failed");
                                    else
                                        Log.i(TAG, "push request sent");
                                } catch (RemoteException e) {
                                    e.printStackTrace();
                                }
                            }
                        }
                    });
                }
                // Color buttons push a predefined ARGB color to the RGB_LED function.
                if (colorRow.getVisibility() == View.VISIBLE) {
                    Button redButton = (Button) dialog.findViewById(R.id.color_red);
                    Button greenButton = (Button) dialog.findViewById(R.id.color_green);
                    Button blueButton = (Button) dialog.findViewById(R.id.color_blue);
                    Button whiteButton = (Button) dialog.findViewById(R.id.color_white);
                    redButton.setOnClickListener(new View.OnClickListener() {
                        @Override
                        public void onClick(View v) {
                            setColor(item, Color.RED);
                        }
                    });
                    greenButton.setOnClickListener(new View.OnClickListener() {
                        @Override
                        public void onClick(View v) {
                            setColor(item, Color.GREEN);
                        }
                    });
                    blueButton.setOnClickListener(new View.OnClickListener() {
                        @Override
                        public void onClick(View v) {
                            setColor(item, Color.BLUE);
                        }
                    });
                    whiteButton.setOnClickListener(new View.OnClickListener() {
                        @Override
                        public void onClick(View v) {
                            setColor(item, Color.WHITE);
                        }
                    });
                }
                // Seek bar pushing the measurement frequency of a smart meter.
                if (freqMeasurementSeekbar.getVisibility() == View.VISIBLE) {
                    freqMeasurementSeekbar.setProgress(item.getDeviceFunctions().get(Functions.SMART_METER).get(Properties.FREQUENCY_MEASUREMENT).getIntValue());
                    freqMeasurementSeekbar.setOnSeekBarChangeListener(new SeekBar.OnSeekBarChangeListener() {
                        @Override
                        public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
                            if (bboxIotService != null) {
                                try {
                                    String pushRequest = EventBuilder.buildPushRequest(item.getDeviceFunctions().get(Functions.SMART_METER).get(Properties.FREQUENCY_MEASUREMENT), progress);
                                    boolean status = bboxIotService.getBluetoothManager().pushValue(pushRequest);
                                    if (!status)
                                        Log.e(TAG, "push request has failed");
                                    else
                                        Log.i(TAG, "push request sent");
                                } catch (RemoteException e) {
                                    e.printStackTrace();
                                }
                            }
                        }
                        @Override
                        public void onStartTrackingTouch(SeekBar seekBar) {
                        }
                        @Override
                        public void onStopTrackingTouch(SeekBar seekBar) {
                        }
                    });
                }
                // Seek bar pushing the light intensity of the RGB LED.
                if (intensityBar.getVisibility() == View.VISIBLE) {
                    intensityBar.setProgress(item.getDeviceFunctions().get(Functions.RGB_LED).get(Properties.INTENSITY).getIntValue());
                    intensityBar.setOnSeekBarChangeListener(new SeekBar.OnSeekBarChangeListener() {
                        @Override
                        public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
                            if (bboxIotService != null) {
                                try {
                                    String pushRequest = EventBuilder.buildPushRequest(item.getDeviceFunctions().get(Functions.RGB_LED).get(Properties.INTENSITY), progress);
                                    boolean status = bboxIotService.getBluetoothManager().pushValue(pushRequest);
                                    if (!status)
                                        Log.e(TAG, "push request has failed");
                                    else
                                        Log.i(TAG, "push request sent");
                                } catch (RemoteException e) {
                                    e.printStackTrace();
                                }
                            }
                        }
                        @Override
                        public void onStartTrackingTouch(SeekBar seekBar) {
                        }
                        @Override
                        public void onStopTrackingTouch(SeekBar seekBar) {
                        }
                    });
                }
                // Tapping a property row sends a pull request to refresh its value.
                propertyAdapter = new PropertyAdapter(BboxIoTActivity.this,
                        android.R.layout.simple_list_item_1, propertyList);
                propertiesList.setAdapter(propertyAdapter);
                propertiesList.setOnItemClickListener(new AdapterView.OnItemClickListener() {
                    @Override
                    public void onItemClick(AdapterView<?> parent, final View view,
                                            int position, long id) {
                        final SmartProperty item = (SmartProperty) parent.getItemAtPosition(position);
                        if (bboxIotService != null) {
                            try {
                                String pullRequest = EventBuilder.buildPullRequest(item);
                                boolean status = bboxIotService.getBluetoothManager().pullValue(pullRequest);
                                if (!status)
                                    Log.e(TAG, "pull request has failed");
                                else
                                    Log.i(TAG, "pull request sent");
                            } catch (RemoteException e) {
                                e.printStackTrace();
                            }
                        }
                    }
                });
                // Static device information fields.
                supportedDevice.setText(item.getBtSmartDevice().getGenericDevice().getSupportedDevice().toString());
                ImageView connectedValue = (ImageView) dialog.findViewById(R.id.device_connected_value);
                TextView deviceUidVal = (TextView) dialog.findViewById(R.id.device_uid_value);
                deviceUidVal.setText(item.getDeviceUuid());
                TextView deviceNameList = (TextView) dialog.findViewById(R.id.device_name_list_value);
                TextView deviceUp = (TextView) dialog.findViewById(R.id.device_up_value);
                deviceUp.setText("" + item.getBtSmartDevice().isUp());
                TextView deviceMode = (TextView) dialog.findViewById(R.id.device_mode_value);
                deviceMode.setText(item.getBtSmartDevice().getDeviceMode().toString());
                TextView lastActivityDate = (TextView) dialog.findViewById(R.id.last_activity_date_value);
                Date lastDate = new Date(item.getBtSmartDevice().getLastActivityTime());
                DateFormat df = new SimpleDateFormat("dd/MM/yyyy HH:mm:ss");
                lastActivityDate.setText(df.format(lastDate).toString());
                TextView deviceAddress = (TextView) dialog.findViewById(R.id.device_address_value);
                deviceAddress.setText(item.getBtSmartDevice().getDeviceAddress());
                // Reflect current connection state (green = connected, red = disconnected)
                // and enable only the action that makes sense in that state.
                if (item.isConnected()) {
                    connectedValue.setImageResource(R.drawable.green_circle);
                    buttonConnect.setEnabled(false);
                    buttonDisconnect.setEnabled(true);
                } else {
                    connectedValue.setImageResource(R.drawable.red_circle);
                    buttonConnect.setEnabled(true);
                    buttonDisconnect.setEnabled(false);
                }
                buttonBack.setOnClickListener(new View.OnClickListener() {
                    @Override
                    public void onClick(View v) {
                        dialog.dismiss();
                    }
                });
                buttonDisassociate.setOnClickListener(new View.OnClickListener() {
                    @Override
                    public void onClick(View v) {
                        Log.i(TAG, "disassociating device " + item.getDeviceUuid());
                        if (bboxIotService != null) {
                            try {
                                boolean status = bboxIotService.getBluetoothManager().disassociateDevice(item.getDeviceUuid());
                                if (status) {
                                    Log.i(TAG, "Disassociate request successfully initiated");
                                    refreshAssociationList();
                                    dialog.dismiss();
                                } else
                                    Log.i(TAG, "Disassociate request failure");
                            } catch (RemoteException e) {
                                e.printStackTrace();
                            }
                        }
                    }
                });
                buttonConnect.setOnClickListener(new View.OnClickListener() {
                    @Override
                    public void onClick(View v) {
                        Log.i(TAG, "connecting device " + item.getDeviceUuid());
                        if (bboxIotService != null) {
                            try {
                                String status = bboxIotService.getBluetoothManager().connect(item.getDeviceUuid());
                                switch (ConnectionStatus.getConnectionStatusStr(status)) {
                                    case CONNECTION_SUCCESS: {
                                        Log.i(TAG, "Connection initiated");
                                        break;
                                    }
                                    case CONNECTION_FAILURE: {
                                        Log.i(TAG, "Connection failure");
                                        break;
                                    }
                                    case CONNECTION_WAITING: {
                                        Log.i(TAG, "Connection waiting");
                                        break;
                                    }
                                }
                            } catch (RemoteException e) {
                                e.printStackTrace();
                            }
                        }
                    }
                });
                buttonDisconnect.setOnClickListener(new View.OnClickListener() {
                    @Override
                    public void onClick(View v) {
                        Log.i(TAG, "disconnecting device " + item.getDeviceUuid());
                        if (bboxIotService != null) {
                            try {
                                boolean status = bboxIotService.getBluetoothManager().disconnect(item.getDeviceUuid());
                                if (status)
                                    Log.i(TAG, "Disconnection successfully initiated");
                                else
                                    Log.e(TAG, "Disconnection request failure");
                            } catch (RemoteException e) {
                                e.printStackTrace();
                            }
                        }
                    }
                });
                dialog.show();
            }
        });
    }
private void setColor(BtAssociatedDevice connection, int color) {
if (bboxIotService != null) {
try {
if (connection.getDeviceFunctions().containsKey(Functions.RGB_LED) && connection.getDeviceFunctions().get(Functions.RGB_LED).containsKey(Properties.COLOR)) {
String pushRequest = EventBuilder.buildPushRequest(connection.getDeviceFunctions().get(Functions.RGB_LED).get(Properties.COLOR), color);
boolean status = bboxIotService.getBluetoothManager().pushValue(pushRequest);
if (!status)
Log.e(TAG, "push request has failed");
else
Log.i(TAG, "push request sent");
} else {
Log.e(TAG, "error : function RGB_LED or property COLOR not found");
}
} catch (RemoteException e) {
e.printStackTrace();
}
}
}
private void initializeConnectionEventList() {
connectionEventListView = (ListView) findViewById(R.id.connection_event_list_view);
connectionEventListAdapter = new AssociationEventAdapter(this,
android.R.layout.simple_list_item_1, new ArrayList<AssociationEventObj>());
connectionEventListView.setAdapter(connectionEventListAdapter);
}
private void initializeScanningList() {
scanningListview = (ListView) findViewById(R.id.scanning_list_view);
scanningListAdapter = new ScanItemArrayAdapter(this,
android.R.layout.simple_list_item_1, new ArrayList<BluetoothSmartDevice>());
scanningListview.setAdapter(scanningListAdapter);
scanningListview.setOnItemClickListener(new AdapterView.OnItemClickListener() {
@Override
public void onItemClick(AdapterView<?> parent, final View view,
int position, long id) {
final BluetoothSmartDevice item = (BluetoothSmartDevice) parent.getItemAtPosition(position);
final Dialog dialog = new Dialog(BboxIoTActivity.this);
dialog.setContentView(R.layout.scanning_item);
dialog.setTitle("Device " + item.getDeviceUuid());
Button buttonBack = (Button) dialog.findViewById(R.id.button_back);
Button buttonAssociate = (Button) dialog.findViewById(R.id.button_associate);
TextView supportedDeviceName = (TextView) dialog.findViewById(R.id.supported_device_name_value);
supportedDeviceName.setText(item.getGenericDevice().getSupportedDevice().toString());
TextView deviceUidVal = (TextView) dialog.findViewById(R.id.device_uid_value);
deviceUidVal.setText(item.getDeviceUuid());
TextView deviceNameList = (TextView) dialog.findViewById(R.id.device_name_list_value);
TextView deviceUp = (TextView) dialog.findViewById(R.id.device_up_value);
deviceUp.setText("" + item.isUp());
TextView manufacturerDataFilter = (TextView) dialog.findViewById(R.id.manufacturer_data_filter_value);
String manufacturerDataFilterVals = "[";
for (int i = 0; i < item.getManufacturerData().length; i++) {
manufacturerDataFilterVals += item.getManufacturerData()[i] + " , ";
}
if (!manufacturerDataFilterVals.equals("[")) {
manufacturerDataFilterVals = manufacturerDataFilterVals.substring(0, manufacturerDataFilterVals.length() - 3) +
" ]";
manufacturerDataFilter.setText(manufacturerDataFilterVals);
} else {
manufacturerDataFilter.setText("[ ]");
}
TextView lastActivityDate = (TextView) dialog.findViewById(R.id.last_activity_date_value);
Date lastDate = new Date(item.getLastActivityTime());
DateFormat df = new SimpleDateFormat("dd/MM/yyyy HH:mm:ss");
lastActivityDate.setText(df.format(lastDate).toString());
TextView deviceMode = (TextView) dialog.findViewById(R.id.device_mode_value);
deviceMode.setText(item.getDeviceMode().toString());
TextView deviceAddress = (TextView) dialog.findViewById(R.id.device_address_value);
deviceAddress.setText(item.getDeviceAddress());
String deviceNames = "";
for (int i = 0; i < item.getDeviceNameList().size(); i++) {
deviceNames += "\"" + item.getDeviceNameList().get(i) + "\"" + ",";
}
if (!deviceNames.equals("")) {
deviceNameList.setText(deviceNames.substring(0, deviceNames.length() - 1));
}
buttonBack.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
dialog.dismiss();
}
});
buttonAssociate.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
Log.i(TAG, "associating device " + item.getDeviceUuid());
if (bboxIotService != null) {
try {
currentDialog = dialog;
currentDeviceUid = item.getDeviceUuid();
boolean status = bboxIotService.getBluetoothManager().associateDevice(item.getDeviceUuid());
if (status) {
Log.i(TAG, "Association successfully initiated");
} else
Log.e(TAG, "Association request failure");
} catch (RemoteException e) {
e.printStackTrace();
}
}
}
});
dialog.show();
}
});
}
    /** Logs activity teardown, then delegates to the framework. */
    @Override
    protected void onDestroy() {
        Log.i(TAG, "onDestroy()");
        super.onDestroy();
    }
    /** No activity-specific resume work; delegates to the framework. */
    @Override
    public void onResume() {
        super.onResume();
    }
    /** Default back-press handling; delegates to the framework. */
    @Override
    public void onBackPressed() {
        super.onBackPressed();
    }
    /** No activity-specific pause work; delegates to the framework. */
    @Override
    protected void onPause() {
        super.onPause();
    }
}
|
|
/*
* Copyright (c) 2017. Team CMPUT301F17T02, CMPUT301, University of Alberta - All Rights Reserved. You may use, distribute, or modify this code under terms and conditions of the Code of Students Behaviour at University of Alberta.
*/
package com.example.baard.Friends;
/**
* Created by randi on 23/11/17.
*/
import android.content.Context;
import android.content.SharedPreferences;
import android.preference.PreferenceManager;
import android.support.annotation.LayoutRes;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.v4.app.Fragment;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.ListView;
import android.widget.TextView;
import com.example.baard.Controllers.ElasticSearchController;
import com.example.baard.Controllers.FileController;
import com.example.baard.Entities.User;
import com.example.baard.Entities.UserList;
import com.example.baard.R;
import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ExecutionException;
/**
* Finds all users in the database and displays them for the user to choose who to follow
* @see ElasticSearchController
* @see FileController
* @author rderbysh
* @since 1.0
*/
public class FindFriendsFragment extends Fragment {
    private ListView findFriendsView;
    private MyFriendsListAdapter adapter;
    // Username of the locally logged-in user, loaded from shared preferences.
    private String username;
    private User user;
    private FileController fileController;
    private ElasticSearchController.GetAllUsersTask getAllUsersTask = new ElasticSearchController.GetAllUsersTask();
    private UserList allUsers = new UserList();
    // Maps friend username -> TRUE (accepted) or FALSE (request still pending).
    private HashMap<String, Boolean> myFriends;
    // Maps username -> display name for every known user.
    private HashMap<String, String> userMap = new HashMap<String, String>();
    private ArrayList<String> acceptedFriendsList, pendingFriendsList;
    /**
     * Inflates the find-friends view, kicks off the asynchronous fetch of all
     * users, and reads the current username from shared preferences.
     *
     * @param inflater           layout inflater
     * @param container          parent view group
     * @param savedInstanceState previously saved state, if any
     * @return the inflated root view
     */
    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
                             Bundle savedInstanceState) {
        View rootView = inflater.inflate(R.layout.fragment_find_friends, container, false);
        getAllUsersTask.execute();
        findFriendsView = (ListView) rootView.findViewById(R.id.findFriendsView);
        fileController = new FileController();
        SharedPreferences sharedPrefs = PreferenceManager.getDefaultSharedPreferences(getActivity().getApplicationContext());
        Gson gson = new Gson();
        String json = sharedPrefs.getString("username", "");
        username = gson.fromJson(json, new TypeToken<String>() {}.getType());
        return rootView;
    }
    /**
     * Called when FindFriendsFragment activity is opened up and called again.
     * Loads the current user, waits for the all-users fetch, removes the
     * current user from the list, and (re)binds the adapter.
     */
    @Override
    public void onResume() {
        super.onResume();
        user = fileController.loadUser(getActivity().getApplicationContext(), username);
        myFriends = user.getFriends();
        try {
            allUsers = getAllUsersTask.get();
            for (User aUser : allUsers.getArrayList()) {
                userMap.put(aUser.getUsername(), aUser.getName());
                // Remove the logged-in user from the list; break immediately to
                // avoid mutating the list while the for-each iterates it.
                if (user.getUsername().equals(aUser.getUsername())) {
                    allUsers.delete(aUser);
                    break;
                }
            }
        } catch (InterruptedException | ExecutionException e) {
            e.printStackTrace();
        }
        Collections.sort(allUsers.getArrayList());
        adapter = new MyFriendsListAdapter(this.getContext(), R.layout.friend_list_item, allUsers);
        findFriendsView.setAdapter(adapter);
        adapter.notifyDataSetChanged();
    }
    /**
     * List adapter for displaying the changing buttons depending on the state on which a user
     * is following another user. (Follow, Pending, Following)
     */
    private class MyFriendsListAdapter extends ArrayAdapter<User> {
        private int layout;
        public MyFriendsListAdapter(@NonNull Context context, @LayoutRes int resource, @NonNull UserList objects) {
            super(context, resource, objects.getArrayList());
            layout = resource;
        }
        @NonNull
        @Override
        public View getView(final int position, @Nullable View convertView, @NonNull ViewGroup parent) {
            if (convertView == null) {
                LayoutInflater inflater = LayoutInflater.from(getContext());
                convertView = inflater.inflate(layout, parent, false);
                ViewHolder viewHolder = new ViewHolder();
                viewHolder.title = (TextView) convertView.findViewById(R.id.addFriend);
                viewHolder.button = (Button) convertView.findViewById(R.id.addFriendButton);
                convertView.setTag(viewHolder);
            }
            final ViewHolder mainViewHolder = (ViewHolder) convertView.getTag();
            // Recompute the follow-state snapshots on every bind so recycled
            // rows see current data (previously only done at inflation time).
            acceptedFriendsList = getKeysByValue(myFriends, Boolean.TRUE);
            pendingFriendsList = getKeysByValue(myFriends, Boolean.FALSE);
            mainViewHolder.title.setText(getItem(position).getName());
            // Re-attach the click listener on every bind: attaching it only when
            // the view was first inflated captured a stale `position` for
            // recycled rows, sending friend requests to the wrong user.
            mainViewHolder.button.setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View view) {
                    // Compare text CONTENT, not references: getText() returns a
                    // CharSequence, so `== "FOLLOWING"` was effectively always false.
                    CharSequence label = mainViewHolder.button.getText();
                    if ("FOLLOWING".contentEquals(label) || "PENDING".contentEquals(label)) {
                        // Already following, or a request is pending: nothing to send.
                        notifyDataSetChanged();
                    } else {
                        mainViewHolder.button.setText("PENDING");
                        User friend = getItem(position);
                        System.out.println("Got friend: " + friend);
                        Boolean test = fileController.sendFriendRequest(getContext(), username, friend.getUsername());
                        if (test) { System.out.println("True: Sent to server"); }
                        user = fileController.loadUser(getContext(), username);
                        notifyDataSetChanged();
                    }
                }
            });
            if (!acceptedFriendsList.isEmpty()) {
                for (int i = 0; i < acceptedFriendsList.size(); i++) {
                    String name = userMap.get(acceptedFriendsList.get(i));
                    // userMap.get may return null for unknown usernames.
                    if (name != null && name.contentEquals(mainViewHolder.title.getText())) {
                        mainViewHolder.button.setText("FOLLOWING");
                        updateView(position, "FOLLOWING");
                    }
                }
            }
            if (!pendingFriendsList.isEmpty()) {
                for (int j = 0; j < pendingFriendsList.size(); j++) {
                    String name = userMap.get(pendingFriendsList.get(j));
                    if (name != null && name.contentEquals(mainViewHolder.title.getText())) {
                        mainViewHolder.button.setText("PENDING");
                        // Previously this forced the visible button to "FOLLOWING"
                        // even for pending requests; pass the intended label instead.
                        updateView(position, "PENDING");
                    }
                }
            }
            return convertView;
        }
    }
    /** View-holder caching the row's title and action button. */
    public class ViewHolder {
        TextView title;
        Button button;
    }
    /**
     * Updates the action-button label of the visible row at the given adapter
     * index; no-op if the row is currently scrolled off screen.
     *
     * @param index adapter position of the row
     * @param text  label to display on the row's button
     */
    private void updateView(int index, String text) {
        View v = findFriendsView.getChildAt(index -
                findFriendsView.getFirstVisiblePosition());
        if (v == null)
            return;
        Button followingButton = (Button) v.findViewById(R.id.addFriendButton);
        followingButton.setText(text);
    }
    /**
     * Returns all keys of the map whose value equals the given value.
     *
     * @param map   map to scan
     * @param value value to match (compared with equals)
     * @return list of matching keys, possibly empty
     */
    public static <T, V> ArrayList<T> getKeysByValue(Map<T, V> map, V value) {
        ArrayList<T> keys = new ArrayList<>();
        for (Map.Entry<T, V> entry : map.entrySet()) {
            if (value.equals(entry.getValue())) {
                keys.add(entry.getKey());
            }
        }
        return keys;
    }
}
|
|
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.query;
import org.apache.lucene.queries.ExtendedCommonTermsQuery;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.BoostQuery;
import org.apache.lucene.search.FuzzyQuery;
import org.apache.lucene.search.IndexOrDocValuesQuery;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.PhraseQuery;
import org.apache.lucene.search.PointRangeQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.lucene.search.MultiPhrasePrefixQuery;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.search.MatchQuery;
import org.elasticsearch.index.search.MatchQuery.Type;
import org.elasticsearch.index.search.MatchQuery.ZeroTermsQuery;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.test.AbstractQueryTestCase;
import org.hamcrest.Matcher;
import java.io.IOException;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import static org.hamcrest.CoreMatchers.either;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.notNullValue;
public class MatchQueryBuilderTests extends AbstractQueryTestCase<MatchQueryBuilder> {
@Override
    /**
     * Builds a randomized {@link MatchQueryBuilder} over one of the supported
     * field types, randomly toggling each optional match-query parameter so
     * the round-trip tests cover a broad slice of the parameter space.
     * NOTE(review): the order of random* calls matters for seeded
     * reproducibility — do not reorder.
     */
    @Override
    protected MatchQueryBuilder doCreateTestQueryBuilder() {
        String fieldName = randomFrom(STRING_FIELD_NAME, BOOLEAN_FIELD_NAME, INT_FIELD_NAME,
                DOUBLE_FIELD_NAME, DATE_FIELD_NAME);
        if (fieldName.equals(DATE_FIELD_NAME)) {
            assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
        }
        // String fields get a small space-separated multi-term value; other
        // field types use a random value of the matching type.
        Object value;
        if (fieldName.equals(STRING_FIELD_NAME)) {
            int terms = randomIntBetween(0, 3);
            StringBuilder builder = new StringBuilder();
            for (int i = 0; i < terms; i++) {
                builder.append(randomAsciiOfLengthBetween(1, 10)).append(" ");
            }
            value = builder.toString().trim();
        } else {
            value = getRandomValueForFieldName(fieldName);
        }
        MatchQueryBuilder matchQuery = new MatchQueryBuilder(fieldName, value);
        matchQuery.operator(randomFrom(Operator.values()));
        if (randomBoolean()) {
            if (fieldName.equals(DATE_FIELD_NAME)) {
                // tokenized dates would trigger parse errors
                matchQuery.analyzer(randomFrom("keyword", "whitespace"));
            } else {
                matchQuery.analyzer(randomFrom("simple", "keyword", "whitespace"));
            }
        }
        // Fuzziness is only exercised for string fields here.
        if (fieldName.equals(STRING_FIELD_NAME) && randomBoolean()) {
            matchQuery.fuzziness(randomFuzziness(fieldName));
        }
        if (randomBoolean()) {
            matchQuery.prefixLength(randomIntBetween(0, 10));
        }
        if (randomBoolean()) {
            matchQuery.maxExpansions(randomIntBetween(1, 1000));
        }
        if (randomBoolean()) {
            matchQuery.minimumShouldMatch(randomMinimumShouldMatch());
        }
        if (randomBoolean()) {
            matchQuery.fuzzyRewrite(getRandomRewriteMethod());
        }
        if (randomBoolean()) {
            matchQuery.fuzzyTranspositions(randomBoolean());
        }
        if (randomBoolean()) {
            matchQuery.lenient(randomBoolean());
        }
        if (randomBoolean()) {
            matchQuery.zeroTermsQuery(randomFrom(MatchQuery.ZeroTermsQuery.values()));
        }
        if (randomBoolean()) {
            matchQuery.cutoffFrequency((float) 10 / randomIntBetween(1, 100));
        }
        return matchQuery;
    }
@Override
protected Map<String, MatchQueryBuilder> getAlternateVersions() {
Map<String, MatchQueryBuilder> alternateVersions = new HashMap<>();
MatchQueryBuilder matchQuery = new MatchQueryBuilder(randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10));
String contentString = "{\n" +
" \"match\" : {\n" +
" \"" + matchQuery.fieldName() + "\" : \"" + matchQuery.value() + "\"\n" +
" }\n" +
"}";
alternateVersions.put(contentString, matchQuery);
return alternateVersions;
}
@Override
protected void doAssertLuceneQuery(MatchQueryBuilder queryBuilder, Query query, SearchContext searchContext) throws IOException {
assertThat(query, notNullValue());
if (query instanceof MatchAllDocsQuery) {
assertThat(queryBuilder.zeroTermsQuery(), equalTo(ZeroTermsQuery.ALL));
return;
}
switch (queryBuilder.type()) {
case BOOLEAN:
assertThat(query, either(instanceOf(BooleanQuery.class)).or(instanceOf(ExtendedCommonTermsQuery.class))
.or(instanceOf(TermQuery.class)).or(instanceOf(FuzzyQuery.class)).or(instanceOf(MatchNoDocsQuery.class))
.or(instanceOf(PointRangeQuery.class)).or(instanceOf(IndexOrDocValuesQuery.class)));
break;
case PHRASE:
assertThat(query, either(instanceOf(BooleanQuery.class)).or(instanceOf(PhraseQuery.class))
.or(instanceOf(TermQuery.class)).or(instanceOf(FuzzyQuery.class))
.or(instanceOf(PointRangeQuery.class)).or(instanceOf(IndexOrDocValuesQuery.class)));
break;
case PHRASE_PREFIX:
assertThat(query, either(instanceOf(BooleanQuery.class)).or(instanceOf(MultiPhrasePrefixQuery.class))
.or(instanceOf(TermQuery.class)).or(instanceOf(FuzzyQuery.class))
.or(instanceOf(PointRangeQuery.class)).or(instanceOf(IndexOrDocValuesQuery.class)));
break;
}
QueryShardContext context = searchContext.getQueryShardContext();
MappedFieldType fieldType = context.fieldMapper(queryBuilder.fieldName());
if (query instanceof TermQuery && fieldType != null) {
String queryValue = queryBuilder.value().toString();
if (queryBuilder.analyzer() == null || queryBuilder.analyzer().equals("simple")) {
queryValue = queryValue.toLowerCase(Locale.ROOT);
}
Query expectedTermQuery = fieldType.termQuery(queryValue, context);
assertEquals(expectedTermQuery, query);
}
if (query instanceof BooleanQuery) {
BooleanQuery bq = (BooleanQuery) query;
if (queryBuilder.minimumShouldMatch() != null) {
// calculate expected minimumShouldMatch value
int optionalClauses = 0;
for (BooleanClause c : bq.clauses()) {
if (c.getOccur() == BooleanClause.Occur.SHOULD) {
optionalClauses++;
}
}
int msm = Queries.calculateMinShouldMatch(optionalClauses, queryBuilder.minimumShouldMatch());
assertThat(bq.getMinimumNumberShouldMatch(), equalTo(msm));
}
if (queryBuilder.analyzer() == null && queryBuilder.value().toString().length() > 0) {
assertEquals(bq.clauses().size(), queryBuilder.value().toString().split(" ").length);
}
}
if (query instanceof ExtendedCommonTermsQuery) {
assertTrue(queryBuilder.cutoffFrequency() != null);
ExtendedCommonTermsQuery ectq = (ExtendedCommonTermsQuery) query;
assertEquals(queryBuilder.cutoffFrequency(), ectq.getMaxTermFrequency(), Float.MIN_VALUE);
}
if (query instanceof FuzzyQuery) {
assertTrue(queryBuilder.fuzziness() != null);
FuzzyQuery fuzzyQuery = (FuzzyQuery) query;
// depending on analyzer being set or not we can have term lowercased along the way, so to simplify test we just
// compare lowercased terms here
String originalTermLc = queryBuilder.value().toString().toLowerCase(Locale.ROOT);
String actualTermLc = fuzzyQuery.getTerm().text().toLowerCase(Locale.ROOT);
Matcher<String> termLcMatcher = equalTo(originalTermLc);
if ("false".equals(originalTermLc) || "true".equals(originalTermLc)) {
// Booleans become t/f when querying a boolean field
termLcMatcher = either(termLcMatcher).or(equalTo(originalTermLc.substring(0, 1)));
}
assertThat(actualTermLc, termLcMatcher);
assertThat(queryBuilder.prefixLength(), equalTo(fuzzyQuery.getPrefixLength()));
assertThat(queryBuilder.fuzzyTranspositions(), equalTo(fuzzyQuery.getTranspositions()));
}
if (query instanceof PointRangeQuery) {
// TODO
}
}
public void testIllegalValues() {
{
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new MatchQueryBuilder(null, "value"));
assertEquals("[match] requires fieldName", e.getMessage());
}
{
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new MatchQueryBuilder("fieldName", null));
assertEquals("[match] requires query value", e.getMessage());
}
MatchQueryBuilder matchQuery = new MatchQueryBuilder("fieldName", "text");
{
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> matchQuery.prefixLength(-1));
assertEquals("[match] requires prefix length to be non-negative.", e.getMessage());
}
{
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> matchQuery.maxExpansions(randomIntBetween(-10, 0)));
assertEquals("[match] requires maxExpansions to be positive.", e.getMessage());
}
{
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> matchQuery.operator(null));
assertEquals("[match] requires operator to be non-null", e.getMessage());
}
{
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> matchQuery.type(null));
assertEquals("[match] requires type to be non-null", e.getMessage());
}
{
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> matchQuery.zeroTermsQuery(null));
assertEquals("[match] requires zeroTermsQuery to be non-null", e.getMessage());
}
matchQuery.analyzer("bogusAnalyzer");
{
QueryShardException e = expectThrows(QueryShardException.class, () -> matchQuery.toQuery(createShardContext()));
assertThat(e.getMessage(), containsString("analyzer [bogusAnalyzer] not found"));
}
}
public void testSimpleMatchQuery() throws IOException {
String json = "{\n" +
" \"match\" : {\n" +
" \"message\" : {\n" +
" \"query\" : \"to be or not to be\",\n" +
" \"operator\" : \"AND\",\n" +
" \"prefix_length\" : 0,\n" +
" \"max_expansions\" : 50,\n" +
" \"fuzzy_transpositions\" : true,\n" +
" \"lenient\" : false,\n" +
" \"zero_terms_query\" : \"ALL\",\n" +
" \"boost\" : 1.0\n" +
" }\n" +
" }\n" +
"}";
MatchQueryBuilder qb = (MatchQueryBuilder) parseQuery(json);
checkGeneratedJson(json, qb);
assertEquals(json, "to be or not to be", qb.value());
assertEquals(json, Operator.AND, qb.operator());
}
public void testLegacyMatchPhrasePrefixQuery() throws IOException {
MatchQueryBuilder expectedQB = new MatchQueryBuilder("message", "to be or not to be");
expectedQB.type(Type.PHRASE_PREFIX);
expectedQB.slop(2);
expectedQB.maxExpansions(30);
String json = "{\n" +
" \"match\" : {\n" +
" \"message\" : {\n" +
" \"query\" : \"to be or not to be\",\n" +
" \"type\" : \"phrase_prefix\",\n" +
" \"operator\" : \"OR\",\n" +
" \"slop\" : 2,\n" +
" \"prefix_length\" : 0,\n" +
" \"max_expansions\" : 30,\n" +
" \"fuzzy_transpositions\" : true,\n" +
" \"lenient\" : false,\n" +
" \"zero_terms_query\" : \"NONE\",\n" +
" \"boost\" : 1.0\n" +
" }\n" +
" }\n" +
"}";
MatchQueryBuilder qb = (MatchQueryBuilder) parseQuery(json);
checkGeneratedJson(json, qb);
assertEquals(json, expectedQB, qb);
assertSerialization(qb);
assertWarnings("Deprecated field [type] used, replaced by [match_phrase and match_phrase_prefix query]",
"Deprecated field [slop] used, replaced by [match_phrase query]");
}
public void testLegacyMatchPhraseQuery() throws IOException {
MatchQueryBuilder expectedQB = new MatchQueryBuilder("message", "to be or not to be");
expectedQB.type(Type.PHRASE);
expectedQB.slop(2);
String json = "{\n" +
" \"match\" : {\n" +
" \"message\" : {\n" +
" \"query\" : \"to be or not to be\",\n" +
" \"type\" : \"phrase\",\n" +
" \"operator\" : \"OR\",\n" +
" \"slop\" : 2,\n" +
" \"prefix_length\" : 0,\n" +
" \"max_expansions\" : 50,\n" +
" \"fuzzy_transpositions\" : true,\n" +
" \"lenient\" : false,\n" +
" \"zero_terms_query\" : \"NONE\",\n" +
" \"boost\" : 1.0\n" +
" }\n" +
" }\n" +
"}";
MatchQueryBuilder qb = (MatchQueryBuilder) parseQuery(json);
checkGeneratedJson(json, qb);
assertEquals(json, expectedQB, qb);
assertSerialization(qb);
assertWarnings("Deprecated field [type] used, replaced by [match_phrase and match_phrase_prefix query]",
"Deprecated field [slop] used, replaced by [match_phrase query]");
}
public void testFuzzinessOnNonStringField() throws Exception {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
MatchQueryBuilder query = new MatchQueryBuilder(INT_FIELD_NAME, 42);
query.fuzziness(randomFuzziness(INT_FIELD_NAME));
QueryShardContext context = createShardContext();
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> query.toQuery(context));
assertEquals("Can only use fuzzy queries on keyword and text fields - not on [mapped_int] which is of type [integer]",
e.getMessage());
query.analyzer("keyword"); // triggers a different code path
e = expectThrows(IllegalArgumentException.class,
() -> query.toQuery(context));
assertEquals("Can only use fuzzy queries on keyword and text fields - not on [mapped_int] which is of type [integer]",
e.getMessage());
query.lenient(true);
query.toQuery(context); // no exception
query.analyzer(null);
query.toQuery(context); // no exception
}
public void testExactOnUnsupportedField() throws Exception {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
MatchQueryBuilder query = new MatchQueryBuilder(GEO_POINT_FIELD_NAME, "2,3");
QueryShardContext context = createShardContext();
QueryShardException e = expectThrows(QueryShardException.class, () -> query.toQuery(context));
assertEquals("Geo fields do not support exact searching, use dedicated geo queries instead: [mapped_geo_point]", e.getMessage());
query.lenient(true);
query.toQuery(context); // no exception
}
public void testParseFailsWithMultipleFields() throws IOException {
String json = "{\n" +
" \"match\" : {\n" +
" \"message1\" : {\n" +
" \"query\" : \"this is a test\"\n" +
" },\n" +
" \"message2\" : {\n" +
" \"query\" : \"this is a test\"\n" +
" }\n" +
" }\n" +
"}";
ParsingException e = expectThrows(ParsingException.class, () -> parseQuery(json));
assertEquals("[match] query doesn't support multiple fields, found [message1] and [message2]", e.getMessage());
String shortJson = "{\n" +
" \"match\" : {\n" +
" \"message1\" : \"this is a test\",\n" +
" \"message2\" : \"this is a test\"\n" +
" }\n" +
"}";
e = expectThrows(ParsingException.class, () -> parseQuery(shortJson));
assertEquals("[match] query doesn't support multiple fields, found [message1] and [message2]", e.getMessage());
}
public void testParseFailsWithTermsArray() throws Exception {
String json1 = "{\n" +
" \"match\" : {\n" +
" \"message1\" : {\n" +
" \"query\" : [\"term1\", \"term2\"]\n" +
" }\n" +
" }\n" +
"}";
expectThrows(ParsingException.class, () -> parseQuery(json1));
String json2 = "{\n" +
" \"match\" : {\n" +
" \"message1\" : [\"term1\", \"term2\"]\n" +
" }\n" +
"}";
expectThrows(IllegalStateException.class, () -> parseQuery(json2));
}
@Override
protected void initializeAdditionalMappings(MapperService mapperService) throws IOException {
mapperService.merge("t_boost", new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef("t_boost",
"string_boost", "type=text,boost=4").string()), MapperService.MergeReason.MAPPING_UPDATE, false);
}
public void testMatchPhrasePrefixWithBoost() throws Exception {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
QueryShardContext context = createShardContext();
assumeTrue("test runs only when the index version is on or after V_5_0_0_alpha1",
context.indexVersionCreated().onOrAfter(Version.V_5_0_0_alpha1));
{
// field boost is applied on a single term query
MatchPhrasePrefixQueryBuilder builder = new MatchPhrasePrefixQueryBuilder("string_boost", "foo");
Query query = builder.toQuery(context);
assertThat(query, instanceOf(BoostQuery.class));
assertThat(((BoostQuery) query).getBoost(), equalTo(4f));
Query innerQuery = ((BoostQuery) query).getQuery();
assertThat(innerQuery, instanceOf(MultiPhrasePrefixQuery.class));
}
{
// field boost is ignored on phrase query
MatchPhrasePrefixQueryBuilder builder = new MatchPhrasePrefixQueryBuilder("string_boost", "foo bar");
Query query = builder.toQuery(context);
assertThat(query, instanceOf(MultiPhrasePrefixQuery.class));
}
}
}
|
|
package com.github.ponkin.bloom;
import java.io.Closeable;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* Bit array implementation with
* pre-defined bucket size. Bucket can contain
* several items. Each item must have pre defined
* length in bits.
* <p>
* BitArray is actually particular case of BucketSet with
* the following parameters: <code>bitsPerTag=1</code>,
* <code>tagsPerBucket=1</code>
*
* @author Alexey Ponkin
*
*/
class BucketSet implements Closeable {

    private static final Logger log = Logger.getLogger(BucketSet.class.getName());

    /*
     * Mask lookup table: MASKS[i] is a long with all zeros except a 1 in bit
     * position (i - 1), i.e. the highest bit of an i-bit-wide tag. Valid for
     * i in [0, 64]; MASKS[0] is 0 and never produces a set bit.
     *
     * NOTE: the previous table ended with an extra `1L << 64` entry. In Java
     * shift counts are taken mod 64, so that expression silently evaluates to
     * 1L; the entry was out of the usable index range anyway and has been
     * dropped.
     */
    private static final long[] MASKS = {
        0L, 1L, 1L << 1, 1L << 2, 1L << 3, 1L << 4, 1L << 5, 1L << 6, 1L << 7, 1L << 8,
        1L << 9, 1L << 10, 1L << 11, 1L << 12, 1L << 13, 1L << 14, 1L << 15, 1L << 16,
        1L << 17, 1L << 18, 1L << 19, 1L << 20, 1L << 21, 1L << 22, 1L << 23, 1L << 24,
        1L << 25, 1L << 26, 1L << 27, 1L << 28, 1L << 29, 1L << 30, 1L << 31, 1L << 32,
        1L << 33, 1L << 34, 1L << 35, 1L << 36, 1L << 37, 1L << 38, 1L << 39, 1L << 40,
        1L << 41, 1L << 42, 1L << 43, 1L << 44, 1L << 45, 1L << 46, 1L << 47, 1L << 48,
        1L << 49, 1L << 50, 1L << 51, 1L << 52, 1L << 53, 1L << 54, 1L << 55, 1L << 56,
        1L << 57, 1L << 58, 1L << 59, 1L << 60, 1L << 61, 1L << 62, 1L << 63,
    };

    // Number of tag slots per bucket.
    private final int tagsPerBucket;
    // Bucket size rounded up to whole bytes (kept for layout bookkeeping).
    private final int bytesPerBucket;
    // Total number of buckets in this set.
    private final long numBuckets;
    // Width of one tag in bits (1..64).
    private final int bitsPerTag;
    // Backing bit vector (project-local BitSet abstraction).
    private final BitSet bitset;

    /**
     * Constructor will not check that underlying
     * bitset length is enough to keep that many items
     *
     * @param bitsPerTag how many bits to use per item
     * @param tagsPerBucket how many items can be inside one bucket
     * @param numBuckets number of buckets
     * @param bitset backing bit vector; ownership is taken (closed by {@link #close()})
     */
    public BucketSet(int bitsPerTag, int tagsPerBucket, long numBuckets, BitSet bitset) {
        this.bitset = bitset;
        this.bitsPerTag = bitsPerTag;
        this.tagsPerBucket = tagsPerBucket;
        // round bits-per-bucket up to whole bytes
        bytesPerBucket = (bitsPerTag * tagsPerBucket + 7) >> 3;
        this.numBuckets = numBuckets;
        log.log(Level.FINE,
            String.format(
                "Bucket set: %1$d buckets, %2$d tags per bucket, %3$d bits per tag, %4$d total bits",
                numBuckets, tagsPerBucket, bitsPerTag, bitset.bitSize()));
    }

    /**
     * Append tag to bucket if there is free space
     * If tag is already in the bucket just return true
     *
     * @param bucketIdx target bucket index
     * @param tag tag to append
     * @return true if there is free space in bucket and
     * tag was successfully appended, or tag is already inside bucket, false otherwise
     */
    public boolean append(long bucketIdx, long tag) {
        boolean result = true;
        if (checkTag(bucketIdx, tag) == -1) { // no tag exists in the bucket
            int pos = getFreePosInBucket(bucketIdx);
            if (pos > -1) {
                writeTag(bucketIdx, pos, tag);
                result = true;
            } else {
                result = false; // no free space available
            }
        }
        return result;
    }

    /**
     * Calculate position of tag inside underlying
     * bit vector
     *
     * @param bucketIdx target bucket index
     * @param posInBucket position inside target bucket
     * @return index of first bit inside underlying bit vector
     */
    private long startPos(long bucketIdx, int posInBucket) {
        return bucketIdx * tagsPerBucket * bitsPerTag + posInBucket * bitsPerTag;
    }

    /**
     * Overwrite tag with zeros inside underlying bit vector
     * which is basically a delete operation.
     *
     * @param bucketIdx target bucket index
     * @param posInBucket target position in the bucket
     */
    public void deleteTag(long bucketIdx, int posInBucket) {
        writeTag(bucketIdx, posInBucket, 0L);
    }

    /**
     * Put tag in concrete bucket and position
     * overwriting previous value
     *
     * @param bucketIdx target bucket index
     * @param posInBucket position inside bucket
     * @param tag value
     */
    public void writeTag(long bucketIdx, int posInBucket, long tag) {
        long tagIdx = startPos(bucketIdx, posInBucket);
        long mask = MASKS[bitsPerTag];
        for (long i = tagIdx; i < tagIdx + bitsPerTag; i++) {
            if ((mask & tag) == 0L) {
                bitset.unset(i);
            } else {
                bitset.set(i);
            }
            // BUGFIX: use an unsigned shift. With bitsPerTag == 64 the initial
            // mask is 1L << 63 and the old signed `>>` sign-extended, smearing
            // the high bit across every subsequent position.
            mask = mask >>> 1;
        }
    }

    /**
     * Read tag in bucket with bucketIdx from pos
     *
     * @param bucketIdx target bucket index
     * @param posInBucket concrete tag position in bucket
     * @return tag in given position inside bucket
     */
    public final long readTag(long bucketIdx, int posInBucket) {
        long tagIdx = startPos(bucketIdx, posInBucket);
        long tag = 0L;
        long mask = MASKS[bitsPerTag];
        for (long i = tagIdx; i < tagIdx + bitsPerTag; i++) {
            if (bitset.get(i)) {
                tag |= mask;
            }
            // unsigned shift for the same reason as in writeTag (64-bit tags)
            mask = mask >>> 1;
        }
        return tag;
    }

    /**
     * Return first free position
     * in bucket if exists
     *
     * @param bucketIdx - target bucket index
     * @return free position in given bucket or -1 if none
     */
    public int getFreePosInBucket(long bucketIdx) {
        // an all-zero tag marks an empty slot
        return checkTag(bucketIdx, 0L);
    }

    /**
     * Check whether given tag exists
     * in the bucket
     *
     * @param bucketIdx target bucket index
     * @param tag tag to check
     * @return index of given tag or -1 if none
     */
    public int checkTag(long bucketIdx, long tag) {
        for (int pos = 0; pos < tagsPerBucket; pos++) {
            if (tag == readTag(bucketIdx, pos)) {
                return pos;
            }
        }
        return -1;
    }

    /** @return size of the underlying bit vector in bits */
    public long sizeInBits() {
        return bitset.bitSize();
    }

    /**
     * Merge the contents of another bucket set into this one by OR-ing the
     * underlying bit vectors.
     *
     * @param other bucket set to merge; must have a compatible layout
     * @throws Exception propagated from the underlying bit vector
     */
    public void putAll(BucketSet other) throws Exception {
        this.bitset.putAll(other.bitset);
    }

    /** Releases the underlying bit vector; failures are logged, not thrown. */
    @Override
    public void close() {
        log.log(Level.FINE, "Closing Bucket Set");
        try {
            bitset.close();
        } catch (Exception err) {
            log.log(Level.SEVERE, "Can not close BucketSet", err);
        }
    }

    /** Resets every bit, emptying all buckets. */
    public void clear() {
        bitset.clear();
    }
}
|
|
package org.jenkinsci.plugins.visual_diff;
import hudson.FilePath;
import hudson.Launcher;
import hudson.Extension;
import hudson.model.*;
import hudson.tasks.BuildStepDescriptor;
import hudson.util.FormValidation;
import org.apache.commons.lang.math.NumberUtils;
import org.jenkinsci.plugins.visual_diff.comparison.ComparisonDescribable;
import org.jenkinsci.plugins.visual_diff.data.Screen;
import org.jenkinsci.plugins.visual_diff.data.ScreenList;
import org.jenkinsci.plugins.visual_diff.utils.BuildArtifacts;
import org.jenkinsci.plugins.visual_diff.utils.ProjectArtifacts;
import org.kohsuke.stapler.DataBoundConstructor;
import org.kohsuke.stapler.QueryParameter;
import javax.servlet.ServletException;
import java.io.IOException;
import java.util.LinkedList;
import java.util.List;
/**
* Builder
*
* @author Marcel Erz
*/
public class Builder extends hudson.tasks.Builder {

    public static final String FAILED = "failed";
    public static final String UNSTABLE = "unstable";
    public static final String NOTHING = "nothing";

    /**
     * List of all comparisons
     */
    private final List<ComparisonDescribable> comparisons;

    /**
     * Change build result to...
     */
    private final String markAs;

    /**
     * Number of missing screens until build result changes trigger
     */
    private final int numberOfMissing;

    /**
     * Constructor for Builder
     *
     * @param comparisons List of comparisons
     * @param markAs result to mark the build with when too many screens are missing
     *               ({@link #FAILED}, {@link #UNSTABLE} or {@link #NOTHING})
     * @param numberOfMissing threshold of missing approved screens
     */
    @DataBoundConstructor
    public Builder(List<ComparisonDescribable> comparisons, String markAs, int numberOfMissing) {
        this.comparisons = comparisons;
        this.markAs = markAs;
        this.numberOfMissing = numberOfMissing;
    }

    /**
     * Gets all comparisons
     *
     * @return Comparisons
     */
    public List<ComparisonDescribable> getComparisons() {
        return comparisons;
    }

    /**
     * Change build result to...
     *
     * @return Build Result
     */
    public String getMarkAs() {
        return markAs;
    }

    /**
     * Number of missing screens until build result changes
     *
     * @return Number of missing screens
     */
    public int getNumberOfMissing() {
        return numberOfMissing;
    }

    /**
     * Performing build: runs every configured comparison, detects approved
     * screens missing from this build and downgrades the build result when the
     * missing count reaches the configured threshold.
     *
     * @param build Current build
     * @param launcher Launcher
     * @param listener Listener for console
     * @return always {@code true}; problems are reported via the build result
     */
    @Override
    public boolean perform(AbstractBuild build, Launcher launcher, BuildListener listener) throws InterruptedException, IOException {
        ProjectArtifacts projectArtifacts = new ProjectArtifacts(build.getProject());
        projectArtifacts.createFolders(listener);
        BuildArtifacts buildArtifacts = new BuildArtifacts(build);
        buildArtifacts.createFolders(listener);

        // Copy all approved screens
        listener.getLogger().println("Copy approved screens...");
        buildArtifacts.duplicateApprovedProjectScreen();

        // Run through all comparisons
        DataAction data = new DataAction();
        ScreenList completeList = data.getScreenList();
        for (ComparisonDescribable comparison : comparisons) {
            // Process all screens
            ScreenList list = comparison.processAll(build, launcher, listener);
            completeList.addAll(list);
        }

        // Find missing screens
        listener.getLogger().println("Find missing screens...");
        int missingApprovedScreens = 0;
        FilePath[] approvedScreens = projectArtifacts.getScreens();
        for (FilePath approvedScreen : approvedScreens) {
            String screenName = approvedScreen.getName();
            // Screen not found in build-lists
            if (!completeList.hasScreenName(screenName)) {
                Screen newScreen = new Screen(screenName);
                newScreen.approvedImage();
                newScreen.approve();
                completeList.add(newScreen);
                missingApprovedScreens++;
            }
        }

        // Too many differences?
        if (missingApprovedScreens >= numberOfMissing) {
            listener.getLogger().println("Too many missing.");
            // Mark build as requested; default (null markAs) behaves like UNSTABLE
            if ((markAs != null) && markAs.equals(FAILED)) {
                build.setResult(Result.FAILURE);
            } else if ((markAs == null) || markAs.equals(UNSTABLE)) {
                Result result = build.getResult();
                // only downgrade, never upgrade an already-worse result
                if ((result == null) || result.isBetterThan(Result.UNSTABLE)) {
                    build.setResult(Result.UNSTABLE);
                }
            }
        }

        // Add all action
        build.addAction(data);
        build.addAction(new CompareAction(build));
        return true;
    }

    /**
     * Get descriptor
     *
     * @return Descriptor
     */
    @Override
    public DescriptorImpl getDescriptor() {
        return (DescriptorImpl) super.getDescriptor();
    }

    /**
     * Descriptor for {@link Builder}.
     */
    @Extension
    public static final class DescriptorImpl extends BuildStepDescriptor<hudson.tasks.Builder> {

        /**
         * Descriptor implementation
         */
        public DescriptorImpl() {
            load();
        }

        /**
         * Gets the title of the Builder descriptor
         *
         * @return Title
         */
        public String getDisplayName() {
            return "Visual Diff";
        }

        /**
         * Does apply to all project-types
         *
         * @param aClass Class of project
         * @return Does apply?
         */
        public boolean isApplicable(Class<? extends AbstractProject> aClass) {
            return true;
        }

        /**
         * Determines a list of available ComparisonDescribable descriptors
         *
         * @param p Project
         * @return List of descriptors
         */
        public List<Descriptor<? extends ComparisonDescribable>> getComparisonDescribables(AbstractProject<?, ?> p) {
            List<Descriptor<? extends ComparisonDescribable>> list = new LinkedList<Descriptor<? extends ComparisonDescribable>>();
            for (Descriptor<? extends ComparisonDescribable> rs : ComparisonDescribable.all()) {
                list.add(rs);
            }
            return list;
        }

        /**
         * Will be called when numberOfMissing field is validated
         *
         * @param value Value of field
         * @return Validation result
         * @throws IOException
         * @throws InterruptedException
         * @throws ServletException
         */
        public FormValidation doCheckNumberOfMissing(@QueryParameter String value)
                throws IOException, InterruptedException, ServletException {
            // FIX: NumberUtils.toInt returns an int; storing it in a float and
            // comparing with == was a lossless-but-misleading type mismatch.
            int number = NumberUtils.toInt(value, -1);
            if (number == -1)
                return FormValidation.error("Please enter a number!");
            if (number < 1)
                return FormValidation.error("The value should be greater than or equal to one.");
            return FormValidation.ok();
        }
    }
}
|
|
/**
*
* Copyright 2003-2007 Jive Software.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jivesoftware.smackx.jingleold.packet;
import org.jivesoftware.smack.packet.IQ;
import org.jivesoftware.smackx.jingleold.JingleActionEnum;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
/**
* An Jingle sub-packet, which is used by XMPP clients to exchange info like
* descriptions and transports. <p/> The following link summarizes the
* requirements of Jingle IM: <a
* href="http://www.xmpp.org/extensions/jep-0166.html">Valid tags</a>.
* <p/>
* <p/> Warning: this is an non-standard protocol documented by <a
* href="http://www.xmpp.org/extensions/jep-0166.html">XEP-166</a>. Because this is
* a non-standard protocol, it is subject to change.
*
* @author Alvaro Saurin
*/
public class Jingle extends IQ {
// static
public static final String NAMESPACE = "urn:xmpp:tmp:jingle";
public static final String NODENAME = "jingle";
// non-static
private String sid; // The session id
private JingleActionEnum action; // The action associated to the Jingle
private String initiator; // The initiator as a "user@host/resource"
private String responder; // The responder
// Sub-elements of a Jingle object.
private final List<JingleContent> contents = new ArrayList<JingleContent>();
private JingleContentInfo contentInfo;
/**
* A constructor where the main components can be initialized.
*/
public Jingle(final List<JingleContent> contents, final JingleContentInfo mi,
final String sid) {
this();
if (contents != null) {
contents.addAll(contents);
}
setContentInfo(mi);
setSid(sid);
// Set null all other fields in the packet
initiator = null;
responder = null;
action = null;
}
/**
* Constructor with a contents.
*
* @param content a content
*/
public Jingle(final JingleContent content) {
this();
addContent(content);
// Set null all other fields in the packet
initiator = null;
responder = null;
// Some default values for the most common situation...
action = JingleActionEnum.UNKNOWN;
this.setType(IQ.Type.set);
}
/**
* Constructor with a content info.
*
* @param info The content info
*/
public Jingle(final JingleContentInfo info) {
this();
setContentInfo(info);
// Set null all other fields in the packet
initiator = null;
responder = null;
// Some default values for the most common situation...
action = JingleActionEnum.UNKNOWN;
this.setType(IQ.Type.set);
}
/**
* A constructor where the action can be specified.
*
* @param action The action.
*/
public Jingle(final JingleActionEnum action) {
this(null, null, null);
this.action = action;
// In general, a Jingle with an action is used in a SET packet...
this.setType(IQ.Type.set);
}
/**
* A constructor where the session ID can be specified.
*
* @param sid The session ID related to the negotiation.
* @see #setSid(String)
*/
public Jingle(final String sid) {
this(null, null, sid);
}
/**
* The default constructor
*/
public Jingle() {
super(NODENAME, NAMESPACE);
}
/**
* Set the session ID related to this session. The session ID is a unique
* identifier generated by the initiator. This should match the XML Nmtoken
* production so that XML character escaping is not needed for characters
* such as &.
*
* @param sid the session ID
*/
public final void setSid(final String sid) {
this.sid = sid;
}
/**
* Returns the session ID related to the session. The session ID is a unique
* identifier generated by the initiator. This should match the XML Nmtoken
* production so that XML character escaping is not needed for characters
* such as &.
*
* @return Returns the session ID related to the session.
* @see #setSid(String)
*/
public String getSid() {
return sid;
}
/**
* Returns the XML element name of the extension sub-packet root element.
* Always returns "jingle"
*
* @return the XML element name of the packet extension.
*/
public static String getElementName() {
return NODENAME;
}
/**
* Returns the XML namespace of the extension sub-packet root element.
*
* @return the XML namespace of the packet extension.
*/
public static String getNamespace() {
return NAMESPACE;
}
/**
* @return the audioInfo
*/
public JingleContentInfo getContentInfo() {
return contentInfo;
}
/**
* @param contentInfo the audioInfo to set
*/
public void setContentInfo(final JingleContentInfo contentInfo) {
this.contentInfo = contentInfo;
}
/**
* Get an iterator for the contents
*
* @return the contents
*/
public Iterator<JingleContent> getContents() {
synchronized (contents) {
return Collections.unmodifiableList(new ArrayList<JingleContent>(contents)).iterator();
}
}
/**
* Get an iterator for the content
*
* @return the contents
*/
public List<JingleContent> getContentsList() {
synchronized (contents) {
return new ArrayList<JingleContent>(contents);
}
}
/**
* Add a new content.
*
* @param content the content to add
*/
public void addContent(final JingleContent content) {
if (content != null) {
synchronized (contents) {
contents.add(content);
}
}
}
/**
* Add a list of JingleContent elements
*
* @param contentList the list of contents to add
*/
public void addContents(final List<JingleContent> contentList) {
if (contentList != null) {
synchronized (contents) {
contents.addAll(contentList);
}
}
}
/**
* Get the action specified in the packet
*
* @return the action
*/
public JingleActionEnum getAction() {
return action;
}
/**
* Set the action in the packet
*
* @param action the action to set
*/
public void setAction(final JingleActionEnum action) {
this.action = action;
}
/**
* Get the initiator. The initiator will be the full JID of the entity that
* has initiated the flow (which may be different to the "from" address in
* the IQ)
*
* @return the initiator
*/
public String getInitiator() {
return initiator;
}
/**
* Set the initiator. The initiator must be the full JID of the entity that
* has initiated the flow (which may be different to the "from" address in
* the IQ)
*
* @param initiator the initiator to set
*/
public void setInitiator(final String initiator) {
this.initiator = initiator;
}
/**
* Get the responder. The responder is the full JID of the entity that has
* replied to the initiation (which may be different to the "to" addresss in
* the IQ).
*
* @return the responder
*/
public String getResponder() {
return responder;
}
/**
* Set the responder. The responder must be the full JID of the entity that
* has replied to the initiation (which may be different to the "to"
* addresss in the IQ).
*
* @param resp the responder to set
*/
public void setResponder(final String resp) {
responder = resp;
}
/**
* Get a hash key for the session this packet belongs to.
*
* @param sid The session id
* @param initiator The initiator
* @return A hash key
*/
public static int getSessionHash(final String sid, final String initiator) {
final int PRIME = 31;
int result = 1;
result = PRIME * result + (initiator == null ? 0 : initiator.hashCode());
result = PRIME * result + (sid == null ? 0 : sid.hashCode());
return result;
}
/**
* Return the XML representation of the packet.
*
* @return the XML string
*/
protected IQChildElementXmlStringBuilder getIQChildElementBuilder(IQChildElementXmlStringBuilder buf) {
if (getInitiator() != null) {
buf.append(" initiator=\"").append(getInitiator()).append("\"");
}
if (getResponder() != null) {
buf.append(" responder=\"").append(getResponder()).append("\"");
}
if (getAction() != null) {
buf.append(" action=\"").append(getAction().name()).append("\"");
}
if (getSid() != null) {
buf.append(" sid=\"").append(getSid()).append("\"");
}
buf.append(">");
synchronized (contents) {
for (JingleContent content : contents) {
buf.append(content.toXML());
}
}
// and the same for audio jmf info
if (contentInfo != null) {
buf.append(contentInfo.toXML());
}
return buf;
}
}
|